Merge pull request #3948 from Byron/watcher-refactor

watcher refactor
Kiril Videlov 2024-06-24 15:47:12 +02:00 committed by GitHub
commit 4477cfcb1d
49 changed files with 118 additions and 508 deletions


@@ -54,7 +54,7 @@ jobs:
       - 'crates/gitbutler-cli/**'
     gitbutler-watcher:
       - *rust
-      - 'crates/gitbutler-cli/**'
+      - 'crates/gitbutler-watcher/**'
   lint-node:
     needs: changes
@@ -228,7 +228,7 @@ jobs:
       - uses: ./.github/actions/init-env-rust
       - uses: ./.github/actions/check-crate
         with:
-          crate: gitbutler-watcher
+          crate: gitbutler-notify-debouncer
           features: ${{ toJson(matrix.features) }}
           action: ${{ matrix.action }}

Cargo.lock

@@ -2177,6 +2177,24 @@ dependencies = [
  "windows-named-pipe",
 ]
 
+[[package]]
+name = "gitbutler-notify-debouncer"
+version = "0.0.0"
+dependencies = [
+ "crossbeam-channel",
+ "deser-hjson",
+ "file-id",
+ "gitbutler-notify-debouncer",
+ "mock_instant",
+ "notify",
+ "parking_lot 0.12.3",
+ "pretty_assertions",
+ "rstest",
+ "serde",
+ "tracing",
+ "walkdir",
+]
+
 [[package]]
 name = "gitbutler-tauri"
 version = "0.0.0"
@@ -2232,26 +2250,15 @@ version = "0.0.0"
 dependencies = [
  "anyhow",
  "backoff",
- "crossbeam-channel",
- "deser-hjson",
- "file-id",
  "futures",
  "gitbutler-core",
- "gitbutler-watcher",
+ "gitbutler-notify-debouncer",
  "gix",
- "itertools 0.13.0",
- "mock_instant",
  "notify",
- "parking_lot 0.12.3",
- "pretty_assertions",
- "rand 0.8.5",
- "rstest",
- "serde",
  "thiserror",
  "tokio",
  "tokio-util",
  "tracing",
- "walkdir",
 ]
 
 [[package]]


@@ -4,6 +4,7 @@ members = [
     "crates/gitbutler-tauri",
     "crates/gitbutler-git",
     "crates/gitbutler-watcher",
+    "crates/gitbutler-watcher/vendor/debouncer",
     "crates/gitbutler-testsupport",
     "crates/gitbutler-cli",
 ]


@@ -5,11 +5,9 @@ edition = "2021"
 publish = false
 
 [lib]
+test = false
 doctest = false
 
-[features]
-mock_instant = ["dep:mock_instant"]
-
 [dependencies]
 gitbutler-core.workspace = true
 thiserror.workspace = true
@@ -22,21 +20,7 @@ gix = { workspace = true, features = ["excludes"] }
 backoff = "0.4.0"
 notify = { version = "6.0.1" }
-parking_lot = "0.12.3"
-file-id = "0.2.1"
-walkdir = "2.2.2"
-crossbeam-channel = "0.5.13"
-itertools = "0.13"
-mock_instant = { version = "0.3.2", optional = true }
-
-[dev-dependencies]
-gitbutler-watcher = { path = ".", features = ["mock_instant"] }
-pretty_assertions = "1.3.0"
-rstest = "0.20"
-serde = { version = "1.0.203", features = ["derive"] }
-deser-hjson = "1.1.1"
-rand = "0.8.5"
+gitbutler-notify-debouncer.path = "vendor/debouncer"
 
 [lints.clippy]
 all = "deny"


@@ -2,12 +2,11 @@ use std::collections::HashSet;
 use std::path::Path;
 use std::time::Duration;
 
-use crate::debouncer::Debouncer;
-use crate::debouncer::FileIdMap;
-use crate::{debouncer::new_debouncer, events::InternalEvent};
+use crate::events::InternalEvent;
 use anyhow::{anyhow, Context, Result};
 use gitbutler_core::ops::OPLOG_FILE_NAME;
 use gitbutler_core::projects::ProjectId;
+use gitbutler_notify_debouncer::{new_debouncer, Debouncer, NoCache};
 use notify::RecommendedWatcher;
 use notify::Watcher;
 use tokio::task;
@@ -56,7 +55,7 @@ pub fn spawn(
     project_id: ProjectId,
     worktree_path: &std::path::Path,
     out: tokio::sync::mpsc::UnboundedSender<InternalEvent>,
-) -> Result<Debouncer<RecommendedWatcher, FileIdMap>> {
+) -> Result<Debouncer<RecommendedWatcher, NoCache>> {
     let (notify_tx, notify_rx) = std::sync::mpsc::channel();
     let mut debouncer = new_debouncer(
         DEBOUNCE_TIMEOUT,


@@ -18,7 +18,6 @@ use tokio::{
 };
 use tokio_util::sync::CancellationToken;
 
-mod debouncer;
 mod file_monitor;
 mod handler;


@@ -1,267 +0,0 @@
-use file_id::FileId;
-use mock_instant::Instant;
-use notify::event::{
-    AccessKind, AccessMode, CreateKind, DataChange, Flag, MetadataKind, ModifyKind, RemoveKind,
-    RenameMode,
-};
-use notify::{ErrorKind, EventKind};
-use std::collections::{HashMap, VecDeque};
-use std::path::PathBuf;
-use std::time::Duration;
-
-use crate::{schema, TestCache};
-use serde::Deserialize;
-
-#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
-pub(crate) struct Error {
-    /// The error kind is parsed by `into_notify_error`
-    pub kind: String,
-
-    /// The error paths
-    #[serde(default)]
-    pub paths: Vec<String>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
-pub(crate) struct Event {
-    /// The timestamp the event occurred
-    #[serde(default)]
-    pub time: u64,
-
-    /// The event kind is parsed by `into_notify_event`
-    pub kind: String,
-
-    /// The event paths
-    #[serde(default)]
-    pub paths: Vec<String>,
-
-    /// The event flags
-    #[serde(default)]
-    pub flags: Vec<String>,
-
-    /// The event tracker
-    pub tracker: Option<usize>,
-
-    /// The event info
-    pub info: Option<String>,
-
-    /// The file id for the file associated with the event
-    ///
-    /// Only used for the rename event.
-    pub file_id: Option<u64>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
-pub(crate) struct Queue {
-    pub events: Vec<Event>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
-pub(crate) struct State {
-    /// Timeout for the debouncer
-    ///
-    /// Only used for the initial state.
-    pub timeout: Option<u64>,
-
-    /// The event queues for each file
-    #[serde(default)]
-    pub queues: HashMap<String, Queue>,
-
-    /// Cached file ids
-    #[serde(default)]
-    pub cache: HashMap<String, u64>,
-
-    /// A map of file ids, used instead of accessing the file system
-    #[serde(default)]
-    pub file_system: HashMap<String, u64>,
-
-    /// Current rename event
-    pub rename_event: Option<Event>,
-
-    /// Current rescan event
-    pub rescan_event: Option<Event>,
-
-    /// Debounced events
-    ///
-    /// Only used for the expected state.
-    #[serde(default)]
-    pub events: HashMap<String, Vec<Event>>,
-
-    /// Errors
-    #[serde(default)]
-    pub errors: Vec<Error>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
-pub(crate) struct TestCase {
-    /// Initial state
-    pub state: State,
-
-    /// Events that are added during the test
-    #[serde(default)]
-    pub events: Vec<Event>,
-
-    /// Errors that are added during the test
-    #[serde(default)]
-    pub errors: Vec<Error>,
-
-    /// Expected state after the test
-    pub expected: State,
-}
-
-impl Error {
-    pub fn into_notify_error(self) -> notify::Error {
-        let kind = match &*self.kind {
-            "path-not-found" => ErrorKind::PathNotFound,
-            "watch-not-found" => ErrorKind::WatchNotFound,
-            "max-files-watch" => ErrorKind::MaxFilesWatch,
-            _ => panic!("unknown error type `{}`", self.kind),
-        };
-        let mut error = notify::Error::new(kind);
-        for p in self.paths {
-            error = error.add_path(PathBuf::from(p));
-        }
-        error
-    }
-}
-
-impl Event {
-    #[rustfmt::skip]
-    pub fn into_debounced_event(self, time: Instant, path: Option<&str>) -> DebouncedEvent {
-        let kind = match &*self.kind {
-            "any" => EventKind::Any,
-            "other" => EventKind::Other,
-            "access-any" => EventKind::Access(AccessKind::Any),
-            "access-read" => EventKind::Access(AccessKind::Read),
-            "access-open-any" => EventKind::Access(AccessKind::Open(AccessMode::Any)),
-            "access-open-execute" => EventKind::Access(AccessKind::Open(AccessMode::Execute)),
-            "access-open-read" => EventKind::Access(AccessKind::Open(AccessMode::Read)),
-            "access-open-write" => EventKind::Access(AccessKind::Open(AccessMode::Write)),
-            "access-open-other" => EventKind::Access(AccessKind::Open(AccessMode::Other)),
-            "access-close-any" => EventKind::Access(AccessKind::Close(AccessMode::Any)),
-            "access-close-execute" => EventKind::Access(AccessKind::Close(AccessMode::Execute)),
-            "access-close-read" => EventKind::Access(AccessKind::Close(AccessMode::Read)),
-            "access-close-write" => EventKind::Access(AccessKind::Close(AccessMode::Write)),
-            "access-close-other" => EventKind::Access(AccessKind::Close(AccessMode::Other)),
-            "access-other" => EventKind::Access(AccessKind::Other),
-            "create-any" => EventKind::Create(CreateKind::Any),
-            "create-file" => EventKind::Create(CreateKind::File),
-            "create-folder" => EventKind::Create(CreateKind::Folder),
-            "create-other" => EventKind::Create(CreateKind::Other),
-            "modify-any" => EventKind::Modify(ModifyKind::Any),
-            "modify-other" => EventKind::Modify(ModifyKind::Other),
-            "modify-data-any" => EventKind::Modify(ModifyKind::Data(DataChange::Any)),
-            "modify-data-size" => EventKind::Modify(ModifyKind::Data(DataChange::Size)),
-            "modify-data-content" => EventKind::Modify(ModifyKind::Data(DataChange::Content)),
-            "modify-data-other" => EventKind::Modify(ModifyKind::Data(DataChange::Other)),
-            "modify-metadata-any" => EventKind::Modify(ModifyKind::Metadata(MetadataKind::Any)),
-            "modify-metadata-access-time" => EventKind::Modify(ModifyKind::Metadata(MetadataKind::AccessTime)),
-            "modify-metadata-write-time" => EventKind::Modify(ModifyKind::Metadata(MetadataKind::WriteTime)),
-            "modify-metadata-permissions" => EventKind::Modify(ModifyKind::Metadata(MetadataKind::Permissions)),
-            "modify-metadata-ownership" => EventKind::Modify(ModifyKind::Metadata(MetadataKind::Ownership)),
-            "modify-metadata-extended" => EventKind::Modify(ModifyKind::Metadata(MetadataKind::Extended)),
-            "modify-metadata-other" => EventKind::Modify(ModifyKind::Metadata(MetadataKind::Other)),
-            "rename-any" => EventKind::Modify(ModifyKind::Name(RenameMode::Any)),
-            "rename-from" => EventKind::Modify(ModifyKind::Name(RenameMode::From)),
-            "rename-to" => EventKind::Modify(ModifyKind::Name(RenameMode::To)),
-            "rename-both" => EventKind::Modify(ModifyKind::Name(RenameMode::Both)),
-            "rename-other" => EventKind::Modify(ModifyKind::Name(RenameMode::Other)),
-            "remove-any" => EventKind::Remove(RemoveKind::Any),
-            "remove-file" => EventKind::Remove(RemoveKind::File),
-            "remove-folder" => EventKind::Remove(RemoveKind::Folder),
-            "remove-other" => EventKind::Remove(RemoveKind::Other),
-            _ => panic!("unknown event type `{}`", self.kind),
-        };
-        let mut event = notify::Event::new(kind);
-        for p in self.paths {
-            event = event.add_path(if p == "*" {
-                PathBuf::from(path.expect("cannot replace `*`"))
-            } else {
-                PathBuf::from(p)
-            });
-            if let Some(tracker) = self.tracker {
-                event = event.set_tracker(tracker);
-            }
-            if let Some(info) = &self.info {
-                event = event.set_info(info.as_str());
-            }
-        }
-        for f in self.flags {
-            let flag = match &*f {
-                "rescan" => Flag::Rescan,
-                _ => panic!("unknown event flag `{f}`"),
-            };
-            event = event.set_flag(flag);
-        }
-        DebouncedEvent { event, time: time + Duration::from_millis(self.time) }
-    }
-}
-
-impl State {
-    pub(crate) fn into_debounce_data_inner(self, time: Instant) -> DebounceDataInner<TestCache> {
-        let queues = self
-            .queues
-            .into_iter()
-            .map(|(path, queue)| {
-                let queue = Queue {
-                    events: queue
-                        .events
-                        .into_iter()
-                        .map(|event| event.into_debounced_event(time, Some(&path)))
-                        .collect::<VecDeque<_>>(),
-                };
-                (path.into(), queue)
-            })
-            .collect();
-        let cache = self
-            .cache
-            .into_iter()
-            .map(|(path, id)| {
-                let path = PathBuf::from(path);
-                let id = FileId::new_inode(id, id);
-                (path, id)
-            })
-            .collect::<HashMap<_, _>>();
-        let file_system = self
-            .file_system
-            .into_iter()
-            .map(|(path, id)| {
-                let path = PathBuf::from(path);
-                let id = FileId::new_inode(id, id);
-                (path, id)
-            })
-            .collect::<HashMap<_, _>>();
-        let cache = TestCache::new(cache, file_system);
-        let rename_event = self.rename_event.map(|e| {
-            let file_id = e.file_id.map(|id| FileId::new_inode(id, id));
-            let event = e.into_debounced_event(time, None);
-            (event, file_id)
-        });
-        let rescan_event = self
-            .rescan_event
-            .map(|e| e.into_debounced_event(time, None));
-        DebounceDataInner {
-            queues,
-            roots: Vec::new(),
-            cache,
-            rename_event,
-            rescan_event,
-            errors: Vec::new(),
-            timeout: Duration::from_millis(self.timeout.unwrap_or(50)),
-        }
-    }
-}


@@ -0,0 +1,34 @@
+[package]
+name = "gitbutler-notify-debouncer"
+version = "0.0.0"
+edition = "2021"
+publish = false
+
+[lib]
+doctest = false
+
+[features]
+mock_instant = ["dep:mock_instant"]
+
+[dependencies]
+tracing = "0.1.40"
+notify = { version = "6.0.1" }
+parking_lot = "0.12.3"
+file-id = "0.2.1"
+walkdir = "2.2.2"
+crossbeam-channel = "0.5.13"
+mock_instant = { version = "0.3.2", optional = true }
+
+[dev-dependencies]
+gitbutler-notify-debouncer = { path = ".", features = ["mock_instant"] }
+pretty_assertions = "1.4.0"
+rstest = "0.20"
+serde = { version = "1.0.203", features = ["derive"] }
+deser-hjson = "1.1.1"
+
+[lints.clippy]
+all = "deny"
+perf = "deny"
+correctness = "deny"
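The crate listing itself under [dev-dependencies] with the mock_instant feature enabled is what lets the optional mock_instant dependency exist only while the crate's own tests build (it also explains the self-referential entry in Cargo.lock above). A small sketch of the time-source switch this supports; only the #[cfg(test)] import appears in this diff, the std fallback and the helper are assumptions for illustration:

// Sketch only: with the `mock_instant` feature active during `cargo test`, the debouncer
// can run against a controllable clock instead of the real one.
#[cfg(test)]
use mock_instant::Instant;
#[cfg(not(test))]
use std::time::Instant; // assumed fallback, not shown in this diff

// Hypothetical helper: elapsed milliseconds using whichever clock is active.
fn elapsed_ms(start: Instant) -> u128 {
    start.elapsed().as_millis()
}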


@@ -83,7 +83,6 @@ impl FileIdMap {
     /// If `recursive_mode` is `Recursive`, all children will be added to the cache as well
     /// and all paths will be kept up-to-date in case of changes like new files being added,
     /// files being removed or renamed.
-    #[allow(dead_code)]
     pub fn add_root(&mut self, path: impl Into<PathBuf>, recursive_mode: RecursiveMode) {
         let path = path.into();
@@ -95,7 +94,6 @@ impl FileIdMap {
     /// Remove a path from the cache.
     ///
     /// If the path was added with `Recursive` mode, all children will also be removed from the cache.
-    #[allow(dead_code)]
     pub fn remove_root(&mut self, path: impl AsRef<Path>) {
         self.roots.retain(|(root, _)| !root.starts_with(&path));


@@ -32,7 +32,7 @@ mod cache;
 mod event;
 
 #[cfg(test)]
-mod testing;
+mod tests;
 
 use std::{
     collections::{HashMap, VecDeque},
@@ -44,14 +44,11 @@ use std::{
     time::Duration,
 };
 
-#[allow(unused_imports)]
 pub use cache::{FileIdCache, FileIdMap, NoCache};
 pub use event::DebouncedEvent;
-#[allow(unused_imports)]
 pub use file_id;
-#[allow(unused_imports)]
 pub use notify;
 
 use file_id::FileId;
@@ -59,7 +56,7 @@ use notify::{
     event::{ModifyKind, RemoveKind, RenameMode},
     Error, ErrorKind, Event, EventKind, RecommendedWatcher, Watcher,
 };
-use parking_lot::Mutex;
+use parking_lot::{MappedMutexGuard, Mutex, MutexGuard};
 
 #[cfg(test)]
 use mock_instant::Instant;
@@ -487,7 +484,6 @@ impl<T: FileIdCache> DebounceDataInner<T> {
 pub struct Debouncer<T: Watcher, C: FileIdCache> {
     watcher: T,
     debouncer_thread: Option<std::thread::JoinHandle<()>>,
-    #[allow(dead_code)]
     data: DebounceData<C>,
     stop: Arc<AtomicBool>,
     flush: Arc<AtomicBool>,
@@ -496,7 +492,6 @@ pub struct Debouncer<T: Watcher, C: FileIdCache> {
 impl<T: Watcher, C: FileIdCache> Debouncer<T, C> {
     /// Stop the debouncer, waits for the event thread to finish.
     /// May block for the duration of one tick_rate.
-    #[allow(dead_code)]
     pub fn stop(mut self) {
         self.set_stop();
         if let Some(t) = self.debouncer_thread.take() {
@@ -505,7 +500,6 @@ impl<T: Watcher, C: FileIdCache> Debouncer<T, C> {
     }
 
     /// Stop the debouncer, does not wait for the event thread to finish.
-    #[allow(dead_code)]
     pub fn stop_nonblocking(self) {
         self.set_stop();
     }
@@ -519,6 +513,11 @@ impl<T: Watcher, C: FileIdCache> Debouncer<T, C> {
         self.flush.store(true, Ordering::Relaxed);
     }
 
+    /// Access to the internally used file ID cache.
+    pub fn cache(&mut self) -> MappedMutexGuard<'_, C> {
+        MutexGuard::map(self.data.lock(), |data| &mut data.cache)
+    }
+
     /// Access to the internally used notify Watcher backend
     pub fn watcher(&mut self) -> &mut T {
         &mut self.watcher
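The new cache() accessor maps the internal mutex guard onto the cache field, which is what makes the now-public FileIdMap::add_root and remove_root (see the cache.rs hunk above) reachable from outside the crate. A minimal sketch, not part of this diff, of how a caller could use it when opting back into the FileIdMap cache; it assumes the vendored crate keeps notify-debouncer-full's closure-based DebounceEventHandler impl, and the timings are illustrative:

use std::path::Path;
use std::time::Duration;

use gitbutler_notify_debouncer::{new_debouncer_opt, DebouncedEvent, Debouncer, FileIdMap};
use notify::{RecommendedWatcher, RecursiveMode};

fn watch_with_file_ids(
    path: &Path,
) -> Result<Debouncer<RecommendedWatcher, FileIdMap>, notify::Error> {
    // Explicitly pick the FileIdMap cache; `new_debouncer` now defaults to `NoCache`.
    let mut debouncer: Debouncer<RecommendedWatcher, FileIdMap> = new_debouncer_opt(
        Duration::from_millis(100), // debounce timeout (illustrative)
        None,                       // default tick rate
        None,                       // no idle-based flush
        // Assumed: closures over the debounced result implement DebounceEventHandler.
        |result: Result<Vec<DebouncedEvent>, Vec<notify::Error>>| {
            let _ = result;
        },
        FileIdMap::new(),
        notify::Config::default(),
    )?;
    debouncer.watcher().watch(path, RecursiveMode::Recursive)?;
    // `cache()` yields a mapped guard over the cache behind the internal Mutex.
    debouncer.cache().add_root(path, RecursiveMode::Recursive);
    Ok(debouncer)
}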
@@ -567,26 +566,27 @@ pub fn new_debouncer_opt<F: DebounceEventHandler, T: Watcher, C: FileIdCache + S
         })?,
     };
 
-    let data_c = data.clone();
-    let stop_c = stop.clone();
-    let flush_c = flush.clone();
-
-    let mut idle_count = 0;
-    let mut prev_queue_count = 0;
-
     let thread = std::thread::Builder::new()
         .name("notify-rs debouncer loop".to_string())
-        .spawn(move || loop {
-            if stop_c.load(Ordering::Acquire) {
+        .spawn({
+            let data = data.clone();
+            let stop = stop.clone();
+            let flush = flush.clone();
+            let mut idle_count = 0;
+            let mut prev_queue_count = 0;
+            move || loop {
+            if stop.load(Ordering::Acquire) {
                 break;
             }
 
-            let mut should_flush = flush_c.load(Ordering::Acquire);
+            let mut should_flush = flush.load(Ordering::Acquire);
 
             std::thread::sleep(tick);
 
             let send_data;
             let errors;
             {
-                let mut lock = data_c.lock();
+                let mut lock = data.lock();
 
                 let queue_count = lock.queues.values().fold(0, |acc, x| acc + x.events.len());
                 if prev_queue_count == queue_count {
@@ -595,17 +595,15 @@ pub fn new_debouncer_opt<F: DebounceEventHandler, T: Watcher, C: FileIdCache + S
                     prev_queue_count = queue_count
                 }
 
-                if let Some(threshold) = flush_after {
-                    if idle_count >= threshold {
+                if flush_after.map_or(false, |threshold| idle_count >= threshold) {
                     idle_count = 0;
                     prev_queue_count = 0;
                     should_flush = true;
                 }
-                }
 
                 send_data = lock.debounced_events(should_flush);
                 if should_flush {
-                    flush_c.store(false, Ordering::Release);
+                    flush.store(false, Ordering::Release);
                 }
 
                 errors = lock.errors();
@@ -620,6 +618,7 @@ pub fn new_debouncer_opt<F: DebounceEventHandler, T: Watcher, C: FileIdCache + S
             if !errors.is_empty() {
                 event_handler.handle_event(Err(errors));
             }
+            }
         })?;
 
     let data_c = data.clone();
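The block passed to .spawn({ ... }) above keeps each clone local to the thread it starts, replacing the earlier `_c`-suffixed shadow variables. A standalone sketch of the same idiom using only the standard library:

use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};
use std::time::Duration;

fn main() -> std::io::Result<()> {
    let stop = Arc::new(AtomicBool::new(false));
    let worker = std::thread::Builder::new()
        .name("worker loop".to_string())
        .spawn({
            // The clone is scoped to this block and moved into the closure,
            // so no extra `stop_c` binding leaks into the surrounding function.
            let stop = stop.clone();
            move || {
                while !stop.load(Ordering::Acquire) {
                    std::thread::sleep(Duration::from_millis(10));
                }
            }
        })?;
    stop.store(true, Ordering::Release);
    worker.join().expect("worker thread panicked");
    Ok(())
}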
@@ -636,18 +635,16 @@ pub fn new_debouncer_opt<F: DebounceEventHandler, T: Watcher, C: FileIdCache + S
         config,
     )?;
 
-    let guard = Debouncer {
+    Ok(Debouncer {
         watcher,
         debouncer_thread: Some(thread),
         data,
         stop,
        flush,
-    };
-
-    Ok(guard)
+    })
 }
 
-/// Short function to create a new debounced watcher with the recommended debouncer and the built-in file ID cache.
+/// Short function to create a new debounced watcher with the recommended debouncer, without FileID cache for performance.
 ///
 /// Timeout is the amount of time after which a debounced event is emitted.
 ///
@@ -657,153 +654,13 @@ pub fn new_debouncer<F: DebounceEventHandler>(
     tick_rate: Option<Duration>,
     flush_after: Option<u32>,
     event_handler: F,
-) -> Result<Debouncer<RecommendedWatcher, FileIdMap>, Error> {
-    new_debouncer_opt::<F, RecommendedWatcher, FileIdMap>(
+) -> Result<Debouncer<RecommendedWatcher, NoCache>, Error> {
+    new_debouncer_opt::<F, RecommendedWatcher, NoCache>(
         timeout,
         tick_rate,
         flush_after,
         event_handler,
-        FileIdMap::new(),
+        NoCache,
         notify::Config::default(),
     )
 }
-
-#[cfg(test)]
-mod tests {
-    use std::{fs, path::Path};
-
-    use super::*;
-    use mock_instant::MockClock;
-    use pretty_assertions::assert_eq;
-    use rstest::rstest;
-
-    #[rstest]
-    fn state(
-        #[values(
-            "add_create_event",
-            "add_create_event_after_remove_event",
-            "add_create_dir_event_twice",
-            "add_modify_content_event_after_create_event",
-            "add_rename_from_event",
-            "add_rename_from_event_after_create_event",
-            "add_rename_from_event_after_modify_event",
-            "add_rename_from_event_after_create_and_modify_event",
-            "add_rename_from_event_after_rename_from_event",
-            "add_rename_to_event",
-            "add_rename_to_dir_event",
-            "add_rename_from_and_to_event",
-            "add_rename_from_and_to_event_after_create",
-            "add_rename_from_and_to_event_after_rename",
-            "add_rename_from_and_to_event_after_modify_content",
-            "add_rename_from_and_to_event_override_created",
-            "add_rename_from_and_to_event_override_modified",
-            "add_rename_from_and_to_event_override_removed",
-            "add_rename_from_and_to_event_with_file_ids",
-            "add_rename_from_and_to_event_with_different_file_ids",
-            "add_rename_from_and_to_event_with_different_tracker",
-            "add_rename_both_event",
-            "add_remove_event",
-            "add_remove_event_after_create_event",
-            "add_remove_event_after_modify_event",
-            "add_remove_event_after_create_and_modify_event",
-            "add_remove_parent_event_after_remove_child_event",
-            "add_errors",
-            "emit_continuous_modify_content_events",
-            "emit_events_in_chronological_order",
-            "emit_events_with_a_prepended_rename_event",
-            "emit_close_events_only_once",
-            "emit_modify_event_after_close_event",
-            "emit_needs_rescan_event",
-            "read_file_id_without_create_event"
-        )]
-        file_name: &str,
-    ) {
-        use testing::TestCase;
-
-        let file_content =
-            fs::read_to_string(Path::new(&format!("./tests/fixtures/{file_name}.hjson"))).unwrap();
-        let mut test_case = deser_hjson::from_str::<TestCase>(&file_content).unwrap();
-
-        MockClock::set_time(Duration::default());
-        let time = Instant::now();
-
-        let mut state = test_case.state.into_debounce_data_inner(time);
-
-        for event in test_case.events {
-            let event = event.into_debounced_event(time, None);
-            MockClock::set_time(event.time - time);
-            state.add_event(event.event);
-        }
-
-        for error in test_case.errors {
-            let e = error.into_notify_error();
-            state.add_error(e);
-        }
-
-        let expected_errors = std::mem::take(&mut test_case.expected.errors);
-        let expected_events = std::mem::take(&mut test_case.expected.events);
-        let expected_state = test_case.expected.into_debounce_data_inner(time);
-
-        assert_eq!(
-            state.queues, expected_state.queues,
-            "queues not as expected"
-        );
-        assert_eq!(
-            state.rename_event, expected_state.rename_event,
-            "rename event not as expected"
-        );
-        assert_eq!(
-            state.rescan_event, expected_state.rescan_event,
-            "rescan event not as expected"
-        );
-        assert_eq!(
-            state.cache.paths, expected_state.cache.paths,
-            "cache not as expected"
-        );
-
-        assert_eq!(
-            state
-                .errors
-                .iter()
-                .map(|e| format!("{:?}", e))
-                .collect::<Vec<_>>(),
-            expected_errors
-                .iter()
-                .map(|e| format!("{:?}", e.clone().into_notify_error()))
-                .collect::<Vec<_>>(),
-            "errors not as expected"
-        );
-
-        let backup_time = Instant::now().duration_since(time);
-        let backup_queues = state.queues.clone();
-
-        for (delay, events) in expected_events {
-            MockClock::set_time(backup_time);
-            state.queues.clone_from(&backup_queues);
-
-            match delay.as_str() {
-                "none" => {}
-                "short" => MockClock::advance(Duration::from_millis(10)),
-                "long" => MockClock::advance(Duration::from_millis(100)),
-                _ => {
-                    if let Ok(ts) = delay.parse::<u64>() {
-                        let ts = time + Duration::from_millis(ts);
-                        MockClock::set_time(ts - time);
-                    }
-                }
-            }
-
-            let events = events
-                .into_iter()
-                .map(|event| event.into_debounced_event(time, None))
-                .collect::<Vec<_>>();
-
-            assert_eq!(
-                state.debounced_events(false),
-                events,
-                "debounced events after a `{delay}` delay"
-            );
-        }
-    }
-}
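With the signature change above, the convenience constructor now yields Debouncer<RecommendedWatcher, NoCache> and no longer maintains file IDs. A minimal usage sketch, not part of this diff; it assumes the vendored crate keeps the upstream closure-based DebounceEventHandler, and the path and timings are illustrative:

use std::path::Path;
use std::time::Duration;

use gitbutler_notify_debouncer::{new_debouncer, DebouncedEvent};
use notify::RecursiveMode;

fn main() -> Result<(), notify::Error> {
    // Returns Debouncer<RecommendedWatcher, NoCache> after this change.
    let mut debouncer = new_debouncer(
        Duration::from_millis(100), // debounce timeout
        None,                       // default tick rate
        Some(10),                   // flush once the queues have been idle for 10 ticks
        |result: Result<Vec<DebouncedEvent>, Vec<notify::Error>>| match result {
            Ok(events) => println!("{} debounced event(s)", events.len()),
            Err(errors) => eprintln!("{} watcher error(s)", errors.len()),
        },
    )?;
    debouncer.watcher().watch(Path::new("."), RecursiveMode::Recursive)?;
    std::thread::sleep(Duration::from_secs(5));
    debouncer.stop(); // may block for up to one tick while the debouncer thread exits
    Ok(())
}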


@@ -95,8 +95,7 @@ fn state(
     }
 
     for error in test_case.errors {
-        let error = error.into_notify_error();
-        state.add_error(error);
+        state.add_error(error.into_notify_error());
     }
 
     let expected_errors = std::mem::take(&mut test_case.expected.errors);
@@ -166,7 +165,7 @@ fn state(
 mod schema;
 
 mod utils {
-    use crate::debouncer::FileIdCache;
+    use crate::FileIdCache;
     use file_id::FileId;
     use std::collections::HashMap;

@@ -27,7 +27,7 @@
 // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
 // IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 // DEALINGS IN THE SOFTWARE.
-use crate::debouncer::{DebounceDataInner, DebouncedEvent};
+use crate::{DebounceDataInner, DebouncedEvent};
 use file_id::FileId;
 use mock_instant::Instant;
 use notify::event::{
@@ -241,7 +241,7 @@ impl State {
             .queues
             .into_iter()
             .map(|(path, queue)| {
-                let queue = crate::debouncer::Queue {
+                let queue = crate::Queue {
                     events: queue
                         .events
                         .into_iter()