Mirror of https://github.com/zed-industries/zed.git
Setup randomized test harness in gpui::test

Co-Authored-By: Nathan Sobo <nathan@zed.dev>
Co-Authored-By: Max Brunsfeld <max@zed.dev>

Parent: b0f3778381
Commit: c1a9a20ac9
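The diff has two parts: the `gpui::test` proc macro grows a randomized harness (a seed loop, a per-seed `StdRng`, retries, and failing-seed reporting), and the existing randomized tests drop their hand-rolled `ITERATIONS`/`SEED` loops in favor of it. As a rough usage sketch based on the converted tests in this commit (the test name and body below are illustrative placeholders, not code from the commit):

    use rand::{rngs::StdRng, Rng};

    #[gpui::test(iterations = 100)]
    fn test_example(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
        // `rng` arrives pre-seeded by the generated harness; each of the 100
        // iterations runs with a different seed, and the harness reports the
        // failing seed when an iteration panics.
        let len = rng.gen_range(0..10);
        assert!(len < 10);
        let _ = cx; // the app context is passed through to the test unchanged
    }

Which arguments the macro passes in is driven by the test's parameter list: app-context parameters are forwarded, and an `StdRng` parameter is seeded from the current iteration's seed.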
@@ -2,8 +2,8 @@ use proc_macro::TokenStream;
use quote::{format_ident, quote};
use std::mem;
use syn::{
parse_macro_input, parse_quote, spanned::Spanned as _, AttributeArgs, ItemFn, Lit, Meta,
NestedMeta,
parse_macro_input, parse_quote, spanned::Spanned as _, AttributeArgs, FnArg, ItemFn, Lit, Meta,
NestedMeta, Type,
};

#[proc_macro_attribute]
@@ -69,16 +69,43 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
let inner_fn_name = format_ident!("_{}", inner_fn.sig.ident);
let outer_fn_name = mem::replace(&mut inner_fn.sig.ident, inner_fn_name.clone());

// Pass to the test function the number of app contexts that it needs,
// based on its parameter list.
let inner_fn_args = (0..inner_fn.sig.inputs.len())
.map(|i| {
let first_entity_id = i * 100_000;
quote!(#namespace::TestAppContext::new(foreground.clone(), background.clone(), #first_entity_id),)
})
.collect::<proc_macro2::TokenStream>();

let mut outer_fn: ItemFn = if inner_fn.sig.asyncness.is_some() {
// Pass to the test function the number of app contexts that it needs,
// based on its parameter list.
let mut inner_fn_args = proc_macro2::TokenStream::new();
for (ix, arg) in inner_fn.sig.inputs.iter().enumerate() {
if let FnArg::Typed(arg) = arg {
if let Type::Path(ty) = &*arg.ty {
let last_segment = ty.path.segments.last();
match last_segment.map(|s| s.ident.to_string()).as_deref() {
Some("TestAppContext") => {
let first_entity_id = ix * 100_000;
inner_fn_args.extend(
quote!(#namespace::TestAppContext::new(foreground.clone(), background.clone(), #first_entity_id),)
);
}
Some("StdRng") => {
inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed)));
}
_ => {
return TokenStream::from(
syn::Error::new_spanned(arg, "invalid argument")
.into_compile_error(),
)
}
}
} else {
return TokenStream::from(
syn::Error::new_spanned(arg, "invalid argument").into_compile_error(),
);
}
} else {
return TokenStream::from(
syn::Error::new_spanned(arg, "invalid argument").into_compile_error(),
);
}
}

parse_quote! {
#[test]
fn #outer_fn_name() {
@@ -87,9 +114,10 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
let mut retries = 0;
let mut i = 0;
loop {
let seed = #starting_seed + i;
let seed = (#starting_seed + i) as u64;
dbg!(seed);
let result = std::panic::catch_unwind(|| {
let (foreground, background) = #namespace::executor::deterministic(seed as u64);
let (foreground, background) = #namespace::executor::deterministic(seed);
foreground.run(#inner_fn_name(#inner_fn_args));
});

@@ -117,36 +145,60 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
}
}
} else {
let mut inner_fn_args = proc_macro2::TokenStream::new();
for arg in inner_fn.sig.inputs.iter() {
if let FnArg::Typed(arg) = arg {
if let Type::Path(ty) = &*arg.ty {
let last_segment = ty.path.segments.last();
if let Some("StdRng") = last_segment.map(|s| s.ident.to_string()).as_deref() {
inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed),));
}
} else {
inner_fn_args.extend(quote!(cx,));
}
} else {
return TokenStream::from(
syn::Error::new_spanned(arg, "invalid argument").into_compile_error(),
);
}
}

parse_quote! {
#[test]
fn #outer_fn_name() {
#inner_fn

if #max_retries > 0 {
let mut retries = 0;
loop {
let result = std::panic::catch_unwind(|| {
#namespace::App::test(|cx| {
#inner_fn_name(cx);
});
let mut retries = 0;
let mut i = 0;
loop {
let seed = (#starting_seed + i) as u64;
dbg!(seed);
let result = std::panic::catch_unwind(|| {
#namespace::App::test(|cx| {
#inner_fn_name(#inner_fn_args);
});
});

match result {
Ok(result) => return result,
Err(error) => {
if retries < #max_retries {
retries += 1;
println!("retrying: attempt {}", retries);
} else {
std::panic::resume_unwind(error);
match result {
Ok(result) => {
retries = 0;
i += 1;
if i == #num_iterations {
return result
}
}
Err(error) => {
if retries < #max_retries {
retries += 1;
println!("retrying: attempt {}", retries);
} else {
if #num_iterations > 1 {
eprintln!("failing seed: {}", seed);
}
std::panic::resume_unwind(error);
}
}
}
} else {
#namespace::App::test(|cx| {
#inner_fn_name(cx);
});
}
}
}
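In rough outline, the wrapper generated for a synchronous randomized test behaves like the sketch below. This paraphrases the `parse_quote!` block above rather than reproducing the literal macro output; `my_random_test`, `_my_random_test`, and the upper-case constants are stand-ins for the test's own name and for the `iterations`, `retries`, and starting-seed attribute values.

    // Simplified sketch of the generated harness, not the literal expansion.
    #[test]
    fn my_random_test() {
        const ITERATIONS: u64 = 100;
        const MAX_RETRIES: usize = 0;
        const STARTING_SEED: u64 = 0;

        let mut retries = 0;
        let mut i = 0;
        loop {
            let seed = STARTING_SEED + i;
            // Run one iteration inside catch_unwind so a failure can be
            // retried or reported with its seed.
            let result = std::panic::catch_unwind(|| {
                gpui::App::test(|cx| {
                    _my_random_test(cx, rand::SeedableRng::seed_from_u64(seed));
                });
            });
            match result {
                Ok(()) => {
                    retries = 0;
                    i += 1;
                    if i == ITERATIONS {
                        return;
                    }
                }
                Err(error) => {
                    if retries < MAX_RETRIES {
                        retries += 1;
                        println!("retrying: attempt {}", retries);
                    } else {
                        if ITERATIONS > 1 {
                            eprintln!("failing seed: {}", seed);
                        }
                        std::panic::resume_unwind(error);
                    }
                }
            }
        }
    }

The hunks that follow convert the existing randomized tests in the buffer, rope, fold map, wrap map, sum tree, and worktree modules to this harness.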
@@ -2967,98 +2967,87 @@ mod tests {
assert_eq!(*buffer_2_events, vec![Event::Edited, Event::Dirtied]);
}

#[gpui::test]
fn test_random_edits(cx: &mut gpui::MutableAppContext) {
let iterations = env::var("ITERATIONS")
.map(|i| i.parse().expect("invalid `ITERATIONS` variable"))
.unwrap_or(100);
#[gpui::test(iterations = 100)]
fn test_random_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
let operations = env::var("OPERATIONS")
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
let start_seed =
env::var("SEED").map_or(0, |seed| seed.parse().expect("invalid `SEED` variable"));

for seed in start_seed..start_seed + iterations {
println!("{:?}", seed);
let mut rng = &mut StdRng::seed_from_u64(seed);
let reference_string_len = rng.gen_range(0..3);
let mut reference_string = RandomCharIter::new(&mut rng)
.take(reference_string_len)
.collect::<String>();
cx.add_model(|cx| {
let mut buffer = Buffer::new(0, reference_string.as_str(), cx);
buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
let mut buffer_versions = Vec::new();
log::info!(
"buffer text {:?}, version: {:?}",
buffer.text(),
buffer.version()
);

let reference_string_len = rng.gen_range(0..3);
let mut reference_string = RandomCharIter::new(&mut rng)
.take(reference_string_len)
.collect::<String>();
cx.add_model(|cx| {
let mut buffer = Buffer::new(0, reference_string.as_str(), cx);
buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
let mut buffer_versions = Vec::new();
for _i in 0..operations {
let (old_ranges, new_text) = buffer.randomly_mutate(&mut rng, cx);
for old_range in old_ranges.iter().rev() {
reference_string.replace_range(old_range.clone(), &new_text);
}
assert_eq!(buffer.text(), reference_string);
log::info!(
"buffer text {:?}, version: {:?}",
buffer.text(),
buffer.version()
);

for _i in 0..operations {
let (old_ranges, new_text) = buffer.randomly_mutate(rng, cx);
for old_range in old_ranges.iter().rev() {
reference_string.replace_range(old_range.clone(), &new_text);
}
assert_eq!(buffer.text(), reference_string);
if rng.gen_bool(0.25) {
buffer.randomly_undo_redo(&mut rng, cx);
reference_string = buffer.text();
log::info!(
"buffer text {:?}, version: {:?}",
buffer.text(),
buffer.version()
);

if rng.gen_bool(0.25) {
buffer.randomly_undo_redo(rng, cx);
reference_string = buffer.text();
log::info!(
"buffer text {:?}, version: {:?}",
buffer.text(),
buffer.version()
);
}

let range = buffer.random_byte_range(0, rng);
assert_eq!(
buffer.text_summary_for_range(range.clone()),
TextSummary::from(&reference_string[range])
);

if rng.gen_bool(0.3) {
buffer_versions.push(buffer.clone());
}
}

for mut old_buffer in buffer_versions {
let edits = buffer
.edits_since(old_buffer.version.clone())
.collect::<Vec<_>>();
let range = buffer.random_byte_range(0, &mut rng);
assert_eq!(
buffer.text_summary_for_range(range.clone()),
TextSummary::from(&reference_string[range])
);

log::info!(
"mutating old buffer version {:?}, text: {:?}, edits since: {:?}",
old_buffer.version(),
old_buffer.text(),
edits,
);

let mut delta = 0_isize;
for edit in edits {
let old_start = (edit.old_bytes.start as isize + delta) as usize;
let new_text: String =
buffer.text_for_range(edit.new_bytes.clone()).collect();
old_buffer.edit(
Some(old_start..old_start + edit.deleted_bytes()),
new_text,
cx,
);
delta += edit.delta();
}
assert_eq!(old_buffer.text(), buffer.text());
if rng.gen_bool(0.3) {
buffer_versions.push(buffer.clone());
}
}

buffer
});
}
for mut old_buffer in buffer_versions {
let edits = buffer
.edits_since(old_buffer.version.clone())
.collect::<Vec<_>>();

log::info!(
"mutating old buffer version {:?}, text: {:?}, edits since: {:?}",
old_buffer.version(),
old_buffer.text(),
edits,
);

let mut delta = 0_isize;
for edit in edits {
let old_start = (edit.old_bytes.start as isize + delta) as usize;
let new_text: String = buffer.text_for_range(edit.new_bytes.clone()).collect();
old_buffer.edit(
Some(old_start..old_start + edit.deleted_bytes()),
new_text,
cx,
);
delta += edit.delta();
}
assert_eq!(old_buffer.text(), buffer.text());
}

buffer
});
}
#[gpui::test]
|
||||
@ -3727,103 +3716,93 @@ mod tests {
|
||||
assert_eq!(buffer3.read(cx).text(), "a12c34e56");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext) {
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random_concurrent_edits(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
|
||||
use crate::test::Network;
|
||||
|
||||
let peers = env::var("PEERS")
|
||||
.map(|i| i.parse().expect("invalid `PEERS` variable"))
|
||||
.unwrap_or(5);
|
||||
let iterations = env::var("ITERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `ITERATIONS` variable"))
|
||||
.unwrap_or(100);
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
let start_seed =
|
||||
env::var("SEED").map_or(0, |seed| seed.parse().expect("invalid `SEED` variable"));
|
||||
|
||||
for seed in start_seed..start_seed + iterations {
|
||||
dbg!(seed);
|
||||
let mut rng = StdRng::seed_from_u64(seed);
|
||||
let base_text_len = rng.gen_range(0..10);
|
||||
let base_text = RandomCharIter::new(&mut rng)
|
||||
.take(base_text_len)
|
||||
.collect::<String>();
|
||||
let mut replica_ids = Vec::new();
|
||||
let mut buffers = Vec::new();
|
||||
let mut network = Network::new(rng.clone());
|
||||
|
||||
let base_text_len = rng.gen_range(0..10);
|
||||
let base_text = RandomCharIter::new(&mut rng)
|
||||
.take(base_text_len)
|
||||
.collect::<String>();
|
||||
let mut replica_ids = Vec::new();
|
||||
let mut buffers = Vec::new();
|
||||
let mut network = Network::new(StdRng::seed_from_u64(seed));
|
||||
for i in 0..peers {
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let mut buf = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
|
||||
buf.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
|
||||
buf
|
||||
});
|
||||
buffers.push(buffer);
|
||||
replica_ids.push(i as u16);
|
||||
network.add_peer(i as u16);
|
||||
}
|
||||
|
||||
for i in 0..peers {
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let mut buf = Buffer::new(i as ReplicaId, base_text.as_str(), cx);
|
||||
buf.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
|
||||
buf
|
||||
});
|
||||
buffers.push(buffer);
|
||||
replica_ids.push(i as u16);
|
||||
network.add_peer(i as u16);
|
||||
}
|
||||
log::info!("initial text: {:?}", base_text);
|
||||
|
||||
log::info!("initial text: {:?}", base_text);
|
||||
|
||||
let mut mutation_count = operations;
|
||||
loop {
|
||||
let replica_index = rng.gen_range(0..peers);
|
||||
let replica_id = replica_ids[replica_index];
|
||||
buffers[replica_index].update(cx, |buffer, cx| match rng.gen_range(0..=100) {
|
||||
0..=50 if mutation_count != 0 => {
|
||||
buffer.randomly_mutate(&mut rng, cx);
|
||||
network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations));
|
||||
log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text());
|
||||
mutation_count -= 1;
|
||||
}
|
||||
51..=70 if mutation_count != 0 => {
|
||||
buffer.randomly_undo_redo(&mut rng, cx);
|
||||
network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations));
|
||||
mutation_count -= 1;
|
||||
}
|
||||
71..=100 if network.has_unreceived(replica_id) => {
|
||||
let ops = network.receive(replica_id);
|
||||
if !ops.is_empty() {
|
||||
log::info!(
|
||||
"peer {} applying {} ops from the network.",
|
||||
replica_id,
|
||||
ops.len()
|
||||
);
|
||||
buffer.apply_ops(ops, cx).unwrap();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
|
||||
if mutation_count == 0 && network.is_idle() {
|
||||
break;
|
||||
let mut mutation_count = operations;
|
||||
loop {
|
||||
let replica_index = rng.gen_range(0..peers);
|
||||
let replica_id = replica_ids[replica_index];
|
||||
buffers[replica_index].update(cx, |buffer, cx| match rng.gen_range(0..=100) {
|
||||
0..=50 if mutation_count != 0 => {
|
||||
buffer.randomly_mutate(&mut rng, cx);
|
||||
network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations));
|
||||
log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text());
|
||||
mutation_count -= 1;
|
||||
}
|
||||
}
|
||||
51..=70 if mutation_count != 0 => {
|
||||
buffer.randomly_undo_redo(&mut rng, cx);
|
||||
network.broadcast(buffer.replica_id, mem::take(&mut buffer.operations));
|
||||
mutation_count -= 1;
|
||||
}
|
||||
71..=100 if network.has_unreceived(replica_id) => {
|
||||
let ops = network.receive(replica_id);
|
||||
if !ops.is_empty() {
|
||||
log::info!(
|
||||
"peer {} applying {} ops from the network.",
|
||||
replica_id,
|
||||
ops.len()
|
||||
);
|
||||
buffer.apply_ops(ops, cx).unwrap();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
|
||||
let first_buffer = buffers[0].read(cx);
|
||||
for buffer in &buffers[1..] {
|
||||
let buffer = buffer.read(cx);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
first_buffer.text(),
|
||||
"Replica {} text != Replica 0 text",
|
||||
buffer.replica_id
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.selection_sets().collect::<HashMap<_, _>>(),
|
||||
first_buffer.selection_sets().collect::<HashMap<_, _>>()
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.all_selection_ranges().collect::<HashMap<_, _>>(),
|
||||
first_buffer
|
||||
.all_selection_ranges()
|
||||
.collect::<HashMap<_, _>>()
|
||||
);
|
||||
if mutation_count == 0 && network.is_idle() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let first_buffer = buffers[0].read(cx);
|
||||
for buffer in &buffers[1..] {
|
||||
let buffer = buffer.read(cx);
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
first_buffer.text(),
|
||||
"Replica {} text != Replica 0 text",
|
||||
buffer.replica_id
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.selection_sets().collect::<HashMap<_, _>>(),
|
||||
first_buffer.selection_sets().collect::<HashMap<_, _>>()
|
||||
);
|
||||
assert_eq!(
|
||||
buffer.all_selection_ranges().collect::<HashMap<_, _>>(),
|
||||
first_buffer
|
||||
.all_selection_ranges()
|
||||
.collect::<HashMap<_, _>>()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
|
@ -541,74 +541,61 @@ mod tests {
|
||||
assert_eq!(rope.text(), text);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_random() {
|
||||
let iterations = env::var("ITERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `ITERATIONS` variable"))
|
||||
.unwrap_or(100);
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random(mut rng: StdRng) {
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
let seed_range = if let Ok(seed) = env::var("SEED") {
|
||||
let seed = seed.parse().expect("invalid `SEED` variable");
|
||||
seed..seed + 1
|
||||
} else {
|
||||
0..iterations
|
||||
};
|
||||
|
||||
for seed in seed_range {
|
||||
dbg!(seed);
|
||||
let mut rng = StdRng::seed_from_u64(seed);
|
||||
let mut expected = String::new();
|
||||
let mut actual = Rope::new();
|
||||
for _ in 0..operations {
|
||||
let mut expected = String::new();
|
||||
let mut actual = Rope::new();
|
||||
for _ in 0..operations {
|
||||
let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
|
||||
let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
|
||||
let len = rng.gen_range(0..=64);
|
||||
let new_text: String = RandomCharIter::new(&mut rng).take(len).collect();
|
||||
|
||||
let mut new_actual = Rope::new();
|
||||
let mut cursor = actual.cursor(0);
|
||||
new_actual.append(cursor.slice(start_ix));
|
||||
new_actual.push(&new_text);
|
||||
cursor.seek_forward(end_ix);
|
||||
new_actual.append(cursor.suffix());
|
||||
actual = new_actual;
|
||||
|
||||
expected.replace_range(start_ix..end_ix, &new_text);
|
||||
|
||||
assert_eq!(actual.text(), expected);
|
||||
log::info!("text: {:?}", expected);
|
||||
|
||||
for _ in 0..5 {
|
||||
let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
|
||||
let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
|
||||
let len = rng.gen_range(0..=64);
|
||||
let new_text: String = RandomCharIter::new(&mut rng).take(len).collect();
|
||||
assert_eq!(
|
||||
actual.chunks_in_range(start_ix..end_ix).collect::<String>(),
|
||||
&expected[start_ix..end_ix]
|
||||
);
|
||||
}
|
||||
|
||||
let mut new_actual = Rope::new();
|
||||
let mut cursor = actual.cursor(0);
|
||||
new_actual.append(cursor.slice(start_ix));
|
||||
new_actual.push(&new_text);
|
||||
cursor.seek_forward(end_ix);
|
||||
new_actual.append(cursor.suffix());
|
||||
actual = new_actual;
|
||||
|
||||
expected.replace_range(start_ix..end_ix, &new_text);
|
||||
|
||||
assert_eq!(actual.text(), expected);
|
||||
log::info!("text: {:?}", expected);
|
||||
|
||||
for _ in 0..5 {
|
||||
let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
|
||||
let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
|
||||
assert_eq!(
|
||||
actual.chunks_in_range(start_ix..end_ix).collect::<String>(),
|
||||
&expected[start_ix..end_ix]
|
||||
);
|
||||
let mut point = Point::new(0, 0);
|
||||
for (ix, ch) in expected.char_indices().chain(Some((expected.len(), '\0'))) {
|
||||
assert_eq!(actual.to_point(ix), point, "to_point({})", ix);
|
||||
assert_eq!(actual.to_offset(point), ix, "to_offset({:?})", point);
|
||||
if ch == '\n' {
|
||||
point.row += 1;
|
||||
point.column = 0
|
||||
} else {
|
||||
point.column += ch.len_utf8() as u32;
|
||||
}
|
||||
}
|
||||
|
||||
let mut point = Point::new(0, 0);
|
||||
for (ix, ch) in expected.char_indices().chain(Some((expected.len(), '\0'))) {
|
||||
assert_eq!(actual.to_point(ix), point, "to_point({})", ix);
|
||||
assert_eq!(actual.to_offset(point), ix, "to_offset({:?})", point);
|
||||
if ch == '\n' {
|
||||
point.row += 1;
|
||||
point.column = 0
|
||||
} else {
|
||||
point.column += ch.len_utf8() as u32;
|
||||
}
|
||||
}
|
||||
|
||||
for _ in 0..5 {
|
||||
let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
|
||||
let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
|
||||
assert_eq!(
|
||||
actual.cursor(start_ix).summary(end_ix),
|
||||
TextSummary::from(&expected[start_ix..end_ix])
|
||||
);
|
||||
}
|
||||
for _ in 0..5 {
|
||||
let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
|
||||
let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
|
||||
assert_eq!(
|
||||
actual.cursor(start_ix).summary(end_ix),
|
||||
TextSummary::from(&expected[start_ix..end_ix])
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1294,212 +1294,197 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_random_folds(cx: &mut gpui::MutableAppContext) {
|
||||
let iterations = env::var("ITERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `ITERATIONS` variable"))
|
||||
.unwrap_or(100);
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random_folds(cx: &mut gpui::MutableAppContext, mut rng: StdRng) {
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
let seed_range = if let Ok(seed) = env::var("SEED") {
|
||||
let seed = seed.parse().expect("invalid `SEED` variable");
|
||||
seed..seed + 1
|
||||
} else {
|
||||
0..iterations
|
||||
};
|
||||
|
||||
for seed in seed_range {
|
||||
dbg!(seed);
|
||||
let mut rng = StdRng::seed_from_u64(seed);
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let len = rng.gen_range(0..10);
|
||||
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
|
||||
Buffer::new(0, text, cx)
|
||||
});
|
||||
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0;
|
||||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let len = rng.gen_range(0..10);
|
||||
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
|
||||
Buffer::new(0, text, cx)
|
||||
});
|
||||
let mut map = FoldMap::new(buffer.clone(), cx.as_ref()).0;
|
||||
let (mut initial_snapshot, _) = map.read(cx.as_ref());
|
||||
let mut snapshot_edits = Vec::new();
|
||||
|
||||
let (mut initial_snapshot, _) = map.read(cx.as_ref());
|
||||
let mut snapshot_edits = Vec::new();
|
||||
|
||||
for _ in 0..operations {
|
||||
log::info!("text: {:?}", buffer.read(cx).text());
|
||||
match rng.gen_range(0..=100) {
|
||||
0..=59 => {
|
||||
snapshot_edits.extend(map.randomly_mutate(&mut rng, cx.as_ref()));
|
||||
}
|
||||
_ => {
|
||||
let edits = buffer.update(cx, |buffer, cx| {
|
||||
let start_version = buffer.version.clone();
|
||||
let edit_count = rng.gen_range(1..=5);
|
||||
buffer.randomly_edit(&mut rng, edit_count, cx);
|
||||
buffer.edits_since(start_version).collect::<Vec<_>>()
|
||||
});
|
||||
log::info!("editing {:?}", edits);
|
||||
}
|
||||
for _ in 0..operations {
|
||||
log::info!("text: {:?}", buffer.read(cx).text());
|
||||
match rng.gen_range(0..=100) {
|
||||
0..=59 => {
|
||||
snapshot_edits.extend(map.randomly_mutate(&mut rng, cx.as_ref()));
|
||||
}
|
||||
|
||||
let buffer = map.buffer.read(cx).snapshot();
|
||||
let mut expected_text: String = buffer.text().into();
|
||||
let mut expected_buffer_rows = Vec::new();
|
||||
let mut next_row = buffer.max_point().row;
|
||||
for fold_range in map.merged_fold_ranges(cx.as_ref()).into_iter().rev() {
|
||||
let fold_start = buffer.point_for_offset(fold_range.start).unwrap();
|
||||
let fold_end = buffer.point_for_offset(fold_range.end).unwrap();
|
||||
expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev());
|
||||
next_row = fold_start.row;
|
||||
|
||||
expected_text.replace_range(fold_range.start..fold_range.end, "…");
|
||||
_ => {
|
||||
let edits = buffer.update(cx, |buffer, cx| {
|
||||
let start_version = buffer.version.clone();
|
||||
let edit_count = rng.gen_range(1..=5);
|
||||
buffer.randomly_edit(&mut rng, edit_count, cx);
|
||||
buffer.edits_since(start_version).collect::<Vec<_>>()
|
||||
});
|
||||
log::info!("editing {:?}", edits);
|
||||
}
|
||||
expected_buffer_rows.extend((0..=next_row).rev());
|
||||
expected_buffer_rows.reverse();
|
||||
}
|
||||
|
||||
let (snapshot, edits) = map.read(cx.as_ref());
|
||||
assert_eq!(snapshot.text(), expected_text);
|
||||
snapshot_edits.push((snapshot.clone(), edits));
|
||||
let buffer = map.buffer.read(cx).snapshot();
|
||||
let mut expected_text: String = buffer.text().into();
|
||||
let mut expected_buffer_rows = Vec::new();
|
||||
let mut next_row = buffer.max_point().row;
|
||||
for fold_range in map.merged_fold_ranges(cx.as_ref()).into_iter().rev() {
|
||||
let fold_start = buffer.point_for_offset(fold_range.start).unwrap();
|
||||
let fold_end = buffer.point_for_offset(fold_range.end).unwrap();
|
||||
expected_buffer_rows.extend((fold_end.row + 1..=next_row).rev());
|
||||
next_row = fold_start.row;
|
||||
|
||||
for (output_row, line) in expected_text.lines().enumerate() {
|
||||
let line_len = snapshot.line_len(output_row as u32);
|
||||
assert_eq!(line_len, line.len() as u32);
|
||||
expected_text.replace_range(fold_range.start..fold_range.end, "…");
|
||||
}
|
||||
expected_buffer_rows.extend((0..=next_row).rev());
|
||||
expected_buffer_rows.reverse();
|
||||
|
||||
let (snapshot, edits) = map.read(cx.as_ref());
|
||||
assert_eq!(snapshot.text(), expected_text);
|
||||
snapshot_edits.push((snapshot.clone(), edits));
|
||||
|
||||
for (output_row, line) in expected_text.lines().enumerate() {
|
||||
let line_len = snapshot.line_len(output_row as u32);
|
||||
assert_eq!(line_len, line.len() as u32);
|
||||
}
|
||||
|
||||
let longest_row = snapshot.longest_row();
|
||||
let longest_char_column = expected_text
|
||||
.split('\n')
|
||||
.nth(longest_row as usize)
|
||||
.unwrap()
|
||||
.chars()
|
||||
.count();
|
||||
let mut fold_point = FoldPoint::new(0, 0);
|
||||
let mut fold_offset = FoldOffset(0);
|
||||
let mut char_column = 0;
|
||||
for c in expected_text.chars() {
|
||||
let buffer_point = fold_point.to_buffer_point(&snapshot);
|
||||
let buffer_offset = buffer_point.to_offset(&buffer);
|
||||
assert_eq!(
|
||||
buffer_point.to_fold_point(&snapshot),
|
||||
fold_point,
|
||||
"buffer_Point.to_fold_point({:?})",
|
||||
buffer_point,
|
||||
);
|
||||
assert_eq!(
|
||||
fold_point.to_buffer_offset(&snapshot),
|
||||
buffer_offset,
|
||||
"fold_point.to_buffer_offset({:?})",
|
||||
fold_point,
|
||||
);
|
||||
assert_eq!(
|
||||
fold_point.to_offset(&snapshot),
|
||||
fold_offset,
|
||||
"fold_point.to_offset({:?})",
|
||||
fold_point,
|
||||
);
|
||||
|
||||
if c == '\n' {
|
||||
*fold_point.row_mut() += 1;
|
||||
*fold_point.column_mut() = 0;
|
||||
char_column = 0;
|
||||
} else {
|
||||
*fold_point.column_mut() += c.len_utf8() as u32;
|
||||
char_column += 1;
|
||||
}
|
||||
|
||||
let longest_row = snapshot.longest_row();
|
||||
let longest_char_column = expected_text
|
||||
.split('\n')
|
||||
.nth(longest_row as usize)
|
||||
.unwrap()
|
||||
.chars()
|
||||
.count();
|
||||
let mut fold_point = FoldPoint::new(0, 0);
|
||||
let mut fold_offset = FoldOffset(0);
|
||||
let mut char_column = 0;
|
||||
for c in expected_text.chars() {
|
||||
let buffer_point = fold_point.to_buffer_point(&snapshot);
|
||||
let buffer_offset = buffer_point.to_offset(&buffer);
|
||||
assert_eq!(
|
||||
buffer_point.to_fold_point(&snapshot),
|
||||
fold_point,
|
||||
"buffer_Point.to_fold_point({:?})",
|
||||
buffer_point,
|
||||
);
|
||||
assert_eq!(
|
||||
fold_point.to_buffer_offset(&snapshot),
|
||||
buffer_offset,
|
||||
"fold_point.to_buffer_offset({:?})",
|
||||
fold_point,
|
||||
);
|
||||
assert_eq!(
|
||||
fold_point.to_offset(&snapshot),
|
||||
fold_offset,
|
||||
"fold_point.to_offset({:?})",
|
||||
fold_point,
|
||||
);
|
||||
|
||||
if c == '\n' {
|
||||
*fold_point.row_mut() += 1;
|
||||
*fold_point.column_mut() = 0;
|
||||
char_column = 0;
|
||||
} else {
|
||||
*fold_point.column_mut() += c.len_utf8() as u32;
|
||||
char_column += 1;
|
||||
}
|
||||
fold_offset.0 += c.len_utf8();
|
||||
if char_column > longest_char_column {
|
||||
panic!(
|
||||
"invalid longest row {:?} (chars {}), found row {:?} (chars: {})",
|
||||
longest_row,
|
||||
longest_char_column,
|
||||
fold_point.row(),
|
||||
char_column
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for _ in 0..5 {
|
||||
let offset = snapshot
|
||||
.clip_offset(FoldOffset(rng.gen_range(0..=snapshot.len().0)), Bias::Right);
|
||||
assert_eq!(
|
||||
snapshot.chunks_at(offset).collect::<String>(),
|
||||
&expected_text[offset.0..],
|
||||
fold_offset.0 += c.len_utf8();
|
||||
if char_column > longest_char_column {
|
||||
panic!(
|
||||
"invalid longest row {:?} (chars {}), found row {:?} (chars: {})",
|
||||
longest_row,
|
||||
longest_char_column,
|
||||
fold_point.row(),
|
||||
char_column
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for (idx, buffer_row) in expected_buffer_rows.iter().enumerate() {
|
||||
let fold_row = Point::new(*buffer_row, 0).to_fold_point(&snapshot).row();
|
||||
assert_eq!(
|
||||
snapshot.buffer_rows(fold_row).collect::<Vec<_>>(),
|
||||
expected_buffer_rows[idx..],
|
||||
);
|
||||
for _ in 0..5 {
|
||||
let offset = snapshot
|
||||
.clip_offset(FoldOffset(rng.gen_range(0..=snapshot.len().0)), Bias::Right);
|
||||
assert_eq!(
|
||||
snapshot.chunks_at(offset).collect::<String>(),
|
||||
&expected_text[offset.0..],
|
||||
);
|
||||
}
|
||||
|
||||
for (idx, buffer_row) in expected_buffer_rows.iter().enumerate() {
|
||||
let fold_row = Point::new(*buffer_row, 0).to_fold_point(&snapshot).row();
|
||||
assert_eq!(
|
||||
snapshot.buffer_rows(fold_row).collect::<Vec<_>>(),
|
||||
expected_buffer_rows[idx..],
|
||||
);
|
||||
}
|
||||
|
||||
for fold_range in map.merged_fold_ranges(cx.as_ref()) {
|
||||
let fold_point = fold_range.start.to_point(&buffer).to_fold_point(&snapshot);
|
||||
assert!(snapshot.is_line_folded(fold_point.row()));
|
||||
}
|
||||
|
||||
for _ in 0..5 {
|
||||
let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
|
||||
let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
|
||||
let expected_folds = map
|
||||
.folds
|
||||
.items(&buffer)
|
||||
.into_iter()
|
||||
.filter(|fold| {
|
||||
let start = buffer.anchor_before(start);
|
||||
let end = buffer.anchor_after(end);
|
||||
start.cmp(&fold.0.end, &buffer).unwrap() == Ordering::Less
|
||||
&& end.cmp(&fold.0.start, &buffer).unwrap() == Ordering::Greater
|
||||
})
|
||||
.map(|fold| fold.0)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(
|
||||
snapshot
|
||||
.folds_in_range(start..end)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>(),
|
||||
expected_folds
|
||||
);
|
||||
}
|
||||
|
||||
let text = snapshot.text();
|
||||
for _ in 0..5 {
|
||||
let start_row = rng.gen_range(0..=snapshot.max_point().row());
|
||||
let start_column = rng.gen_range(0..=snapshot.line_len(start_row));
|
||||
let end_row = rng.gen_range(0..=snapshot.max_point().row());
|
||||
let end_column = rng.gen_range(0..=snapshot.line_len(end_row));
|
||||
let mut start =
|
||||
snapshot.clip_point(FoldPoint::new(start_row, start_column), Bias::Left);
|
||||
let mut end = snapshot.clip_point(FoldPoint::new(end_row, end_column), Bias::Right);
|
||||
if start > end {
|
||||
mem::swap(&mut start, &mut end);
|
||||
}
|
||||
|
||||
for fold_range in map.merged_fold_ranges(cx.as_ref()) {
|
||||
let fold_point = fold_range.start.to_point(&buffer).to_fold_point(&snapshot);
|
||||
assert!(snapshot.is_line_folded(fold_point.row()));
|
||||
let lines = start..end;
|
||||
let bytes = start.to_offset(&snapshot)..end.to_offset(&snapshot);
|
||||
assert_eq!(
|
||||
snapshot.text_summary_for_range(lines),
|
||||
TextSummary::from(&text[bytes.start.0..bytes.end.0])
|
||||
)
|
||||
}
|
||||
|
||||
let mut text = initial_snapshot.text();
|
||||
for (snapshot, edits) in snapshot_edits.drain(..) {
|
||||
let new_text = snapshot.text();
|
||||
let mut delta = 0isize;
|
||||
for edit in edits {
|
||||
let old_bytes = ((edit.old_bytes.start.0 as isize) + delta) as usize
|
||||
..((edit.old_bytes.end.0 as isize) + delta) as usize;
|
||||
let new_bytes = edit.new_bytes.start.0..edit.new_bytes.end.0;
|
||||
delta += edit.delta();
|
||||
text.replace_range(old_bytes, &new_text[new_bytes]);
|
||||
}
|
||||
|
||||
for _ in 0..5 {
|
||||
let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
|
||||
let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
|
||||
let expected_folds = map
|
||||
.folds
|
||||
.items(&buffer)
|
||||
.into_iter()
|
||||
.filter(|fold| {
|
||||
let start = buffer.anchor_before(start);
|
||||
let end = buffer.anchor_after(end);
|
||||
start.cmp(&fold.0.end, &buffer).unwrap() == Ordering::Less
|
||||
&& end.cmp(&fold.0.start, &buffer).unwrap() == Ordering::Greater
|
||||
})
|
||||
.map(|fold| fold.0)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(
|
||||
snapshot
|
||||
.folds_in_range(start..end)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>(),
|
||||
expected_folds
|
||||
);
|
||||
}
|
||||
|
||||
let text = snapshot.text();
|
||||
for _ in 0..5 {
|
||||
let start_row = rng.gen_range(0..=snapshot.max_point().row());
|
||||
let start_column = rng.gen_range(0..=snapshot.line_len(start_row));
|
||||
let end_row = rng.gen_range(0..=snapshot.max_point().row());
|
||||
let end_column = rng.gen_range(0..=snapshot.line_len(end_row));
|
||||
let mut start =
|
||||
snapshot.clip_point(FoldPoint::new(start_row, start_column), Bias::Left);
|
||||
let mut end =
|
||||
snapshot.clip_point(FoldPoint::new(end_row, end_column), Bias::Right);
|
||||
if start > end {
|
||||
mem::swap(&mut start, &mut end);
|
||||
}
|
||||
|
||||
let lines = start..end;
|
||||
let bytes = start.to_offset(&snapshot)..end.to_offset(&snapshot);
|
||||
assert_eq!(
|
||||
snapshot.text_summary_for_range(lines),
|
||||
TextSummary::from(&text[bytes.start.0..bytes.end.0])
|
||||
)
|
||||
}
|
||||
|
||||
let mut text = initial_snapshot.text();
|
||||
for (snapshot, edits) in snapshot_edits.drain(..) {
|
||||
let new_text = snapshot.text();
|
||||
let mut delta = 0isize;
|
||||
for edit in edits {
|
||||
let old_bytes = ((edit.old_bytes.start.0 as isize) + delta) as usize
|
||||
..((edit.old_bytes.end.0 as isize) + delta) as usize;
|
||||
let new_bytes = edit.new_bytes.start.0..edit.new_bytes.end.0;
|
||||
delta += edit.delta();
|
||||
text.replace_range(old_bytes, &new_text[new_bytes]);
|
||||
}
|
||||
|
||||
assert_eq!(text, new_text);
|
||||
initial_snapshot = snapshot;
|
||||
}
|
||||
assert_eq!(text, new_text);
|
||||
initial_snapshot = snapshot;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -882,150 +882,133 @@ mod tests {
|
||||
use smol::channel;
|
||||
use std::env;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_random_wraps(mut cx: gpui::TestAppContext) {
|
||||
#[gpui::test(iterations = 100)]
|
||||
async fn test_random_wraps(mut cx: gpui::TestAppContext, mut rng: StdRng) {
|
||||
cx.foreground().set_block_on_ticks(0..=50);
|
||||
|
||||
let iterations = env::var("ITERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `ITERATIONS` variable"))
|
||||
.unwrap_or(100);
|
||||
cx.foreground().forbid_parking();
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
let seed_range = if let Ok(seed) = env::var("SEED") {
|
||||
let seed = seed.parse().expect("invalid `SEED` variable");
|
||||
seed..seed + 1
|
||||
|
||||
let font_cache = cx.font_cache().clone();
|
||||
let font_system = cx.platform().fonts();
|
||||
let mut wrap_width = if rng.gen_bool(0.1) {
|
||||
None
|
||||
} else {
|
||||
0..iterations
|
||||
Some(rng.gen_range(0.0..=1000.0))
|
||||
};
|
||||
let settings = Settings {
|
||||
tab_size: rng.gen_range(1..=4),
|
||||
buffer_font_family: font_cache.load_family(&["Helvetica"]).unwrap(),
|
||||
buffer_font_size: 14.0,
|
||||
..Settings::new(&font_cache).unwrap()
|
||||
};
|
||||
log::info!("Tab size: {}", settings.tab_size);
|
||||
log::info!("Wrap width: {:?}", wrap_width);
|
||||
|
||||
for seed in seed_range {
|
||||
cx.foreground().forbid_parking();
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let len = rng.gen_range(0..10);
|
||||
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
|
||||
Buffer::new(0, text, cx)
|
||||
});
|
||||
let (mut fold_map, folds_snapshot) = cx.read(|cx| FoldMap::new(buffer.clone(), cx));
|
||||
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), settings.tab_size);
|
||||
log::info!(
|
||||
"Unwrapped text (no folds): {:?}",
|
||||
buffer.read_with(&cx, |buf, _| buf.text())
|
||||
);
|
||||
log::info!(
|
||||
"Unwrapped text (unexpanded tabs): {:?}",
|
||||
folds_snapshot.text()
|
||||
);
|
||||
log::info!("Unwrapped text (expanded tabs): {:?}", tabs_snapshot.text());
|
||||
let wrap_map = cx
|
||||
.add_model(|cx| WrapMap::new(tabs_snapshot.clone(), settings.clone(), wrap_width, cx));
|
||||
let (_observer, notifications) = Observer::new(&wrap_map, &mut cx);
|
||||
|
||||
dbg!(seed);
|
||||
let mut rng = StdRng::seed_from_u64(seed);
|
||||
let font_cache = cx.font_cache().clone();
|
||||
let font_system = cx.platform().fonts();
|
||||
let mut wrap_width = if rng.gen_bool(0.1) {
|
||||
None
|
||||
} else {
|
||||
Some(rng.gen_range(0.0..=1000.0))
|
||||
};
|
||||
let settings = Settings {
|
||||
tab_size: rng.gen_range(1..=4),
|
||||
buffer_font_family: font_cache.load_family(&["Helvetica"]).unwrap(),
|
||||
buffer_font_size: 14.0,
|
||||
..Settings::new(&font_cache).unwrap()
|
||||
};
|
||||
log::info!("Tab size: {}", settings.tab_size);
|
||||
log::info!("Wrap width: {:?}", wrap_width);
|
||||
let mut line_wrapper = LineWrapper::new(font_system, &font_cache, settings);
|
||||
let unwrapped_text = tabs_snapshot.text();
|
||||
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
|
||||
|
||||
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.recv().await.unwrap();
|
||||
}
|
||||
|
||||
let snapshot = wrap_map.update(&mut cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
|
||||
let actual_text = snapshot.text();
|
||||
assert_eq!(
|
||||
actual_text, expected_text,
|
||||
"unwrapped text is: {:?}",
|
||||
unwrapped_text
|
||||
);
|
||||
log::info!("Wrapped text: {:?}", actual_text);
|
||||
|
||||
for _i in 0..operations {
|
||||
match rng.gen_range(0..=100) {
|
||||
0..=19 => {
|
||||
wrap_width = if rng.gen_bool(0.2) {
|
||||
None
|
||||
} else {
|
||||
Some(rng.gen_range(0.0..=1000.0))
|
||||
};
|
||||
log::info!("Setting wrap width to {:?}", wrap_width);
|
||||
wrap_map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx));
|
||||
}
|
||||
20..=39 => {
|
||||
for (folds_snapshot, edits) in
|
||||
cx.read(|cx| fold_map.randomly_mutate(&mut rng, cx))
|
||||
{
|
||||
let (tabs_snapshot, edits) = tab_map.sync(folds_snapshot, edits);
|
||||
let mut snapshot =
|
||||
wrap_map.update(&mut cx, |map, cx| map.sync(tabs_snapshot, edits, cx));
|
||||
snapshot.check_invariants();
|
||||
snapshot.verify_chunks(&mut rng);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
buffer.update(&mut cx, |buffer, cx| buffer.randomly_mutate(&mut rng, cx));
|
||||
}
|
||||
}
|
||||
|
||||
let buffer = cx.add_model(|cx| {
|
||||
let len = rng.gen_range(0..10);
|
||||
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
|
||||
Buffer::new(0, text, cx)
|
||||
});
|
||||
let (mut fold_map, folds_snapshot) = cx.read(|cx| FoldMap::new(buffer.clone(), cx));
|
||||
let (tab_map, tabs_snapshot) = TabMap::new(folds_snapshot.clone(), settings.tab_size);
|
||||
log::info!(
|
||||
"Unwrapped text (no folds): {:?}",
|
||||
buffer.read_with(&cx, |buf, _| buf.text())
|
||||
);
|
||||
let (folds_snapshot, edits) = cx.read(|cx| fold_map.read(cx));
|
||||
log::info!(
|
||||
"Unwrapped text (unexpanded tabs): {:?}",
|
||||
folds_snapshot.text()
|
||||
);
|
||||
let (tabs_snapshot, edits) = tab_map.sync(folds_snapshot, edits);
|
||||
log::info!("Unwrapped text (expanded tabs): {:?}", tabs_snapshot.text());
|
||||
let wrap_map = cx.add_model(|cx| {
|
||||
WrapMap::new(tabs_snapshot.clone(), settings.clone(), wrap_width, cx)
|
||||
});
|
||||
let (_observer, notifications) = Observer::new(&wrap_map, &mut cx);
|
||||
|
||||
let mut line_wrapper = LineWrapper::new(font_system, &font_cache, settings);
|
||||
let unwrapped_text = tabs_snapshot.text();
|
||||
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
|
||||
let mut snapshot = wrap_map.update(&mut cx, |map, cx| {
|
||||
map.sync(tabs_snapshot.clone(), edits, cx)
|
||||
});
|
||||
snapshot.check_invariants();
|
||||
snapshot.verify_chunks(&mut rng);
|
||||
|
||||
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.recv().await.unwrap();
|
||||
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
|
||||
log::info!("Waiting for wrapping to finish");
|
||||
while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.recv().await.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
let snapshot =
|
||||
wrap_map.update(&mut cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
|
||||
let actual_text = snapshot.text();
|
||||
assert_eq!(
|
||||
actual_text, expected_text,
|
||||
"unwrapped text is: {:?}",
|
||||
unwrapped_text
|
||||
);
|
||||
log::info!("Wrapped text: {:?}", actual_text);
|
||||
|
||||
for _i in 0..operations {
|
||||
match rng.gen_range(0..=100) {
|
||||
0..=19 => {
|
||||
wrap_width = if rng.gen_bool(0.2) {
|
||||
None
|
||||
} else {
|
||||
Some(rng.gen_range(0.0..=1000.0))
|
||||
};
|
||||
log::info!("Setting wrap width to {:?}", wrap_width);
|
||||
wrap_map.update(&mut cx, |map, cx| map.set_wrap_width(wrap_width, cx));
|
||||
}
|
||||
20..=39 => {
|
||||
for (folds_snapshot, edits) in
|
||||
cx.read(|cx| fold_map.randomly_mutate(&mut rng, cx))
|
||||
{
|
||||
let (tabs_snapshot, edits) = tab_map.sync(folds_snapshot, edits);
|
||||
let mut snapshot = wrap_map
|
||||
.update(&mut cx, |map, cx| map.sync(tabs_snapshot, edits, cx));
|
||||
snapshot.check_invariants();
|
||||
snapshot.verify_chunks(&mut rng);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
buffer.update(&mut cx, |buffer, cx| buffer.randomly_mutate(&mut rng, cx));
|
||||
}
|
||||
}
|
||||
|
||||
log::info!(
|
||||
"Unwrapped text (no folds): {:?}",
|
||||
buffer.read_with(&cx, |buf, _| buf.text())
|
||||
if !wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
let mut wrapped_snapshot =
|
||||
wrap_map.update(&mut cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
|
||||
let actual_text = wrapped_snapshot.text();
|
||||
log::info!("Wrapping finished: {:?}", actual_text);
|
||||
wrapped_snapshot.check_invariants();
|
||||
wrapped_snapshot.verify_chunks(&mut rng);
|
||||
assert_eq!(
|
||||
actual_text, expected_text,
|
||||
"unwrapped text is: {:?}",
|
||||
unwrapped_text
|
||||
);
|
||||
let (folds_snapshot, edits) = cx.read(|cx| fold_map.read(cx));
|
||||
log::info!(
|
||||
"Unwrapped text (unexpanded tabs): {:?}",
|
||||
folds_snapshot.text()
|
||||
);
|
||||
let (tabs_snapshot, edits) = tab_map.sync(folds_snapshot, edits);
|
||||
log::info!("Unwrapped text (expanded tabs): {:?}", tabs_snapshot.text());
|
||||
|
||||
let unwrapped_text = tabs_snapshot.text();
|
||||
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
|
||||
let mut snapshot = wrap_map.update(&mut cx, |map, cx| {
|
||||
map.sync(tabs_snapshot.clone(), edits, cx)
|
||||
});
|
||||
snapshot.check_invariants();
|
||||
snapshot.verify_chunks(&mut rng);
|
||||
|
||||
if wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
|
||||
log::info!("Waiting for wrapping to finish");
|
||||
while wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.recv().await.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
if !wrap_map.read_with(&cx, |map, _| map.is_rewrapping()) {
|
||||
let mut wrapped_snapshot =
|
||||
wrap_map.update(&mut cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
|
||||
let actual_text = wrapped_snapshot.text();
|
||||
log::info!("Wrapping finished: {:?}", actual_text);
|
||||
wrapped_snapshot.check_invariants();
|
||||
wrapped_snapshot.verify_chunks(&mut rng);
|
||||
assert_eq!(
|
||||
actual_text, expected_text,
|
||||
"unwrapped text is: {:?}",
|
||||
unwrapped_text
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -584,6 +584,7 @@ where
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rand::{distributions, prelude::*};
|
||||
use std::cmp;
|
||||
use std::ops::Add;
|
||||
|
||||
@ -602,108 +603,101 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_random() {
|
||||
for seed in 0..100 {
|
||||
use rand::{distributions, prelude::*};
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random(mut rng: StdRng) {
|
||||
let rng = &mut rng;
|
||||
let mut tree = SumTree::<u8>::new();
|
||||
let count = rng.gen_range(0..10);
|
||||
tree.extend(rng.sample_iter(distributions::Standard).take(count), &());
|
||||
|
||||
dbg!(seed);
|
||||
let rng = &mut StdRng::seed_from_u64(seed);
|
||||
for _ in 0..5 {
|
||||
let splice_end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
|
||||
let splice_start = rng.gen_range(0..splice_end + 1);
|
||||
let count = rng.gen_range(0..3);
|
||||
let tree_end = tree.extent::<Count>(&());
|
||||
let new_items = rng
|
||||
.sample_iter(distributions::Standard)
|
||||
.take(count)
|
||||
.collect::<Vec<u8>>();
|
||||
|
||||
let mut tree = SumTree::<u8>::new();
|
||||
let count = rng.gen_range(0..10);
|
||||
tree.extend(rng.sample_iter(distributions::Standard).take(count), &());
|
||||
|
||||
for _ in 0..5 {
|
||||
let splice_end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
|
||||
let splice_start = rng.gen_range(0..splice_end + 1);
|
||||
let count = rng.gen_range(0..3);
|
||||
let tree_end = tree.extent::<Count>(&());
|
||||
let new_items = rng
|
||||
.sample_iter(distributions::Standard)
|
||||
.take(count)
|
||||
.collect::<Vec<u8>>();
|
||||
|
||||
let mut reference_items = tree.items(&());
|
||||
reference_items.splice(splice_start..splice_end, new_items.clone());
|
||||
|
||||
tree = {
|
||||
let mut cursor = tree.cursor::<Count, ()>();
|
||||
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &());
|
||||
new_tree.extend(new_items, &());
|
||||
cursor.seek(&Count(splice_end), Bias::Right, &());
|
||||
new_tree.push_tree(cursor.slice(&tree_end, Bias::Right, &()), &());
|
||||
new_tree
|
||||
};
|
||||
|
||||
assert_eq!(tree.items(&()), reference_items);
|
||||
|
||||
let mut filter_cursor =
|
||||
tree.filter::<_, Count>(|summary| summary.contains_even, &());
|
||||
let mut reference_filter = tree
|
||||
.items(&())
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.filter(|(_, item)| (item & 1) == 0);
|
||||
while let Some(actual_item) = filter_cursor.item() {
|
||||
let (reference_index, reference_item) = reference_filter.next().unwrap();
|
||||
assert_eq!(actual_item, &reference_item);
|
||||
assert_eq!(filter_cursor.start().0, reference_index);
|
||||
filter_cursor.next(&());
|
||||
}
|
||||
assert!(reference_filter.next().is_none());
|
||||
|
||||
let mut pos = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
|
||||
let mut before_start = false;
|
||||
let mut cursor = tree.cursor::<Count, Count>();
|
||||
cursor.seek(&Count(pos), Bias::Right, &());
|
||||
|
||||
for i in 0..10 {
|
||||
assert_eq!(cursor.sum_start().0, pos);
|
||||
|
||||
if pos > 0 {
|
||||
assert_eq!(cursor.prev_item().unwrap(), &reference_items[pos - 1]);
|
||||
} else {
|
||||
assert_eq!(cursor.prev_item(), None);
|
||||
}
|
||||
|
||||
if pos < reference_items.len() && !before_start {
|
||||
assert_eq!(cursor.item().unwrap(), &reference_items[pos]);
|
||||
} else {
|
||||
assert_eq!(cursor.item(), None);
|
||||
}
|
||||
|
||||
if i < 5 {
|
||||
cursor.next(&());
|
||||
if pos < reference_items.len() {
|
||||
pos += 1;
|
||||
before_start = false;
|
||||
}
|
||||
} else {
|
||||
cursor.prev(&());
|
||||
if pos == 0 {
|
||||
before_start = true;
|
||||
}
|
||||
pos = pos.saturating_sub(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _ in 0..10 {
|
||||
let end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
|
||||
let start = rng.gen_range(0..end + 1);
|
||||
let start_bias = if rng.gen() { Bias::Left } else { Bias::Right };
|
||||
let end_bias = if rng.gen() { Bias::Left } else { Bias::Right };
|
||||
let mut reference_items = tree.items(&());
|
||||
reference_items.splice(splice_start..splice_end, new_items.clone());
|
||||
|
||||
tree = {
|
||||
let mut cursor = tree.cursor::<Count, ()>();
|
||||
cursor.seek(&Count(start), start_bias, &());
|
||||
let slice = cursor.slice(&Count(end), end_bias, &());
|
||||
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &());
|
||||
new_tree.extend(new_items, &());
|
||||
cursor.seek(&Count(splice_end), Bias::Right, &());
|
||||
new_tree.push_tree(cursor.slice(&tree_end, Bias::Right, &()), &());
|
||||
new_tree
|
||||
};
|
||||
|
||||
cursor.seek(&Count(start), start_bias, &());
|
||||
let summary = cursor.summary::<Sum>(&Count(end), end_bias, &());
|
||||
assert_eq!(tree.items(&()), reference_items);
|
||||
|
||||
assert_eq!(summary, slice.summary().sum);
|
||||
let mut filter_cursor = tree.filter::<_, Count>(|summary| summary.contains_even, &());
|
||||
let mut reference_filter = tree
|
||||
.items(&())
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.filter(|(_, item)| (item & 1) == 0);
|
||||
while let Some(actual_item) = filter_cursor.item() {
|
||||
let (reference_index, reference_item) = reference_filter.next().unwrap();
|
||||
assert_eq!(actual_item, &reference_item);
|
||||
assert_eq!(filter_cursor.start().0, reference_index);
|
||||
filter_cursor.next(&());
|
||||
}
|
||||
assert!(reference_filter.next().is_none());
|
||||
|
||||
let mut pos = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
|
||||
let mut before_start = false;
|
||||
let mut cursor = tree.cursor::<Count, Count>();
|
||||
cursor.seek(&Count(pos), Bias::Right, &());
|
||||
|
||||
for i in 0..10 {
|
||||
assert_eq!(cursor.sum_start().0, pos);
|
||||
|
||||
if pos > 0 {
|
||||
assert_eq!(cursor.prev_item().unwrap(), &reference_items[pos - 1]);
|
||||
} else {
|
||||
assert_eq!(cursor.prev_item(), None);
|
||||
}
|
||||
|
||||
if pos < reference_items.len() && !before_start {
|
||||
assert_eq!(cursor.item().unwrap(), &reference_items[pos]);
|
||||
} else {
|
||||
assert_eq!(cursor.item(), None);
|
||||
}
|
||||
|
||||
if i < 5 {
|
||||
cursor.next(&());
|
||||
if pos < reference_items.len() {
|
||||
pos += 1;
|
||||
before_start = false;
|
||||
}
|
||||
} else {
|
||||
cursor.prev(&());
|
||||
if pos == 0 {
|
||||
before_start = true;
|
||||
}
|
||||
pos = pos.saturating_sub(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _ in 0..10 {
|
||||
let end = rng.gen_range(0..tree.extent::<Count>(&()).0 + 1);
|
||||
let start = rng.gen_range(0..end + 1);
|
||||
let start_bias = if rng.gen() { Bias::Left } else { Bias::Right };
|
||||
let end_bias = if rng.gen() { Bias::Left } else { Bias::Right };
|
||||
|
||||
let mut cursor = tree.cursor::<Count, ()>();
|
||||
cursor.seek(&Count(start), start_bias, &());
|
||||
let slice = cursor.slice(&Count(end), end_bias, &());
|
||||
|
||||
cursor.seek(&Count(start), start_bias, &());
|
||||
let summary = cursor.summary::<Sum>(&Count(end), end_bias, &());
|
||||
|
||||
assert_eq!(summary, slice.summary().sum);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2955,93 +2955,80 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_random() {
|
||||
let iterations = env::var("ITERATIONS")
|
||||
.map(|i| i.parse().unwrap())
|
||||
.unwrap_or(100);
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random(mut rng: StdRng) {
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|o| o.parse().unwrap())
|
||||
.unwrap_or(40);
|
||||
let initial_entries = env::var("INITIAL_ENTRIES")
|
||||
.map(|o| o.parse().unwrap())
|
||||
.unwrap_or(20);
|
||||
let seeds = if let Ok(seed) = env::var("SEED").map(|s| s.parse().unwrap()) {
|
||||
seed..seed + 1
|
||||
} else {
|
||||
0..iterations
|
||||
};
|
||||
|
||||
for seed in seeds {
|
||||
dbg!(seed);
|
||||
let mut rng = StdRng::seed_from_u64(seed);
|
||||
|
||||
let root_dir = tempdir::TempDir::new(&format!("test-{}", seed)).unwrap();
|
||||
for _ in 0..initial_entries {
|
||||
randomly_mutate_tree(root_dir.path(), 1.0, &mut rng).unwrap();
|
||||
}
|
||||
log::info!("Generated initial tree");
|
||||
|
||||
let (notify_tx, _notify_rx) = smol::channel::unbounded();
|
||||
let fs = Arc::new(RealFs);
|
||||
let next_entry_id = Arc::new(AtomicUsize::new(0));
|
||||
let mut initial_snapshot = Snapshot {
|
||||
id: 0,
|
||||
scan_id: 0,
|
||||
abs_path: root_dir.path().into(),
|
||||
entries_by_path: Default::default(),
|
||||
entries_by_id: Default::default(),
|
||||
removed_entry_ids: Default::default(),
|
||||
ignores: Default::default(),
|
||||
root_name: Default::default(),
|
||||
root_char_bag: Default::default(),
|
||||
next_entry_id: next_entry_id.clone(),
|
||||
};
|
||||
initial_snapshot.insert_entry(Entry::new(
|
||||
Path::new("").into(),
|
||||
&smol::block_on(fs.metadata(root_dir.path()))
|
||||
.unwrap()
|
||||
.unwrap(),
|
||||
&next_entry_id,
|
||||
Default::default(),
|
||||
));
|
||||
let mut scanner = BackgroundScanner::new(
|
||||
Arc::new(Mutex::new(initial_snapshot.clone())),
|
||||
notify_tx,
|
||||
fs.clone(),
|
||||
Arc::new(gpui::executor::Background::new()),
|
||||
);
|
||||
smol::block_on(scanner.scan_dirs()).unwrap();
|
||||
scanner.snapshot().check_invariants();
|
||||
|
||||
let mut events = Vec::new();
|
||||
let mut mutations_len = operations;
|
||||
while mutations_len > 1 {
|
||||
if !events.is_empty() && rng.gen_bool(0.4) {
|
||||
let len = rng.gen_range(0..=events.len());
|
||||
let to_deliver = events.drain(0..len).collect::<Vec<_>>();
|
||||
log::info!("Delivering events: {:#?}", to_deliver);
|
||||
smol::block_on(scanner.process_events(to_deliver));
|
||||
scanner.snapshot().check_invariants();
|
||||
} else {
|
||||
events.extend(randomly_mutate_tree(root_dir.path(), 0.6, &mut rng).unwrap());
|
||||
mutations_len -= 1;
|
||||
}
|
||||
}
|
||||
log::info!("Quiescing: {:#?}", events);
|
||||
smol::block_on(scanner.process_events(events));
|
||||
scanner.snapshot().check_invariants();
|
||||
|
||||
let (notify_tx, _notify_rx) = smol::channel::unbounded();
|
||||
let mut new_scanner = BackgroundScanner::new(
|
||||
Arc::new(Mutex::new(initial_snapshot)),
|
||||
notify_tx,
|
||||
scanner.fs.clone(),
|
||||
scanner.executor.clone(),
|
||||
);
|
||||
smol::block_on(new_scanner.scan_dirs()).unwrap();
|
||||
assert_eq!(scanner.snapshot().to_vec(), new_scanner.snapshot().to_vec());
|
||||
let root_dir = tempdir::TempDir::new("worktree-test").unwrap();
|
||||
for _ in 0..initial_entries {
|
||||
randomly_mutate_tree(root_dir.path(), 1.0, &mut rng).unwrap();
|
||||
}
|
||||
log::info!("Generated initial tree");
|
||||
|
||||
let (notify_tx, _notify_rx) = smol::channel::unbounded();
|
||||
let fs = Arc::new(RealFs);
|
||||
let next_entry_id = Arc::new(AtomicUsize::new(0));
|
||||
let mut initial_snapshot = Snapshot {
|
||||
id: 0,
|
||||
scan_id: 0,
|
||||
abs_path: root_dir.path().into(),
|
||||
entries_by_path: Default::default(),
|
||||
entries_by_id: Default::default(),
|
||||
removed_entry_ids: Default::default(),
|
||||
ignores: Default::default(),
|
||||
root_name: Default::default(),
|
||||
root_char_bag: Default::default(),
|
||||
next_entry_id: next_entry_id.clone(),
|
||||
};
|
||||
initial_snapshot.insert_entry(Entry::new(
|
||||
Path::new("").into(),
|
||||
&smol::block_on(fs.metadata(root_dir.path()))
|
||||
.unwrap()
|
||||
.unwrap(),
|
||||
&next_entry_id,
|
||||
Default::default(),
|
||||
));
|
||||
let mut scanner = BackgroundScanner::new(
|
||||
Arc::new(Mutex::new(initial_snapshot.clone())),
|
||||
notify_tx,
|
||||
fs.clone(),
|
||||
Arc::new(gpui::executor::Background::new()),
|
||||
);
|
||||
smol::block_on(scanner.scan_dirs()).unwrap();
|
||||
scanner.snapshot().check_invariants();
|
||||
|
||||
let mut events = Vec::new();
|
||||
let mut mutations_len = operations;
|
||||
while mutations_len > 1 {
|
||||
if !events.is_empty() && rng.gen_bool(0.4) {
|
||||
let len = rng.gen_range(0..=events.len());
|
||||
let to_deliver = events.drain(0..len).collect::<Vec<_>>();
|
||||
log::info!("Delivering events: {:#?}", to_deliver);
|
||||
smol::block_on(scanner.process_events(to_deliver));
|
||||
scanner.snapshot().check_invariants();
|
||||
} else {
|
||||
events.extend(randomly_mutate_tree(root_dir.path(), 0.6, &mut rng).unwrap());
|
||||
mutations_len -= 1;
|
||||
}
|
||||
}
|
||||
log::info!("Quiescing: {:#?}", events);
|
||||
smol::block_on(scanner.process_events(events));
|
||||
scanner.snapshot().check_invariants();
|
||||
|
||||
let (notify_tx, _notify_rx) = smol::channel::unbounded();
|
||||
let mut new_scanner = BackgroundScanner::new(
|
||||
Arc::new(Mutex::new(initial_snapshot)),
|
||||
notify_tx,
|
||||
scanner.fs.clone(),
|
||||
scanner.executor.clone(),
|
||||
);
|
||||
smol::block_on(new_scanner.scan_dirs()).unwrap();
|
||||
assert_eq!(scanner.snapshot().to_vec(), new_scanner.snapshot().to_vec());
|
||||
}
|
||||
|
||||
fn randomly_mutate_tree(
|
||||
|