Merge branch 'main' into project-reconnection

Commit 21d6665c37 by Max Brunsfeld, 2022-12-20 15:50:09 -08:00
34 changed files with 1569 additions and 851 deletions

Cargo.lock (generated)

@ -1133,7 +1133,7 @@ dependencies = [
[[package]]
name = "collab"
version = "0.4.1"
version = "0.4.2"
dependencies = [
"anyhow",
"async-tungstenite",
@ -4809,6 +4809,24 @@ dependencies = [
"rand_core 0.3.1",
]
[[package]]
name = "recent_projects"
version = "0.1.0"
dependencies = [
"db",
"editor",
"fuzzy",
"gpui",
"language",
"ordered-float",
"picker",
"postage",
"settings",
"smol",
"text",
"workspace",
]
[[package]]
name = "redox_syscall"
version = "0.2.16"
@ -8183,6 +8201,7 @@ dependencies = [
"project_panel",
"project_symbols",
"rand 0.8.5",
"recent_projects",
"regex",
"rpc",
"rsa",


@ -40,6 +40,7 @@ members = [
"crates/project",
"crates/project_panel",
"crates/project_symbols",
"crates/recent_projects",
"crates/rope",
"crates/rpc",
"crates/search",


@ -36,6 +36,7 @@
"cmd-n": "workspace::NewFile",
"cmd-shift-n": "workspace::NewWindow",
"cmd-o": "workspace::Open",
"alt-cmd-o": "recent_projects::Toggle",
"ctrl-`": "workspace::NewTerminal"
}
},


@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
version = "0.4.1"
version = "0.4.2"
[[bin]]
name = "collab"


@ -2133,7 +2133,7 @@ async fn test_git_diff_base_change(
buffer_local_a.read_with(cx_a, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_range(0..4, false),
buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
&buffer,
&diff_base,
&[(1..2, "", "two\n")],
@ -2153,7 +2153,7 @@ async fn test_git_diff_base_change(
buffer_remote_a.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_range(0..4, false),
buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
&buffer,
&diff_base,
&[(1..2, "", "two\n")],
@ -2177,7 +2177,7 @@ async fn test_git_diff_base_change(
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_range(0..4, false),
buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
&buffer,
&diff_base,
&[(2..3, "", "three\n")],
@ -2188,7 +2188,7 @@ async fn test_git_diff_base_change(
buffer_remote_a.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_range(0..4, false),
buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
&buffer,
&diff_base,
&[(2..3, "", "three\n")],
@ -2231,7 +2231,7 @@ async fn test_git_diff_base_change(
buffer_local_b.read_with(cx_a, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_range(0..4, false),
buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
&buffer,
&diff_base,
&[(1..2, "", "two\n")],
@ -2251,7 +2251,7 @@ async fn test_git_diff_base_change(
buffer_remote_b.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_range(0..4, false),
buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
&buffer,
&diff_base,
&[(1..2, "", "two\n")],
@ -2279,12 +2279,12 @@ async fn test_git_diff_base_change(
"{:?}",
buffer
.snapshot()
.git_diff_hunks_in_range(0..4, false)
.git_diff_hunks_in_row_range(0..4, false)
.collect::<Vec<_>>()
);
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_range(0..4, false),
buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
&buffer,
&diff_base,
&[(2..3, "", "three\n")],
@ -2295,7 +2295,7 @@ async fn test_git_diff_base_change(
buffer_remote_b.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
git::diff::assert_hunks(
buffer.snapshot().git_diff_hunks_in_range(0..4, false),
buffer.snapshot().git_diff_hunks_in_row_range(0..4, false),
&buffer,
&diff_base,
&[(2..3, "", "three\n")],


@ -39,6 +39,7 @@ const FALLBACK_DB_NAME: &'static str = "FALLBACK_MEMORY_DB";
const DB_FILE_NAME: &'static str = "db.sqlite";
lazy_static::lazy_static! {
static ref ZED_STATELESS: bool = std::env::var("ZED_STATELESS").map_or(false, |v| !v.is_empty());
static ref DB_FILE_OPERATIONS: Mutex<()> = Mutex::new(());
pub static ref BACKUP_DB_PATH: RwLock<Option<PathBuf>> = RwLock::new(None);
pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false);
@ -52,6 +53,10 @@ pub async fn open_db<M: Migrator + 'static>(
db_dir: &Path,
release_channel: &ReleaseChannel,
) -> ThreadSafeConnection<M> {
if *ZED_STATELESS {
return open_fallback_db().await;
}
let release_channel_name = release_channel.dev_name();
let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));
@ -67,11 +72,11 @@ pub async fn open_db<M: Migrator + 'static>(
//
// Basically: Don't ever push invalid migrations to stable or everyone will have
// a bad time.
// If no db folder, create one at 0-{channel}
create_dir_all(&main_db_dir).context("Could not create db directory")?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));
// Optimistically open databases in parallel
if !DB_FILE_OPERATIONS.is_locked() {
// Try building a connection
@ -79,7 +84,7 @@ pub async fn open_db<M: Migrator + 'static>(
return Ok(connection)
};
}
// Take a lock in the failure case so that we move the db once per process instead
// of potentially multiple times from different threads. This shouldn't happen in the
// normal path
@ -87,12 +92,12 @@ pub async fn open_db<M: Migrator + 'static>(
if let Some(connection) = open_main_db(&db_path).await {
return Ok(connection)
};
let backup_timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("System clock is set before the unix timestamp, Zed does not support this region of spacetime")
.as_millis();
// If failed, move 0-{channel} to {current unix timestamp}-{channel}
let backup_db_dir = db_dir.join(Path::new(&format!(
"{}-{}",
@ -108,7 +113,7 @@ pub async fn open_db<M: Migrator + 'static>(
let mut guard = BACKUP_DB_PATH.write();
*guard = Some(backup_db_dir);
}
// Create a new 0-{channel}
create_dir_all(&main_db_dir).context("Should be able to create the database directory")?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));


@ -80,7 +80,7 @@ macro_rules! query {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
self.select::<$return_type>(sql_stmt)?(())
self.select::<$return_type>(sql_stmt)?()
.context(::std::format!(
"Error in {}, select_row failed to execute or parse for: {}",
::std::stringify!($id),
@ -95,7 +95,7 @@ macro_rules! query {
self.write(|connection| {
let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);
connection.select::<$return_type>(sql_stmt)?(())
connection.select::<$return_type>(sql_stmt)?()
.context(::std::format!(
"Error in {}, select_row failed to execute or parse for: {}",
::std::stringify!($id),


@ -575,6 +575,15 @@ impl Item for ProjectDiagnosticsEditor {
unreachable!()
}
fn git_diff_recalc(
&mut self,
project: ModelHandle<Project>,
cx: &mut ViewContext<Self>,
) -> Task<Result<()>> {
self.editor
.update(cx, |editor, cx| editor.git_diff_recalc(project, cx))
}
fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
Editor::to_item_events(event)
}


@ -5453,11 +5453,17 @@ impl Editor {
pub fn set_selections_from_remote(
&mut self,
selections: Vec<Selection<Anchor>>,
pending_selection: Option<Selection<Anchor>>,
cx: &mut ViewContext<Self>,
) {
let old_cursor_position = self.selections.newest_anchor().head();
self.selections.change_with(cx, |s| {
s.select_anchors(selections);
if let Some(pending_selection) = pending_selection {
s.set_pending(pending_selection, SelectMode::Character);
} else {
s.clear_pending();
}
});
self.selections_did_change(false, &old_cursor_position, cx);
}


@ -130,13 +130,17 @@ impl FollowableItem for Editor {
.ok_or_else(|| anyhow!("invalid selection"))
})
.collect::<Result<Vec<_>>>()?;
let pending_selection = state
.pending_selection
.map(|selection| deserialize_selection(&buffer, selection))
.flatten();
let scroll_top_anchor = state
.scroll_top_anchor
.and_then(|anchor| deserialize_anchor(&buffer, anchor));
drop(buffer);
if !selections.is_empty() {
editor.set_selections_from_remote(selections, cx);
if !selections.is_empty() || pending_selection.is_some() {
editor.set_selections_from_remote(selections, pending_selection, cx);
}
if let Some(scroll_top_anchor) = scroll_top_anchor {
@ -216,6 +220,11 @@ impl FollowableItem for Editor {
.iter()
.map(serialize_selection)
.collect(),
pending_selection: self
.selections
.pending_anchor()
.as_ref()
.map(serialize_selection),
}))
}
@ -269,9 +278,13 @@ impl FollowableItem for Editor {
.selections
.disjoint_anchors()
.iter()
.chain(self.selections.pending_anchor().as_ref())
.map(serialize_selection)
.collect();
update.pending_selection = self
.selections
.pending_anchor()
.as_ref()
.map(serialize_selection);
true
}
_ => false,
@ -307,6 +320,10 @@ impl FollowableItem for Editor {
.into_iter()
.filter_map(|selection| deserialize_selection(&multibuffer, selection))
.collect::<Vec<_>>();
let pending_selection = message
.pending_selection
.and_then(|selection| deserialize_selection(&multibuffer, selection));
let scroll_top_anchor = message
.scroll_top_anchor
.and_then(|anchor| deserialize_anchor(&multibuffer, anchor));
@ -361,8 +378,8 @@ impl FollowableItem for Editor {
multibuffer.remove_excerpts(removals, cx);
});
if !selections.is_empty() {
this.set_selections_from_remote(selections, cx);
if !selections.is_empty() || pending_selection.is_some() {
this.set_selections_from_remote(selections, pending_selection, cx);
this.request_autoscroll_remotely(Autoscroll::newest(), cx);
} else if let Some(anchor) = scroll_top_anchor {
this.set_scroll_anchor_remote(ScrollAnchor {


@ -2710,11 +2710,73 @@ impl MultiBufferSnapshot {
row_range: Range<u32>,
reversed: bool,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
self.as_singleton()
.into_iter()
.flat_map(move |(_, _, buffer)| {
buffer.git_diff_hunks_in_range(row_range.clone(), reversed)
})
let mut cursor = self.excerpts.cursor::<Point>();
if reversed {
cursor.seek(&Point::new(row_range.end, 0), Bias::Left, &());
if cursor.item().is_none() {
cursor.prev(&());
}
} else {
cursor.seek(&Point::new(row_range.start, 0), Bias::Right, &());
}
std::iter::from_fn(move || {
let excerpt = cursor.item()?;
let multibuffer_start = *cursor.start();
let multibuffer_end = multibuffer_start + excerpt.text_summary.lines;
if multibuffer_start.row >= row_range.end {
return None;
}
let mut buffer_start = excerpt.range.context.start;
let mut buffer_end = excerpt.range.context.end;
let excerpt_start_point = buffer_start.to_point(&excerpt.buffer);
let excerpt_end_point = excerpt_start_point + excerpt.text_summary.lines;
if row_range.start > multibuffer_start.row {
let buffer_start_point =
excerpt_start_point + Point::new(row_range.start - multibuffer_start.row, 0);
buffer_start = excerpt.buffer.anchor_before(buffer_start_point);
}
if row_range.end < multibuffer_end.row {
let buffer_end_point =
excerpt_start_point + Point::new(row_range.end - multibuffer_start.row, 0);
buffer_end = excerpt.buffer.anchor_before(buffer_end_point);
}
let buffer_hunks = excerpt
.buffer
.git_diff_hunks_intersecting_range(buffer_start..buffer_end, reversed)
.filter_map(move |hunk| {
let start = multibuffer_start.row
+ hunk
.buffer_range
.start
.saturating_sub(excerpt_start_point.row);
let end = multibuffer_start.row
+ hunk
.buffer_range
.end
.min(excerpt_end_point.row + 1)
.saturating_sub(excerpt_start_point.row);
Some(DiffHunk {
buffer_range: start..end,
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
})
});
if reversed {
cursor.prev(&());
} else {
cursor.next(&());
}
Some(buffer_hunks)
})
.flatten()
}
pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
@ -3546,11 +3608,12 @@ impl ToPointUtf16 for PointUtf16 {
#[cfg(test)]
mod tests {
use super::*;
use gpui::MutableAppContext;
use gpui::{MutableAppContext, TestAppContext};
use language::{Buffer, Rope};
use rand::prelude::*;
use settings::Settings;
use std::{env, rc::Rc};
use unindent::Unindent;
use util::test::sample_text;
@ -4168,6 +4231,178 @@ mod tests {
);
}
#[gpui::test]
async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
use git::diff::DiffHunkStatus;
// buffer has two modified hunks with two rows each
let buffer_1 = cx.add_model(|cx| {
let mut buffer = Buffer::new(
0,
"
1.zero
1.ONE
1.TWO
1.three
1.FOUR
1.FIVE
1.six
"
.unindent(),
cx,
);
buffer.set_diff_base(
Some(
"
1.zero
1.one
1.two
1.three
1.four
1.five
1.six
"
.unindent(),
),
cx,
);
buffer
});
// buffer has a deletion hunk and an insertion hunk
let buffer_2 = cx.add_model(|cx| {
let mut buffer = Buffer::new(
0,
"
2.zero
2.one
2.two
2.three
2.four
2.five
2.six
"
.unindent(),
cx,
);
buffer.set_diff_base(
Some(
"
2.zero
2.one
2.one-and-a-half
2.two
2.three
2.four
2.six
"
.unindent(),
),
cx,
);
buffer
});
cx.foreground().run_until_parked();
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts(
buffer_1.clone(),
[
// excerpt ends in the middle of a modified hunk
ExcerptRange {
context: Point::new(0, 0)..Point::new(1, 5),
primary: Default::default(),
},
// excerpt begins in the middle of a modified hunk
ExcerptRange {
context: Point::new(5, 0)..Point::new(6, 5),
primary: Default::default(),
},
],
cx,
);
multibuffer.push_excerpts(
buffer_2.clone(),
[
// excerpt ends at a deletion
ExcerptRange {
context: Point::new(0, 0)..Point::new(1, 5),
primary: Default::default(),
},
// excerpt starts at a deletion
ExcerptRange {
context: Point::new(2, 0)..Point::new(2, 5),
primary: Default::default(),
},
// excerpt fully contains a deletion hunk
ExcerptRange {
context: Point::new(1, 0)..Point::new(2, 5),
primary: Default::default(),
},
// excerpt fully contains an insertion hunk
ExcerptRange {
context: Point::new(4, 0)..Point::new(6, 5),
primary: Default::default(),
},
],
cx,
);
multibuffer
});
let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
assert_eq!(
snapshot.text(),
"
1.zero
1.ONE
1.FIVE
1.six
2.zero
2.one
2.two
2.one
2.two
2.four
2.five
2.six"
.unindent()
);
let expected = [
(DiffHunkStatus::Modified, 1..2),
(DiffHunkStatus::Modified, 2..3),
//TODO: Define better when and where removed hunks show up at range extremities
(DiffHunkStatus::Removed, 6..6),
(DiffHunkStatus::Removed, 8..8),
(DiffHunkStatus::Added, 10..11),
];
assert_eq!(
snapshot
.git_diff_hunks_in_range(0..12, false)
.map(|hunk| (hunk.status(), hunk.buffer_range))
.collect::<Vec<_>>(),
&expected,
);
assert_eq!(
snapshot
.git_diff_hunks_in_range(0..12, true)
.map(|hunk| (hunk.status(), hunk.buffer_range))
.collect::<Vec<_>>(),
expected
.iter()
.rev()
.cloned()
.collect::<Vec<_>>()
.as_slice(),
);
}
#[gpui::test(iterations = 100)]
fn test_random_multibuffer(cx: &mut MutableAppContext, mut rng: StdRng) {
let operations = env::var("OPERATIONS")


@ -254,7 +254,7 @@ impl<'a> EditorTestContext<'a> {
Actual selections:
{}
"},
"},
self.assertion_context(),
expected_marked_text,
actual_marked_text,


@ -62,11 +62,12 @@ impl View for FileFinder {
impl FileFinder {
fn labels_for_match(&self, path_match: &PathMatch) -> (String, Vec<usize>, String, Vec<usize>) {
let path_string = path_match.path.to_string_lossy();
let path = &path_match.path;
let path_string = path.to_string_lossy();
let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
let path_positions = path_match.positions.clone();
let file_name = path_match.path.file_name().map_or_else(
let file_name = path.file_name().map_or_else(
|| path_match.path_prefix.to_string(),
|file_name| file_name.to_string_lossy().to_string(),
);
@ -161,7 +162,7 @@ impl FileFinder {
self.cancel_flag = Arc::new(AtomicBool::new(false));
let cancel_flag = self.cancel_flag.clone();
cx.spawn(|this, mut cx| async move {
let matches = fuzzy::match_paths(
let matches = fuzzy::match_path_sets(
candidate_sets.as_slice(),
&query,
false,


@ -1,794 +1,8 @@
mod char_bag;
use gpui::executor;
use std::{
borrow::Cow,
cmp::{self, Ordering},
path::Path,
sync::atomic::{self, AtomicBool},
sync::Arc,
};
mod matcher;
mod paths;
mod strings;
pub use char_bag::CharBag;
const BASE_DISTANCE_PENALTY: f64 = 0.6;
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
const MIN_DISTANCE_PENALTY: f64 = 0.2;
pub struct Matcher<'a> {
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
smart_case: bool,
max_results: usize,
min_score: f64,
match_positions: Vec<usize>,
last_positions: Vec<usize>,
score_matrix: Vec<Option<f64>>,
best_position_matrix: Vec<usize>,
}
trait Match: Ord {
fn score(&self) -> f64;
fn set_positions(&mut self, positions: Vec<usize>);
}
trait MatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool;
fn to_string(&self) -> Cow<'_, str>;
}
#[derive(Clone, Debug)]
pub struct PathMatchCandidate<'a> {
pub path: &'a Arc<Path>,
pub char_bag: CharBag,
}
#[derive(Clone, Debug)]
pub struct PathMatch {
pub score: f64,
pub positions: Vec<usize>,
pub worktree_id: usize,
pub path: Arc<Path>,
pub path_prefix: Arc<str>,
}
#[derive(Clone, Debug)]
pub struct StringMatchCandidate {
pub id: usize,
pub string: String,
pub char_bag: CharBag,
}
pub trait PathMatchCandidateSet<'a>: Send + Sync {
type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
fn id(&self) -> usize;
fn len(&self) -> usize;
fn is_empty(&self) -> bool {
self.len() == 0
}
fn prefix(&self) -> Arc<str>;
fn candidates(&'a self, start: usize) -> Self::Candidates;
}
impl Match for PathMatch {
fn score(&self) -> f64 {
self.score
}
fn set_positions(&mut self, positions: Vec<usize>) {
self.positions = positions;
}
}
impl Match for StringMatch {
fn score(&self) -> f64 {
self.score
}
fn set_positions(&mut self, positions: Vec<usize>) {
self.positions = positions;
}
}
impl<'a> MatchCandidate for PathMatchCandidate<'a> {
fn has_chars(&self, bag: CharBag) -> bool {
self.char_bag.is_superset(bag)
}
fn to_string(&self) -> Cow<'a, str> {
self.path.to_string_lossy()
}
}
impl StringMatchCandidate {
pub fn new(id: usize, string: String) -> Self {
Self {
id,
char_bag: CharBag::from(string.as_str()),
string,
}
}
}
impl<'a> MatchCandidate for &'a StringMatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool {
self.char_bag.is_superset(bag)
}
fn to_string(&self) -> Cow<'a, str> {
self.string.as_str().into()
}
}
#[derive(Clone, Debug)]
pub struct StringMatch {
pub candidate_id: usize,
pub score: f64,
pub positions: Vec<usize>,
pub string: String,
}
impl PartialEq for StringMatch {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for StringMatch {}
impl PartialOrd for StringMatch {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for StringMatch {
fn cmp(&self, other: &Self) -> Ordering {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.candidate_id.cmp(&other.candidate_id))
}
}
impl PartialEq for PathMatch {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for PathMatch {}
impl PartialOrd for PathMatch {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for PathMatch {
fn cmp(&self, other: &Self) -> Ordering {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.worktree_id.cmp(&other.worktree_id))
.then_with(|| Arc::as_ptr(&self.path).cmp(&Arc::as_ptr(&other.path)))
}
}
pub async fn match_strings(
candidates: &[StringMatchCandidate],
query: &str,
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<StringMatch> {
if candidates.is_empty() || max_results == 0 {
return Default::default();
}
if query.is_empty() {
return candidates
.iter()
.map(|candidate| StringMatch {
candidate_id: candidate.id,
score: 0.,
positions: Default::default(),
string: candidate.string.clone(),
})
.collect();
}
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
let num_cpus = background.num_cpus().min(candidates.len());
let segment_size = (candidates.len() + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results.min(candidates.len())))
.collect::<Vec<_>>();
background
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
let cancel_flag = &cancel_flag;
scope.spawn(async move {
let segment_start = cmp::min(segment_idx * segment_size, candidates.len());
let segment_end = cmp::min(segment_start + segment_size, candidates.len());
let mut matcher = Matcher::new(
query,
lowercase_query,
query_char_bag,
smart_case,
max_results,
);
matcher.match_strings(
&candidates[segment_start..segment_end],
results,
cancel_flag,
);
});
}
})
.await;
let mut results = Vec::new();
for segment_result in segment_results {
if results.is_empty() {
results = segment_result;
} else {
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
}
}
results
}
pub async fn match_paths<'a, Set: PathMatchCandidateSet<'a>>(
candidate_sets: &'a [Set],
query: &str,
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<PathMatch> {
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
if path_count == 0 {
return Vec::new();
}
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
let num_cpus = background.num_cpus().min(path_count);
let segment_size = (path_count + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results))
.collect::<Vec<_>>();
background
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
scope.spawn(async move {
let segment_start = segment_idx * segment_size;
let segment_end = segment_start + segment_size;
let mut matcher = Matcher::new(
query,
lowercase_query,
query_char_bag,
smart_case,
max_results,
);
let mut tree_start = 0;
for candidate_set in candidate_sets {
let tree_end = tree_start + candidate_set.len();
if tree_start < segment_end && segment_start < tree_end {
let start = cmp::max(tree_start, segment_start) - tree_start;
let end = cmp::min(tree_end, segment_end) - tree_start;
let candidates = candidate_set.candidates(start).take(end - start);
matcher.match_paths(
candidate_set.id(),
candidate_set.prefix(),
candidates,
results,
cancel_flag,
);
}
if tree_end >= segment_end {
break;
}
tree_start = tree_end;
}
})
}
})
.await;
let mut results = Vec::new();
for segment_result in segment_results {
if results.is_empty() {
results = segment_result;
} else {
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
}
}
results
}
impl<'a> Matcher<'a> {
pub fn new(
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
smart_case: bool,
max_results: usize,
) -> Self {
Self {
query,
lowercase_query,
query_char_bag,
min_score: 0.0,
last_positions: vec![0; query.len()],
match_positions: vec![0; query.len()],
score_matrix: Vec::new(),
best_position_matrix: Vec::new(),
smart_case,
max_results,
}
}
pub fn match_strings(
&mut self,
candidates: &[StringMatchCandidate],
results: &mut Vec<StringMatch>,
cancel_flag: &AtomicBool,
) {
self.match_internal(
&[],
&[],
candidates.iter(),
results,
cancel_flag,
|candidate, score| StringMatch {
candidate_id: candidate.id,
score,
positions: Vec::new(),
string: candidate.string.to_string(),
},
)
}
pub fn match_paths<'c: 'a>(
&mut self,
tree_id: usize,
path_prefix: Arc<str>,
path_entries: impl Iterator<Item = PathMatchCandidate<'c>>,
results: &mut Vec<PathMatch>,
cancel_flag: &AtomicBool,
) {
let prefix = path_prefix.chars().collect::<Vec<_>>();
let lowercase_prefix = prefix
.iter()
.map(|c| c.to_ascii_lowercase())
.collect::<Vec<_>>();
self.match_internal(
&prefix,
&lowercase_prefix,
path_entries,
results,
cancel_flag,
|candidate, score| PathMatch {
score,
worktree_id: tree_id,
positions: Vec::new(),
path: candidate.path.clone(),
path_prefix: path_prefix.clone(),
},
)
}
fn match_internal<C: MatchCandidate, R, F>(
&mut self,
prefix: &[char],
lowercase_prefix: &[char],
candidates: impl Iterator<Item = C>,
results: &mut Vec<R>,
cancel_flag: &AtomicBool,
build_match: F,
) where
R: Match,
F: Fn(&C, f64) -> R,
{
let mut candidate_chars = Vec::new();
let mut lowercase_candidate_chars = Vec::new();
for candidate in candidates {
if !candidate.has_chars(self.query_char_bag) {
continue;
}
if cancel_flag.load(atomic::Ordering::Relaxed) {
break;
}
candidate_chars.clear();
lowercase_candidate_chars.clear();
for c in candidate.to_string().chars() {
candidate_chars.push(c);
lowercase_candidate_chars.push(c.to_ascii_lowercase());
}
if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) {
continue;
}
let matrix_len = self.query.len() * (prefix.len() + candidate_chars.len());
self.score_matrix.clear();
self.score_matrix.resize(matrix_len, None);
self.best_position_matrix.clear();
self.best_position_matrix.resize(matrix_len, 0);
let score = self.score_match(
&candidate_chars,
&lowercase_candidate_chars,
prefix,
lowercase_prefix,
);
if score > 0.0 {
let mut mat = build_match(&candidate, score);
if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) {
if results.len() < self.max_results {
mat.set_positions(self.match_positions.clone());
results.insert(i, mat);
} else if i < results.len() {
results.pop();
mat.set_positions(self.match_positions.clone());
results.insert(i, mat);
}
if results.len() == self.max_results {
self.min_score = results.last().unwrap().score();
}
}
}
}
}
fn find_last_positions(
&mut self,
lowercase_prefix: &[char],
lowercase_candidate: &[char],
) -> bool {
let mut lowercase_prefix = lowercase_prefix.iter();
let mut lowercase_candidate = lowercase_candidate.iter();
for (i, char) in self.lowercase_query.iter().enumerate().rev() {
if let Some(j) = lowercase_candidate.rposition(|c| c == char) {
self.last_positions[i] = j + lowercase_prefix.len();
} else if let Some(j) = lowercase_prefix.rposition(|c| c == char) {
self.last_positions[i] = j;
} else {
return false;
}
}
true
}
fn score_match(
&mut self,
path: &[char],
path_cased: &[char],
prefix: &[char],
lowercase_prefix: &[char],
) -> f64 {
let score = self.recursive_score_match(
path,
path_cased,
prefix,
lowercase_prefix,
0,
0,
self.query.len() as f64,
) * self.query.len() as f64;
if score <= 0.0 {
return 0.0;
}
let path_len = prefix.len() + path.len();
let mut cur_start = 0;
let mut byte_ix = 0;
let mut char_ix = 0;
for i in 0..self.query.len() {
let match_char_ix = self.best_position_matrix[i * path_len + cur_start];
while char_ix < match_char_ix {
let ch = prefix
.get(char_ix)
.or_else(|| path.get(char_ix - prefix.len()))
.unwrap();
byte_ix += ch.len_utf8();
char_ix += 1;
}
cur_start = match_char_ix + 1;
self.match_positions[i] = byte_ix;
}
score
}
#[allow(clippy::too_many_arguments)]
fn recursive_score_match(
&mut self,
path: &[char],
path_cased: &[char],
prefix: &[char],
lowercase_prefix: &[char],
query_idx: usize,
path_idx: usize,
cur_score: f64,
) -> f64 {
if query_idx == self.query.len() {
return 1.0;
}
let path_len = prefix.len() + path.len();
if let Some(memoized) = self.score_matrix[query_idx * path_len + path_idx] {
return memoized;
}
let mut score = 0.0;
let mut best_position = 0;
let query_char = self.lowercase_query[query_idx];
let limit = self.last_positions[query_idx];
let mut last_slash = 0;
for j in path_idx..=limit {
let path_char = if j < prefix.len() {
lowercase_prefix[j]
} else {
path_cased[j - prefix.len()]
};
let is_path_sep = path_char == '/' || path_char == '\\';
if query_idx == 0 && is_path_sep {
last_slash = j;
}
if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
let curr = if j < prefix.len() {
prefix[j]
} else {
path[j - prefix.len()]
};
let mut char_score = 1.0;
if j > path_idx {
let last = if j - 1 < prefix.len() {
prefix[j - 1]
} else {
path[j - 1 - prefix.len()]
};
if last == '/' {
char_score = 0.9;
} else if (last == '-' || last == '_' || last == ' ' || last.is_numeric())
|| (last.is_lowercase() && curr.is_uppercase())
{
char_score = 0.8;
} else if last == '.' {
char_score = 0.7;
} else if query_idx == 0 {
char_score = BASE_DISTANCE_PENALTY;
} else {
char_score = MIN_DISTANCE_PENALTY.max(
BASE_DISTANCE_PENALTY
- (j - path_idx - 1) as f64 * ADDITIONAL_DISTANCE_PENALTY,
);
}
}
// Apply a severe penalty if the case doesn't match.
// This will make the exact matches have higher score than the case-insensitive and the
// path insensitive matches.
if (self.smart_case || curr == '/') && self.query[query_idx] != curr {
char_score *= 0.001;
}
let mut multiplier = char_score;
// Scale the score based on how deep within the path we found the match.
if query_idx == 0 {
multiplier /= ((prefix.len() + path.len()) - last_slash) as f64;
}
let mut next_score = 1.0;
if self.min_score > 0.0 {
next_score = cur_score * multiplier;
// Scores only decrease. If we can't pass the previous best, bail
if next_score < self.min_score {
// Ensure that score is non-zero so we use it in the memo table.
if score == 0.0 {
score = 1e-18;
}
continue;
}
}
let new_score = self.recursive_score_match(
path,
path_cased,
prefix,
lowercase_prefix,
query_idx + 1,
j + 1,
next_score,
) * multiplier;
if new_score > score {
score = new_score;
best_position = j;
// Optimization: can't score better than 1.
if new_score == 1.0 {
break;
}
}
}
}
if best_position != 0 {
self.best_position_matrix[query_idx * path_len + path_idx] = best_position;
}
self.score_matrix[query_idx * path_len + path_idx] = Some(score);
score
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::path::PathBuf;
#[test]
fn test_get_last_positions() {
let mut query: &[char] = &['d', 'c'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert!(!result);
query = &['c', 'd'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert!(result);
assert_eq!(matcher.last_positions, vec![2, 4]);
query = &['z', '/', 'z', 'f'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']);
assert!(result);
assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]);
}
#[test]
fn test_match_path_entries() {
let paths = vec![
"",
"a",
"ab",
"abC",
"abcd",
"alphabravocharlie",
"AlphaBravoCharlie",
"thisisatestdir",
"/////ThisIsATestDir",
"/this/is/a/test/dir",
"/test/tiatd",
];
assert_eq!(
match_query("abc", false, &paths),
vec![
("abC", vec![0, 1, 2]),
("abcd", vec![0, 1, 2]),
("AlphaBravoCharlie", vec![0, 5, 10]),
("alphabravocharlie", vec![4, 5, 10]),
]
);
assert_eq!(
match_query("t/i/a/t/d", false, &paths),
vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),]
);
assert_eq!(
match_query("tiatd", false, &paths),
vec![
("/test/tiatd", vec![6, 7, 8, 9, 10]),
("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]),
("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]),
("thisisatestdir", vec![0, 2, 6, 7, 11]),
]
);
}
#[test]
fn test_match_multibyte_path_entries() {
let paths = vec!["aαbβ/cγ", "αβγδ/bcde", "c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", "/d/🆒/h"];
assert_eq!("1️⃣".len(), 7);
assert_eq!(
match_query("bcd", false, &paths),
vec![
("αβγδ/bcde", vec![9, 10, 11]),
("aαbβ/cγ", vec![3, 7, 10]),
]
);
assert_eq!(
match_query("cde", false, &paths),
vec![
("αβγδ/bcde", vec![10, 11, 12]),
("c1⃣2⃣3⃣/d4⃣5⃣6⃣/e7⃣8⃣9⃣/f", vec![0, 23, 46]),
]
);
}
fn match_query<'a>(
query: &str,
smart_case: bool,
paths: &[&'a str],
) -> Vec<(&'a str, Vec<usize>)> {
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let query_chars = CharBag::from(&lowercase_query[..]);
let path_arcs = paths
.iter()
.map(|path| Arc::from(PathBuf::from(path)))
.collect::<Vec<_>>();
let mut path_entries = Vec::new();
for (i, path) in paths.iter().enumerate() {
let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
let char_bag = CharBag::from(lowercase_path.as_slice());
path_entries.push(PathMatchCandidate {
char_bag,
path: path_arcs.get(i).unwrap(),
});
}
let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case, 100);
let cancel_flag = AtomicBool::new(false);
let mut results = Vec::new();
matcher.match_paths(
0,
"".into(),
path_entries.into_iter(),
&mut results,
&cancel_flag,
);
results
.into_iter()
.map(|result| {
(
paths
.iter()
.copied()
.find(|p| result.path.as_ref() == Path::new(p))
.unwrap(),
result.positions,
)
})
.collect()
}
}
pub use paths::{match_path_sets, PathMatch, PathMatchCandidate, PathMatchCandidateSet};
pub use strings::{match_strings, StringMatch, StringMatchCandidate};

crates/fuzzy/src/matcher.rs (new file, 463 lines)

@ -0,0 +1,463 @@
use std::{
borrow::Cow,
sync::atomic::{self, AtomicBool},
};
use crate::CharBag;
const BASE_DISTANCE_PENALTY: f64 = 0.6;
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
const MIN_DISTANCE_PENALTY: f64 = 0.2;
pub struct Matcher<'a> {
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
smart_case: bool,
max_results: usize,
min_score: f64,
match_positions: Vec<usize>,
last_positions: Vec<usize>,
score_matrix: Vec<Option<f64>>,
best_position_matrix: Vec<usize>,
}
pub trait Match: Ord {
fn score(&self) -> f64;
fn set_positions(&mut self, positions: Vec<usize>);
}
pub trait MatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool;
fn to_string(&self) -> Cow<'_, str>;
}
impl<'a> Matcher<'a> {
pub fn new(
query: &'a [char],
lowercase_query: &'a [char],
query_char_bag: CharBag,
smart_case: bool,
max_results: usize,
) -> Self {
Self {
query,
lowercase_query,
query_char_bag,
min_score: 0.0,
last_positions: vec![0; query.len()],
match_positions: vec![0; query.len()],
score_matrix: Vec::new(),
best_position_matrix: Vec::new(),
smart_case,
max_results,
}
}
pub fn match_candidates<C: MatchCandidate, R, F>(
&mut self,
prefix: &[char],
lowercase_prefix: &[char],
candidates: impl Iterator<Item = C>,
results: &mut Vec<R>,
cancel_flag: &AtomicBool,
build_match: F,
) where
R: Match,
F: Fn(&C, f64) -> R,
{
let mut candidate_chars = Vec::new();
let mut lowercase_candidate_chars = Vec::new();
for candidate in candidates {
if !candidate.has_chars(self.query_char_bag) {
continue;
}
if cancel_flag.load(atomic::Ordering::Relaxed) {
break;
}
candidate_chars.clear();
lowercase_candidate_chars.clear();
for c in candidate.to_string().chars() {
candidate_chars.push(c);
lowercase_candidate_chars.push(c.to_ascii_lowercase());
}
if !self.find_last_positions(lowercase_prefix, &lowercase_candidate_chars) {
continue;
}
let matrix_len = self.query.len() * (prefix.len() + candidate_chars.len());
self.score_matrix.clear();
self.score_matrix.resize(matrix_len, None);
self.best_position_matrix.clear();
self.best_position_matrix.resize(matrix_len, 0);
let score = self.score_match(
&candidate_chars,
&lowercase_candidate_chars,
prefix,
lowercase_prefix,
);
if score > 0.0 {
let mut mat = build_match(&candidate, score);
if let Err(i) = results.binary_search_by(|m| mat.cmp(m)) {
if results.len() < self.max_results {
mat.set_positions(self.match_positions.clone());
results.insert(i, mat);
} else if i < results.len() {
results.pop();
mat.set_positions(self.match_positions.clone());
results.insert(i, mat);
}
if results.len() == self.max_results {
self.min_score = results.last().unwrap().score();
}
}
}
}
}
fn find_last_positions(
&mut self,
lowercase_prefix: &[char],
lowercase_candidate: &[char],
) -> bool {
let mut lowercase_prefix = lowercase_prefix.iter();
let mut lowercase_candidate = lowercase_candidate.iter();
for (i, char) in self.lowercase_query.iter().enumerate().rev() {
if let Some(j) = lowercase_candidate.rposition(|c| c == char) {
self.last_positions[i] = j + lowercase_prefix.len();
} else if let Some(j) = lowercase_prefix.rposition(|c| c == char) {
self.last_positions[i] = j;
} else {
return false;
}
}
true
}
fn score_match(
&mut self,
path: &[char],
path_cased: &[char],
prefix: &[char],
lowercase_prefix: &[char],
) -> f64 {
let score = self.recursive_score_match(
path,
path_cased,
prefix,
lowercase_prefix,
0,
0,
self.query.len() as f64,
) * self.query.len() as f64;
if score <= 0.0 {
return 0.0;
}
let path_len = prefix.len() + path.len();
let mut cur_start = 0;
let mut byte_ix = 0;
let mut char_ix = 0;
for i in 0..self.query.len() {
let match_char_ix = self.best_position_matrix[i * path_len + cur_start];
while char_ix < match_char_ix {
let ch = prefix
.get(char_ix)
.or_else(|| path.get(char_ix - prefix.len()))
.unwrap();
byte_ix += ch.len_utf8();
char_ix += 1;
}
cur_start = match_char_ix + 1;
self.match_positions[i] = byte_ix;
}
score
}
#[allow(clippy::too_many_arguments)]
fn recursive_score_match(
&mut self,
path: &[char],
path_cased: &[char],
prefix: &[char],
lowercase_prefix: &[char],
query_idx: usize,
path_idx: usize,
cur_score: f64,
) -> f64 {
if query_idx == self.query.len() {
return 1.0;
}
let path_len = prefix.len() + path.len();
if let Some(memoized) = self.score_matrix[query_idx * path_len + path_idx] {
return memoized;
}
let mut score = 0.0;
let mut best_position = 0;
let query_char = self.lowercase_query[query_idx];
let limit = self.last_positions[query_idx];
let mut last_slash = 0;
for j in path_idx..=limit {
let path_char = if j < prefix.len() {
lowercase_prefix[j]
} else {
path_cased[j - prefix.len()]
};
let is_path_sep = path_char == '/' || path_char == '\\';
if query_idx == 0 && is_path_sep {
last_slash = j;
}
if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
let curr = if j < prefix.len() {
prefix[j]
} else {
path[j - prefix.len()]
};
let mut char_score = 1.0;
if j > path_idx {
let last = if j - 1 < prefix.len() {
prefix[j - 1]
} else {
path[j - 1 - prefix.len()]
};
if last == '/' {
char_score = 0.9;
} else if (last == '-' || last == '_' || last == ' ' || last.is_numeric())
|| (last.is_lowercase() && curr.is_uppercase())
{
char_score = 0.8;
} else if last == '.' {
char_score = 0.7;
} else if query_idx == 0 {
char_score = BASE_DISTANCE_PENALTY;
} else {
char_score = MIN_DISTANCE_PENALTY.max(
BASE_DISTANCE_PENALTY
- (j - path_idx - 1) as f64 * ADDITIONAL_DISTANCE_PENALTY,
);
}
}
// Apply a severe penalty if the case doesn't match.
// This will make the exact matches have higher score than the case-insensitive and the
// path insensitive matches.
if (self.smart_case || curr == '/') && self.query[query_idx] != curr {
char_score *= 0.001;
}
let mut multiplier = char_score;
// Scale the score based on how deep within the path we found the match.
if query_idx == 0 {
multiplier /= ((prefix.len() + path.len()) - last_slash) as f64;
}
let mut next_score = 1.0;
if self.min_score > 0.0 {
next_score = cur_score * multiplier;
// Scores only decrease. If we can't pass the previous best, bail
if next_score < self.min_score {
// Ensure that score is non-zero so we use it in the memo table.
if score == 0.0 {
score = 1e-18;
}
continue;
}
}
let new_score = self.recursive_score_match(
path,
path_cased,
prefix,
lowercase_prefix,
query_idx + 1,
j + 1,
next_score,
) * multiplier;
if new_score > score {
score = new_score;
best_position = j;
// Optimization: can't score better than 1.
if new_score == 1.0 {
break;
}
}
}
}
if best_position != 0 {
self.best_position_matrix[query_idx * path_len + path_idx] = best_position;
}
self.score_matrix[query_idx * path_len + path_idx] = Some(score);
score
}
}
#[cfg(test)]
mod tests {
use crate::{PathMatch, PathMatchCandidate};
use super::*;
use std::{
path::{Path, PathBuf},
sync::Arc,
};
#[test]
fn test_get_last_positions() {
let mut query: &[char] = &['d', 'c'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert!(!result);
query = &['c', 'd'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['a', 'b', 'c'], &['b', 'd', 'e', 'f']);
assert!(result);
assert_eq!(matcher.last_positions, vec![2, 4]);
query = &['z', '/', 'z', 'f'];
let mut matcher = Matcher::new(query, query, query.into(), false, 10);
let result = matcher.find_last_positions(&['z', 'e', 'd', '/'], &['z', 'e', 'd', '/', 'f']);
assert!(result);
assert_eq!(matcher.last_positions, vec![0, 3, 4, 8]);
}
#[test]
fn test_match_path_entries() {
let paths = vec![
"",
"a",
"ab",
"abC",
"abcd",
"alphabravocharlie",
"AlphaBravoCharlie",
"thisisatestdir",
"/////ThisIsATestDir",
"/this/is/a/test/dir",
"/test/tiatd",
];
assert_eq!(
match_single_path_query("abc", false, &paths),
vec![
("abC", vec![0, 1, 2]),
("abcd", vec![0, 1, 2]),
("AlphaBravoCharlie", vec![0, 5, 10]),
("alphabravocharlie", vec![4, 5, 10]),
]
);
assert_eq!(
match_single_path_query("t/i/a/t/d", false, &paths),
vec![("/this/is/a/test/dir", vec![1, 5, 6, 8, 9, 10, 11, 15, 16]),]
);
assert_eq!(
match_single_path_query("tiatd", false, &paths),
vec![
("/test/tiatd", vec![6, 7, 8, 9, 10]),
("/this/is/a/test/dir", vec![1, 6, 9, 11, 16]),
("/////ThisIsATestDir", vec![5, 9, 11, 12, 16]),
("thisisatestdir", vec![0, 2, 6, 7, 11]),
]
);
}
#[test]
fn test_match_multibyte_path_entries() {
let paths = vec!["aαbβ/cγ", "αβγδ/bcde", "c1️⃣2️⃣3️⃣/d4️⃣5️⃣6️⃣/e7️⃣8️⃣9️⃣/f", "/d/🆒/h"];
assert_eq!("1️⃣".len(), 7);
assert_eq!(
match_single_path_query("bcd", false, &paths),
vec![
("αβγδ/bcde", vec![9, 10, 11]),
("aαbβ/cγ", vec![3, 7, 10]),
]
);
assert_eq!(
match_single_path_query("cde", false, &paths),
vec![
("αβγδ/bcde", vec![10, 11, 12]),
("c1⃣2⃣3⃣/d4⃣5⃣6⃣/e7⃣8⃣9⃣/f", vec![0, 23, 46]),
]
);
}
fn match_single_path_query<'a>(
query: &str,
smart_case: bool,
paths: &[&'a str],
) -> Vec<(&'a str, Vec<usize>)> {
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let query_chars = CharBag::from(&lowercase_query[..]);
let path_arcs: Vec<Arc<Path>> = paths
.iter()
.map(|path| Arc::from(PathBuf::from(path)))
.collect::<Vec<_>>();
let mut path_entries = Vec::new();
for (i, path) in paths.iter().enumerate() {
let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
let char_bag = CharBag::from(lowercase_path.as_slice());
path_entries.push(PathMatchCandidate {
char_bag,
path: &path_arcs[i],
});
}
let mut matcher = Matcher::new(&query, &lowercase_query, query_chars, smart_case, 100);
let cancel_flag = AtomicBool::new(false);
let mut results = Vec::new();
matcher.match_candidates(
&[],
&[],
path_entries.into_iter(),
&mut results,
&cancel_flag,
|candidate, score| PathMatch {
score,
worktree_id: 0,
positions: Vec::new(),
path: candidate.path.clone(),
path_prefix: "".into(),
},
);
results
.into_iter()
.map(|result| {
(
paths
.iter()
.copied()
.find(|p| result.path.as_ref() == Path::new(p))
.unwrap(),
result.positions,
)
})
.collect()
}
}

crates/fuzzy/src/paths.rs (new file, 174 lines)

@ -0,0 +1,174 @@
use std::{
borrow::Cow,
cmp::{self, Ordering},
path::Path,
sync::{atomic::AtomicBool, Arc},
};
use gpui::executor;
use crate::{
matcher::{Match, MatchCandidate, Matcher},
CharBag,
};
#[derive(Clone, Debug)]
pub struct PathMatchCandidate<'a> {
pub path: &'a Arc<Path>,
pub char_bag: CharBag,
}
#[derive(Clone, Debug)]
pub struct PathMatch {
pub score: f64,
pub positions: Vec<usize>,
pub worktree_id: usize,
pub path: Arc<Path>,
pub path_prefix: Arc<str>,
}
pub trait PathMatchCandidateSet<'a>: Send + Sync {
type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
fn id(&self) -> usize;
fn len(&self) -> usize;
fn is_empty(&self) -> bool {
self.len() == 0
}
fn prefix(&self) -> Arc<str>;
fn candidates(&'a self, start: usize) -> Self::Candidates;
}
impl Match for PathMatch {
fn score(&self) -> f64 {
self.score
}
fn set_positions(&mut self, positions: Vec<usize>) {
self.positions = positions;
}
}
impl<'a> MatchCandidate for PathMatchCandidate<'a> {
fn has_chars(&self, bag: CharBag) -> bool {
self.char_bag.is_superset(bag)
}
fn to_string(&self) -> Cow<'a, str> {
self.path.to_string_lossy()
}
}
impl PartialEq for PathMatch {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for PathMatch {}
impl PartialOrd for PathMatch {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for PathMatch {
fn cmp(&self, other: &Self) -> Ordering {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.worktree_id.cmp(&other.worktree_id))
.then_with(|| self.path.cmp(&other.path))
}
}
pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
candidate_sets: &'a [Set],
query: &str,
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<PathMatch> {
let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
if path_count == 0 {
return Vec::new();
}
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
let num_cpus = background.num_cpus().min(path_count);
let segment_size = (path_count + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results))
.collect::<Vec<_>>();
background
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
scope.spawn(async move {
let segment_start = segment_idx * segment_size;
let segment_end = segment_start + segment_size;
let mut matcher = Matcher::new(
query,
lowercase_query,
query_char_bag,
smart_case,
max_results,
);
let mut tree_start = 0;
for candidate_set in candidate_sets {
let tree_end = tree_start + candidate_set.len();
if tree_start < segment_end && segment_start < tree_end {
let start = cmp::max(tree_start, segment_start) - tree_start;
let end = cmp::min(tree_end, segment_end) - tree_start;
let candidates = candidate_set.candidates(start).take(end - start);
let worktree_id = candidate_set.id();
let prefix = candidate_set.prefix().chars().collect::<Vec<_>>();
let lowercase_prefix = prefix
.iter()
.map(|c| c.to_ascii_lowercase())
.collect::<Vec<_>>();
matcher.match_candidates(
&prefix,
&lowercase_prefix,
candidates,
results,
cancel_flag,
|candidate, score| PathMatch {
score,
worktree_id,
positions: Vec::new(),
path: candidate.path.clone(),
path_prefix: candidate_set.prefix(),
},
);
}
if tree_end >= segment_end {
break;
}
tree_start = tree_end;
}
})
}
})
.await;
let mut results = Vec::new();
for segment_result in segment_results {
if results.is_empty() {
results = segment_result;
} else {
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
}
}
results
}
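For illustration only, not part of the commit: a minimal sketch of implementing the PathMatchCandidateSet trait above for a plain in-memory list of paths and feeding it to match_path_sets. The WorktreePaths type, its new helper, and the find_paths wrapper are hypothetical; the cancel flag and background executor are assumed to come from the caller, as they do in file_finder.

use std::{
    path::Path,
    sync::{atomic::AtomicBool, Arc},
};

use fuzzy::{match_path_sets, CharBag, PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::executor;

// Hypothetical candidate set over an in-memory list of paths.
struct WorktreePaths {
    id: usize,
    prefix: Arc<str>,
    paths: Vec<(Arc<Path>, CharBag)>,
}

impl WorktreePaths {
    fn new(id: usize, prefix: &str, paths: &[&str]) -> Self {
        // Char bags are built from the lowercased path, matching how the
        // tests in this commit construct candidates.
        let paths: Vec<(Arc<Path>, CharBag)> = paths
            .iter()
            .map(|path| {
                let lowercase = path.to_lowercase().chars().collect::<Vec<_>>();
                (
                    Arc::from(Path::new(path)),
                    CharBag::from(lowercase.as_slice()),
                )
            })
            .collect();
        Self {
            id,
            prefix: prefix.into(),
            paths,
        }
    }
}

impl<'a> PathMatchCandidateSet<'a> for WorktreePaths {
    type Candidates = Box<dyn Iterator<Item = PathMatchCandidate<'a>> + 'a>;

    fn id(&self) -> usize {
        self.id
    }

    fn len(&self) -> usize {
        self.paths.len()
    }

    fn prefix(&self) -> Arc<str> {
        self.prefix.clone()
    }

    fn candidates(&'a self, start: usize) -> Self::Candidates {
        Box::new(self.paths[start..].iter().map(|(path, char_bag)| {
            PathMatchCandidate {
                path,
                char_bag: char_bag.clone(),
            }
        }))
    }
}

// Score every path in every set against the query, keeping the top 100.
async fn find_paths(
    sets: &[WorktreePaths],
    query: &str,
    cancel_flag: &AtomicBool,
    background: Arc<executor::Background>,
) -> Vec<PathMatch> {
    match_path_sets(sets, query, false, 100, cancel_flag, background).await
}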

crates/fuzzy/src/strings.rs (new file, 161 lines)

@ -0,0 +1,161 @@
use std::{
borrow::Cow,
cmp::{self, Ordering},
sync::{atomic::AtomicBool, Arc},
};
use gpui::executor;
use crate::{
matcher::{Match, MatchCandidate, Matcher},
CharBag,
};
#[derive(Clone, Debug)]
pub struct StringMatchCandidate {
pub id: usize,
pub string: String,
pub char_bag: CharBag,
}
impl Match for StringMatch {
fn score(&self) -> f64 {
self.score
}
fn set_positions(&mut self, positions: Vec<usize>) {
self.positions = positions;
}
}
impl StringMatchCandidate {
pub fn new(id: usize, string: String) -> Self {
Self {
id,
char_bag: CharBag::from(string.as_str()),
string,
}
}
}
impl<'a> MatchCandidate for &'a StringMatchCandidate {
fn has_chars(&self, bag: CharBag) -> bool {
self.char_bag.is_superset(bag)
}
fn to_string(&self) -> Cow<'a, str> {
self.string.as_str().into()
}
}
#[derive(Clone, Debug)]
pub struct StringMatch {
pub candidate_id: usize,
pub score: f64,
pub positions: Vec<usize>,
pub string: String,
}
impl PartialEq for StringMatch {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for StringMatch {}
impl PartialOrd for StringMatch {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for StringMatch {
fn cmp(&self, other: &Self) -> Ordering {
self.score
.partial_cmp(&other.score)
.unwrap_or(Ordering::Equal)
.then_with(|| self.candidate_id.cmp(&other.candidate_id))
}
}
pub async fn match_strings(
candidates: &[StringMatchCandidate],
query: &str,
smart_case: bool,
max_results: usize,
cancel_flag: &AtomicBool,
background: Arc<executor::Background>,
) -> Vec<StringMatch> {
if candidates.is_empty() || max_results == 0 {
return Default::default();
}
if query.is_empty() {
return candidates
.iter()
.map(|candidate| StringMatch {
candidate_id: candidate.id,
score: 0.,
positions: Default::default(),
string: candidate.string.clone(),
})
.collect();
}
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
let query = &query;
let query_char_bag = CharBag::from(&lowercase_query[..]);
let num_cpus = background.num_cpus().min(candidates.len());
let segment_size = (candidates.len() + num_cpus - 1) / num_cpus;
let mut segment_results = (0..num_cpus)
.map(|_| Vec::with_capacity(max_results.min(candidates.len())))
.collect::<Vec<_>>();
background
.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
let cancel_flag = &cancel_flag;
scope.spawn(async move {
let segment_start = cmp::min(segment_idx * segment_size, candidates.len());
let segment_end = cmp::min(segment_start + segment_size, candidates.len());
let mut matcher = Matcher::new(
query,
lowercase_query,
query_char_bag,
smart_case,
max_results,
);
matcher.match_candidates(
&[],
&[],
candidates[segment_start..segment_end].iter(),
results,
cancel_flag,
|candidate, score| StringMatch {
candidate_id: candidate.id,
score,
positions: Vec::new(),
string: candidate.string.to_string(),
},
);
});
}
})
.await;
let mut results = Vec::new();
for segment_result in segment_results {
if results.is_empty() {
results = segment_result;
} else {
util::extend_sorted(&mut results, segment_result, max_results, |a, b| b.cmp(a));
}
}
results
}
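Below is a minimal usage sketch of the match_strings entry point defined above, not part of the commit. The label list and the hypothetical rank_labels helper are assumptions, and the background executor would normally come from gpui's cx.background(), as in the recent_projects picker added later in this commit.

use std::sync::{atomic::AtomicBool, Arc};

use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
use gpui::executor;

// Sketch: rank `labels` against `query`, mirroring the call made by the
// recent_projects picker.
async fn rank_labels(
    labels: &[String],
    query: &str,
    background: Arc<executor::Background>,
) -> Vec<StringMatch> {
    let candidates: Vec<StringMatchCandidate> = labels
        .iter()
        .enumerate()
        .map(|(id, label)| StringMatchCandidate::new(id, label.clone()))
        .collect();
    // Upper-case characters in the query opt into case-sensitive scoring.
    let smart_case = query.chars().any(|c| c.is_uppercase());
    let cancel_flag = AtomicBool::new(false);
    match_strings(
        &candidates,
        query,
        smart_case,
        100, // max_results
        &cancel_flag,
        background,
    )
    .await
}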


@ -71,18 +71,26 @@ impl BufferDiff {
}
}
pub fn hunks_in_range<'a>(
pub fn hunks_in_row_range<'a>(
&'a self,
query_row_range: Range<u32>,
range: Range<u32>,
buffer: &'a BufferSnapshot,
reversed: bool,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
let start = buffer.anchor_before(Point::new(query_row_range.start, 0));
let end = buffer.anchor_after(Point::new(query_row_range.end, 0));
let start = buffer.anchor_before(Point::new(range.start, 0));
let end = buffer.anchor_after(Point::new(range.end, 0));
self.hunks_intersecting_range(start..end, buffer, reversed)
}
pub fn hunks_intersecting_range<'a>(
&'a self,
range: Range<Anchor>,
buffer: &'a BufferSnapshot,
reversed: bool,
) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| {
let before_start = summary.buffer_range.end.cmp(&start, buffer).is_lt();
let after_end = summary.buffer_range.start.cmp(&end, buffer).is_gt();
let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
!before_start && !after_end
});
@ -141,7 +149,9 @@ impl BufferDiff {
#[cfg(test)]
fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
self.hunks_in_range(0..u32::MAX, text, false)
let start = text.anchor_before(Point::new(0, 0));
let end = text.anchor_after(Point::new(u32::MAX, u32::MAX));
self.hunks_intersecting_range(start..end, text, false)
}
fn diff<'a>(head: &'a str, current: &'a str) -> Option<GitPatch<'a>> {
@ -355,7 +365,7 @@ mod tests {
assert_eq!(diff.hunks(&buffer).count(), 8);
assert_hunks(
diff.hunks_in_range(7..12, &buffer, false),
diff.hunks_in_row_range(7..12, &buffer, false),
&buffer,
&diff_base,
&[


@ -2310,13 +2310,21 @@ impl BufferSnapshot {
})
}
pub fn git_diff_hunks_in_range<'a>(
pub fn git_diff_hunks_in_row_range<'a>(
&'a self,
query_row_range: Range<u32>,
range: Range<u32>,
reversed: bool,
) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
self.git_diff.hunks_in_row_range(range, self, reversed)
}
pub fn git_diff_hunks_intersecting_range<'a>(
&'a self,
range: Range<Anchor>,
reversed: bool,
) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
self.git_diff
.hunks_in_range(query_row_range, self, reversed)
.hunks_intersecting_range(range, self, reversed)
}
pub fn diagnostics_in_range<'a, T, O>(


@ -84,13 +84,13 @@ impl OutlineView {
.active_item(cx)
.and_then(|item| item.downcast::<Editor>())
{
let buffer = editor
let outline = editor
.read(cx)
.buffer()
.read(cx)
.snapshot(cx)
.outline(Some(cx.global::<Settings>().theme.editor.syntax.as_ref()));
if let Some(outline) = buffer {
if let Some(outline) = outline {
workspace.toggle_modal(cx, |_, cx| {
let view = cx.add_view(|cx| OutlineView::new(outline, editor, cx));
cx.subscribe(&view, Self::on_event).detach();


@ -0,0 +1,22 @@
[package]
name = "recent_projects"
version = "0.1.0"
edition = "2021"
[lib]
path = "src/recent_projects.rs"
doctest = false
[dependencies]
db = { path = "../db" }
editor = { path = "../editor" }
fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" }
language = { path = "../language" }
picker = { path = "../picker" }
settings = { path = "../settings" }
text = { path = "../text" }
workspace = { path = "../workspace" }
ordered-float = "2.1.1"
postage = { version = "0.4", features = ["futures-traits"] }
smol = "1.2"


@ -0,0 +1,129 @@
use std::path::Path;
use fuzzy::StringMatch;
use gpui::{
elements::{Label, LabelStyle},
Element, ElementBox,
};
use workspace::WorkspaceLocation;
pub struct HighlightedText {
pub text: String,
pub highlight_positions: Vec<usize>,
char_count: usize,
}
impl HighlightedText {
fn join(components: impl Iterator<Item = Self>, separator: &str) -> Self {
let mut char_count = 0;
let separator_char_count = separator.chars().count();
let mut text = String::new();
let mut highlight_positions = Vec::new();
for component in components {
if char_count != 0 {
text.push_str(separator);
char_count += separator_char_count;
}
highlight_positions.extend(
component
.highlight_positions
.iter()
.map(|position| position + char_count),
);
text.push_str(&component.text);
char_count += component.text.chars().count();
}
Self {
text,
highlight_positions,
char_count,
}
}
pub fn render(self, style: impl Into<LabelStyle>) -> ElementBox {
Label::new(self.text, style)
.with_highlights(self.highlight_positions)
.boxed()
}
}
pub struct HighlightedWorkspaceLocation {
pub names: HighlightedText,
pub paths: Vec<HighlightedText>,
}
impl HighlightedWorkspaceLocation {
pub fn new(string_match: &StringMatch, location: &WorkspaceLocation) -> Self {
let mut path_start_offset = 0;
let (names, paths): (Vec<_>, Vec<_>) = location
.paths()
.iter()
.map(|path| {
let highlighted_text = Self::highlights_for_path(
path.as_ref(),
&string_match.positions,
path_start_offset,
);
path_start_offset += highlighted_text.1.char_count;
highlighted_text
})
.unzip();
Self {
names: HighlightedText::join(names.into_iter().filter_map(|name| name), ", "),
paths,
}
}
// Compute the highlighted text for the name and path
fn highlights_for_path(
path: &Path,
match_positions: &Vec<usize>,
path_start_offset: usize,
) -> (Option<HighlightedText>, HighlightedText) {
let path_string = path.to_string_lossy();
let path_char_count = path_string.chars().count();
// Get the subset of match highlight positions that line up with the given path.
// Also adjusts them to start at the path start
let path_positions = match_positions
.iter()
.copied()
.skip_while(|position| *position < path_start_offset)
.take_while(|position| *position < path_start_offset + path_char_count)
.map(|position| position - path_start_offset)
.collect::<Vec<_>>();
// Again subset the highlight positions to just those that line up with the file_name
// again adjusted to the start of the file_name
let file_name_text_and_positions = path.file_name().map(|file_name| {
let text = file_name.to_string_lossy();
let char_count = text.chars().count();
let file_name_start = path_char_count - char_count;
let highlight_positions = path_positions
.iter()
.copied()
.skip_while(|position| *position < file_name_start)
.take_while(|position| *position < file_name_start + char_count)
.map(|position| position - file_name_start)
.collect::<Vec<_>>();
HighlightedText {
text: text.to_string(),
highlight_positions,
char_count,
}
});
(
file_name_text_and_positions,
HighlightedText {
text: path_string.to_string(),
highlight_positions: path_positions,
char_count: path_char_count,
},
)
}
}
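
Note: the following is a minimal, standalone sketch (plain Rust, not part of this change) of the offset bookkeeping in HighlightedText::join: each component's highlight positions are shifted by the number of characters already emitted, separators included, so they stay valid in the joined string.

// Standalone illustration of the position arithmetic; the tuple of
// (text, positions) stands in for the real HighlightedText struct.
fn join_highlights(components: Vec<(String, Vec<usize>)>, separator: &str) -> (String, Vec<usize>) {
    let mut text = String::new();
    let mut positions = Vec::new();
    let mut char_count = 0;
    for (component_text, component_positions) in components {
        if char_count != 0 {
            text.push_str(separator);
            char_count += separator.chars().count();
        }
        // Rebase this component's highlights onto the joined string.
        positions.extend(component_positions.into_iter().map(|p| p + char_count));
        text.push_str(&component_text);
        char_count += component_text.chars().count();
    }
    (text, positions)
}

fn main() {
    // "zed" highlighted at 0..3 and "docs" at 0..4, joined with ", ",
    // become "zed, docs" highlighted at [0, 1, 2, 5, 6, 7, 8].
    let (text, positions) = join_highlights(
        vec![("zed".into(), vec![0, 1, 2]), ("docs".into(), vec![0, 1, 2, 3])],
        ", ",
    );
    assert_eq!(text, "zed, docs");
    assert_eq!(positions, vec![0, 1, 2, 5, 6, 7, 8]);
}

The real code differs only in that each HighlightedText also carries a char_count field, so callers such as HighlightedWorkspaceLocation::new do not have to recompute lengths.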

View File

@ -0,0 +1,197 @@
mod highlighted_workspace_location;
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
actions,
elements::{ChildView, Flex, ParentElement},
AnyViewHandle, Element, ElementBox, Entity, MutableAppContext, RenderContext, Task, View,
ViewContext, ViewHandle,
};
use highlighted_workspace_location::HighlightedWorkspaceLocation;
use ordered_float::OrderedFloat;
use picker::{Picker, PickerDelegate};
use settings::Settings;
use workspace::{OpenPaths, Workspace, WorkspaceLocation, WORKSPACE_DB};
actions!(recent_projects, [Toggle]);
pub fn init(cx: &mut MutableAppContext) {
cx.add_action(RecentProjectsView::toggle);
Picker::<RecentProjectsView>::init(cx);
}
struct RecentProjectsView {
picker: ViewHandle<Picker<Self>>,
workspace_locations: Vec<WorkspaceLocation>,
selected_match_index: usize,
matches: Vec<StringMatch>,
}
impl RecentProjectsView {
fn new(workspace_locations: Vec<WorkspaceLocation>, cx: &mut ViewContext<Self>) -> Self {
let handle = cx.weak_handle();
Self {
picker: cx.add_view(|cx| {
Picker::new("Recent Projects...", handle, cx).with_max_size(800., 1200.)
}),
workspace_locations,
selected_match_index: 0,
matches: Default::default(),
}
}
fn toggle(_: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
cx.spawn(|workspace, mut cx| async move {
let workspace_locations = cx
.background()
.spawn(async {
WORKSPACE_DB
.recent_workspaces_on_disk()
.await
.unwrap_or_default()
.into_iter()
.map(|(_, location)| location)
.collect()
})
.await;
workspace.update(&mut cx, |workspace, cx| {
workspace.toggle_modal(cx, |_, cx| {
let view = cx.add_view(|cx| Self::new(workspace_locations, cx));
cx.subscribe(&view, Self::on_event).detach();
view
});
})
})
.detach();
}
fn on_event(
workspace: &mut Workspace,
_: ViewHandle<Self>,
event: &Event,
cx: &mut ViewContext<Workspace>,
) {
match event {
Event::Dismissed => workspace.dismiss_modal(cx),
}
}
}
pub enum Event {
Dismissed,
}
impl Entity for RecentProjectsView {
type Event = Event;
}
impl View for RecentProjectsView {
fn ui_name() -> &'static str {
"RecentProjectsView"
}
fn render(&mut self, cx: &mut RenderContext<Self>) -> ElementBox {
ChildView::new(self.picker.clone(), cx).boxed()
}
fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
if cx.is_self_focused() {
cx.focus(&self.picker);
}
}
}
impl PickerDelegate for RecentProjectsView {
fn match_count(&self) -> usize {
self.matches.len()
}
fn selected_index(&self) -> usize {
self.selected_match_index
}
fn set_selected_index(&mut self, ix: usize, _cx: &mut ViewContext<Self>) {
self.selected_match_index = ix;
}
fn update_matches(&mut self, query: String, cx: &mut ViewContext<Self>) -> gpui::Task<()> {
let query = query.trim_start();
let smart_case = query.chars().any(|c| c.is_uppercase());
let candidates = self
.workspace_locations
.iter()
.enumerate()
.map(|(id, location)| {
let combined_string = location
.paths()
.iter()
.map(|path| path.to_string_lossy().to_owned())
.collect::<Vec<_>>()
.join("");
StringMatchCandidate::new(id, combined_string)
})
.collect::<Vec<_>>();
self.matches = smol::block_on(fuzzy::match_strings(
candidates.as_slice(),
query,
smart_case,
100,
&Default::default(),
cx.background().clone(),
));
self.matches.sort_unstable_by_key(|m| m.candidate_id);
self.selected_match_index = self
.matches
.iter()
.enumerate()
.max_by_key(|(_, m)| OrderedFloat(m.score))
.map(|(ix, _)| ix)
.unwrap_or(0);
Task::ready(())
}
fn confirm(&mut self, cx: &mut ViewContext<Self>) {
let selected_match = &self.matches[self.selected_index()];
let workspace_location = &self.workspace_locations[selected_match.candidate_id];
cx.dispatch_global_action(OpenPaths {
paths: workspace_location.paths().as_ref().clone(),
});
cx.emit(Event::Dismissed);
}
fn dismiss(&mut self, cx: &mut ViewContext<Self>) {
cx.emit(Event::Dismissed);
}
fn render_match(
&self,
ix: usize,
mouse_state: &mut gpui::MouseState,
selected: bool,
cx: &gpui::AppContext,
) -> ElementBox {
let settings = cx.global::<Settings>();
let string_match = &self.matches[ix];
let style = settings.theme.picker.item.style_for(mouse_state, selected);
let highlighted_location = HighlightedWorkspaceLocation::new(
&string_match,
&self.workspace_locations[string_match.candidate_id],
);
Flex::column()
.with_child(highlighted_location.names.render(style.label.clone()))
.with_children(
highlighted_location
.paths
.into_iter()
.map(|highlighted_path| highlighted_path.render(style.label.clone())),
)
.flex(1., false)
.contained()
.with_style(style.container)
.named("match")
}
}
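
Note: update_matches fuzzy-matches each location against its paths concatenated with no separator (the join("") above), which is what lets highlights_for_path in the previous file carve the combined match positions back up per path via path_start_offset. A standalone sketch (plain Rust, simplified types) of that splitting:

fn positions_for_segment(
    match_positions: &[usize],
    segment_start: usize,
    segment_char_count: usize,
) -> Vec<usize> {
    // Keep only the positions that fall inside this segment of the
    // concatenated string, rebased so the segment starts at zero.
    match_positions
        .iter()
        .copied()
        .skip_while(|&p| p < segment_start)
        .take_while(|&p| p < segment_start + segment_char_count)
        .map(|p| p - segment_start)
        .collect()
}

fn main() {
    // Two paths, "/a/one" and "/b/two" (6 chars each), are matched as the
    // single candidate "/a/one/b/two". A match at positions 4, 5, and 9
    // splits into [4, 5] for the first path and [3] for the second.
    let positions: [usize; 3] = [4, 5, 9];
    assert_eq!(positions_for_segment(&positions, 0, 6), vec![4, 5]);
    assert_eq!(positions_for_segment(&positions, 6, 6), vec![3]);
}

Also worth noting from the code above: sorting the matches by candidate_id keeps the displayed list in the recency order the candidates were built in, while max_by_key on the score only decides which row starts out selected.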

View File

@ -897,9 +897,10 @@ message UpdateView {
repeated ExcerptInsertion inserted_excerpts = 1;
repeated uint64 deleted_excerpts = 2;
repeated Selection selections = 3;
EditorAnchor scroll_top_anchor = 4;
float scroll_x = 5;
float scroll_y = 6;
optional Selection pending_selection = 4;
EditorAnchor scroll_top_anchor = 5;
float scroll_x = 6;
float scroll_y = 7;
}
}
@ -916,9 +917,10 @@ message View {
optional string title = 2;
repeated Excerpt excerpts = 3;
repeated Selection selections = 4;
EditorAnchor scroll_top_anchor = 5;
float scroll_x = 6;
float scroll_y = 7;
optional Selection pending_selection = 5;
EditorAnchor scroll_top_anchor = 6;
float scroll_x = 7;
float scroll_y = 8;
}
}

View File

@ -6,4 +6,4 @@ pub use conn::Connection;
pub use peer::*;
mod macros;
pub const PROTOCOL_VERSION: u32 = 43;
pub const PROTOCOL_VERSION: u32 = 44;

View File

@ -334,6 +334,15 @@ impl Item for ProjectSearchView {
.update(cx, |editor, cx| editor.navigate(data, cx))
}
fn git_diff_recalc(
&mut self,
project: ModelHandle<Project>,
cx: &mut ViewContext<Self>,
) -> Task<anyhow::Result<()>> {
self.results_editor
.update(cx, |editor, cx| editor.git_diff_recalc(project, cx))
}
fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
match event {
ViewEvent::UpdateTab => vec![ItemEvent::UpdateBreadcrumbs, ItemEvent::UpdateTab],

View File

@ -597,6 +597,10 @@ where
self.cursor.item()
}
pub fn item_summary(&self) -> Option<&'a T::Summary> {
self.cursor.item_summary()
}
pub fn next(&mut self, cx: &<T::Summary as Summary>::Context) {
self.cursor.next_internal(&mut self.filter_node, cx);
}

View File

@ -96,10 +96,10 @@ impl WorkspaceDb {
WorkspaceLocation,
bool,
DockPosition,
) =
) =
self.select_row_bound(sql!{
SELECT workspace_id, workspace_location, left_sidebar_open, dock_visible, dock_anchor
FROM workspaces
FROM workspaces
WHERE workspace_location = ?
})
.and_then(|mut prepared_statement| (prepared_statement)(&workspace_location))
@ -195,15 +195,38 @@ impl WorkspaceDb {
}
query! {
pub fn recent_workspaces(limit: usize) -> Result<Vec<(WorkspaceId, WorkspaceLocation)>> {
fn recent_workspaces() -> Result<Vec<(WorkspaceId, WorkspaceLocation)>> {
SELECT workspace_id, workspace_location
FROM workspaces
WHERE workspace_location IS NOT NULL
ORDER BY timestamp DESC
LIMIT ?
}
}
query! {
async fn delete_stale_workspace(id: WorkspaceId) -> Result<()> {
DELETE FROM workspaces
WHERE workspace_id IS ?
}
}
// Returns the recent locations which are still valid on disk and deletes ones which no longer
// exist.
pub async fn recent_workspaces_on_disk(&self) -> Result<Vec<(WorkspaceId, WorkspaceLocation)>> {
let mut result = Vec::new();
let mut delete_tasks = Vec::new();
for (id, location) in self.recent_workspaces()? {
            if location.paths().iter().all(|path| path.exists()) {
result.push((id, location));
} else {
delete_tasks.push(self.delete_stale_workspace(id));
}
}
futures::future::join_all(delete_tasks).await;
Ok(result)
}
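
Note: a minimal, standalone sketch (hypothetical entries, no real database; assumes only the smol and futures crates) of the pattern recent_workspaces_on_disk uses above: keep entries whose paths still exist, queue a delete for each stale one, and await the deletes together with join_all instead of one at a time.

use std::path::PathBuf;

// Stand-in for delete_stale_workspace; a real implementation would hit the DB.
async fn delete_entry(id: usize) {
    println!("deleting stale workspace {}", id);
}

fn main() {
    let entries: Vec<(usize, PathBuf)> = vec![
        (1, PathBuf::from("/")),             // exists on any Unix-like system
        (2, PathBuf::from("/no/such/path")), // stale
    ];

    let kept = smol::block_on(async {
        let mut kept = Vec::new();
        let mut deletions = Vec::new();
        for (id, path) in entries {
            if path.exists() {
                kept.push((id, path));
            } else {
                deletions.push(delete_entry(id));
            }
        }
        // Run all deletions concurrently before returning the surviving entries.
        futures::future::join_all(deletions).await;
        kept
    });
    assert_eq!(kept.len(), 1);
}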
query! {
pub fn last_workspace() -> Result<Option<WorkspaceLocation>> {
SELECT workspace_location

View File

@ -60,7 +60,7 @@ pub use pane_group::*;
use persistence::{model::SerializedItem, DB};
pub use persistence::{
model::{ItemId, WorkspaceLocation},
WorkspaceDb,
WorkspaceDb, DB as WORKSPACE_DB,
};
use postage::prelude::Stream;
use project::{Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId};

View File

@ -44,6 +44,7 @@ plugin_runtime = { path = "../plugin_runtime" }
project = { path = "../project" }
project_panel = { path = "../project_panel" }
project_symbols = { path = "../project_symbols" }
recent_projects = { path = "../recent_projects" }
rpc = { path = "../rpc" }
settings = { path = "../settings" }
sum_tree = { path = "../sum_tree" }

View File

@ -93,7 +93,7 @@ impl LspAdapter for RustLspAdapter {
}
async fn disk_based_diagnostics_progress_token(&self) -> Option<String> {
Some("rust-analyzer/checkOnSave".into())
Some("rust-analyzer/flycheck".into())
}
async fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {

View File

@ -123,6 +123,7 @@ fn main() {
vim::init(cx);
terminal_view::init(cx);
theme_testbench::init(cx);
recent_projects::init(cx);
cx.spawn(|cx| watch_themes(fs.clone(), themes.clone(), cx))
.detach();

View File

@ -79,6 +79,11 @@ pub fn menus() -> Vec<Menu<'static>> {
name: "Open…",
action: Box::new(workspace::Open),
},
MenuItem::Action {
            name: "Open Recent…",
action: Box::new(recent_projects::Toggle),
},
MenuItem::Separator,
MenuItem::Action {
name: "Add Folder to Project…",
action: Box::new(workspace::AddFolderToProject),

View File

@ -36,6 +36,7 @@ position_1=0,0
position_2=${width},0
# Authenticate using the collab server's admin secret.
export ZED_STATELESS=1
export ZED_ADMIN_API_TOKEN=secret
export ZED_SERVER_URL=http://localhost:8080
export ZED_WINDOW_SIZE=${width},${height}