util: Use GlobSet in PathMatcher (#13197)
Previously, PathMatcher wrapped a single globset::Glob; higher up the stack, we were
then resorting to passing around lists of PathMatchers. The globset crate exposes a
GlobSet type that is better suited for this use case. In my benchmarks, a single
GlobSet-backed PathMatcher is about 3 times faster than a Vec of Glob-backed
PathMatchers with the default 'file_scan_exclusions' values. This slightly improves
project load time for projects with a large number of files, as shown in the
following videos of loading a project with 100k source files. That project is *not*
a git repository, so the videos should measure raw overhead on our side.
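
For context, here is a minimal sketch of the idea (not the Zed code itself): a single GlobSet answers "does any of these globs match?" in one pass, instead of looping over a Vec of independently compiled matchers. The glob patterns below are illustrative, not the actual defaults.

```rust
use globset::{Glob, GlobSet, GlobSetBuilder};
use std::path::Path;

// Compile many globs into one GlobSet that is queried once per path.
fn build_matcher(globs: &[String]) -> Result<GlobSet, globset::Error> {
    let mut builder = GlobSetBuilder::new();
    for glob in globs {
        builder.add(Glob::new(glob)?);
    }
    builder.build()
}

fn main() -> Result<(), globset::Error> {
    // Exclusion-style patterns, similar in spirit to 'file_scan_exclusions'.
    let globs = vec!["**/.git".to_owned(), "**/node_modules".to_owned()];
    let matcher = build_matcher(&globs)?;
    assert!(matcher.is_match(Path::new("project/node_modules")));
    assert!(!matcher.is_match(Path::new("project/src/main.rs")));
    Ok(())
}
```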
Current nightly: 51404d4ea0
https://github.com/zed-industries/zed/assets/24362066/e0aa9f8c-aae6-4348-8d42-d20bd41fcd76
versus this PR:
https://github.com/zed-industries/zed/assets/24362066/408dcab1-cee2-4c9e-a541-a31d14772dd7
Release Notes:
- Improved performance in large worktrees
This commit is contained in:
parent 64d815a176
commit 5dc26c261d
@@ -220,7 +220,7 @@ struct Options {
 const INCLUDE_WARNINGS_ARGUMENT: &str = "--include-warnings";

 impl Options {
-pub fn parse(arguments_line: Option<&str>) -> Self {
+fn parse(arguments_line: Option<&str>) -> Self {
 arguments_line
 .map(|arguments_line| {
 let args = arguments_line.split_whitespace().collect::<Vec<_>>();
@@ -230,7 +230,7 @@ impl Options {
 if arg == INCLUDE_WARNINGS_ARGUMENT {
 include_warnings = true;
 } else {
-path_matcher = PathMatcher::new(arg).log_err();
+path_matcher = PathMatcher::new(&[arg.to_owned()]).log_err();
 }
 }
 Self {
@@ -255,7 +255,8 @@ fn collect_diagnostics(
 cx: &mut AppContext,
 ) -> Task<Result<(String, Vec<(Range<usize>, PlaceholderType)>)>> {
 let error_source = if let Some(path_matcher) = &options.path_matcher {
-Some(path_matcher.source().to_string())
+debug_assert_eq!(path_matcher.sources().len(), 1);
+Some(path_matcher.sources().first().cloned().unwrap_or_default())
 } else {
 None
 };
@@ -183,7 +183,7 @@ fn collect_files(
 fs: Arc<dyn Fs>,
 cx: &mut AppContext,
 ) -> Task<Result<(String, Vec<(Range<usize>, PathBuf, EntryType)>)>> {
-let Ok(matcher) = PathMatcher::new(glob_input) else {
+let Ok(matcher) = PathMatcher::new(&[glob_input.to_owned()]) else {
 return Task::ready(Err(anyhow!("invalid path")));
 };

@@ -4904,7 +4904,15 @@ async fn test_project_search(
 let mut results = HashMap::default();
 let mut search_rx = project_b.update(cx_b, |project, cx| {
 project.search(
-SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
+SearchQuery::text(
+"world",
+false,
+false,
+false,
+Default::default(),
+Default::default(),
+)
+.unwrap(),
 cx,
 )
 });
@@ -875,7 +875,14 @@ impl RandomizedTest for ProjectCollaborationTest {

 let mut search = project.update(cx, |project, cx| {
 project.search(
-SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
+SearchQuery::text(
+query,
+false,
+false,
+false,
+Default::default(),
+Default::default(),
+)
 .unwrap(),
 cx,
 )
@@ -25,8 +25,15 @@ use crate::panel_settings::MessageEditorSettings;
 const MENTIONS_DEBOUNCE_INTERVAL: Duration = Duration::from_millis(50);

 lazy_static! {
-static ref MENTIONS_SEARCH: SearchQuery =
-SearchQuery::regex("@[-_\\w]+", false, false, false, Vec::new(), Vec::new()).unwrap();
+static ref MENTIONS_SEARCH: SearchQuery = SearchQuery::regex(
+"@[-_\\w]+",
+false,
+false,
+false,
+Default::default(),
+Default::default()
+)
+.unwrap();
 }

 pub struct MessageEditor {
@@ -113,7 +113,7 @@ impl Prettier {
 None
 }
 }).any(|workspace_definition| {
-if let Some(path_matcher) = PathMatcher::new(&workspace_definition).ok() {
+if let Some(path_matcher) = PathMatcher::new(&[workspace_definition.clone()]).ok() {
 path_matcher.is_match(subproject_path)
 } else {
 workspace_definition == subproject_path.to_string_lossy()
@@ -3929,7 +3929,15 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
 assert_eq!(
 search(
 &project,
-SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
+SearchQuery::text(
+"TWO",
+false,
+true,
+false,
+Default::default(),
+Default::default()
+)
+.unwrap(),
 cx
 )
 .await
@@ -3954,7 +3962,15 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
 assert_eq!(
 search(
 &project,
-SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
+SearchQuery::text(
+"TWO",
+false,
+true,
+false,
+Default::default(),
+Default::default()
+)
+.unwrap(),
 cx
 )
 .await
@@ -3994,8 +4010,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
 false,
 true,
 false,
-vec![PathMatcher::new("*.odd").unwrap()],
-Vec::new()
+PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
+Default::default()
 )
 .unwrap(),
 cx
@@ -4014,8 +4030,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
 false,
 true,
 false,
-vec![PathMatcher::new("*.rs").unwrap()],
-Vec::new()
+PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
+Default::default()
 )
 .unwrap(),
 cx
@@ -4037,11 +4053,10 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
 false,
 true,
 false,
-vec![
-PathMatcher::new("*.ts").unwrap(),
-PathMatcher::new("*.odd").unwrap(),
-],
-Vec::new()
+PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
+Default::default(),
 ).unwrap(),
 cx
 )
@@ -4062,12 +4077,10 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
 false,
 true,
 false,
-vec![
-PathMatcher::new("*.rs").unwrap(),
-PathMatcher::new("*.ts").unwrap(),
-PathMatcher::new("*.odd").unwrap(),
-],
-Vec::new()
+PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
+Default::default(),
 ).unwrap(),
 cx
 )
@@ -4110,8 +4123,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
 false,
 true,
 false,
-Vec::new(),
-vec![PathMatcher::new("*.odd").unwrap()],
+Default::default(),
+PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
 )
 .unwrap(),
 cx
@@ -4135,8 +4148,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
 false,
 true,
 false,
-Vec::new(),
-vec![PathMatcher::new("*.rs").unwrap()],
+Default::default(),
+PathMatcher::new(&["*.rs".to_owned()]).unwrap()
 )
 .unwrap(),
 cx
@@ -4158,11 +4171,10 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
 false,
 true,
 false,
-Vec::new(),
-vec![
-PathMatcher::new("*.ts").unwrap(),
-PathMatcher::new("*.odd").unwrap(),
-],
+Default::default(),
+PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
 ).unwrap(),
 cx
 )
@@ -4183,12 +4195,10 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
 false,
 true,
 false,
-Vec::new(),
-vec![
-PathMatcher::new("*.rs").unwrap(),
-PathMatcher::new("*.ts").unwrap(),
-PathMatcher::new("*.odd").unwrap(),
-],
+Default::default(),
+PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
 ).unwrap(),
 cx
 )
@@ -4225,8 +4235,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
 false,
 true,
 false,
-vec![PathMatcher::new("*.odd").unwrap()],
-vec![PathMatcher::new("*.odd").unwrap()],
+PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
+PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
 )
 .unwrap(),
 cx
@@ -4245,8 +4255,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
 false,
 true,
 false,
-vec![PathMatcher::new("*.ts").unwrap()],
-vec![PathMatcher::new("*.ts").unwrap()],
+PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
+PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
 ).unwrap(),
 cx
 )
@@ -4264,14 +4274,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
 false,
 true,
 false,
-vec![
-PathMatcher::new("*.ts").unwrap(),
-PathMatcher::new("*.odd").unwrap()
-],
-vec![
-PathMatcher::new("*.ts").unwrap(),
-PathMatcher::new("*.odd").unwrap()
-],
+PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
+PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
 )
 .unwrap(),
 cx
@@ -4290,14 +4294,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
 false,
 true,
 false,
-vec![
-PathMatcher::new("*.ts").unwrap(),
-PathMatcher::new("*.odd").unwrap()
-],
-vec![
-PathMatcher::new("*.rs").unwrap(),
-PathMatcher::new("*.odd").unwrap()
-],
+PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
+PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
 )
 .unwrap(),
 cx
@@ -4349,8 +4347,8 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo
 false,
 true,
 false,
-vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
-Vec::new()
+PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
+Default::default()
 )
 .unwrap(),
 cx
@@ -4368,8 +4366,8 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo
 false,
 true,
 false,
-vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
-Vec::new()
+PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
+Default::default()
 )
 .unwrap(),
 cx
@@ -4388,8 +4386,8 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo
 false,
 true,
 false,
-vec![PathMatcher::new("*.ts").unwrap()],
-Vec::new()
+PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
+Default::default()
 )
 .unwrap(),
 cx
@@ -4437,7 +4435,15 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
 assert_eq!(
 search(
 &project,
-SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
+SearchQuery::text(
+query,
+false,
+false,
+false,
+Default::default(),
+Default::default()
+)
+.unwrap(),
 cx
 )
 .await
@@ -4450,7 +4456,15 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
 assert_eq!(
 search(
 &project,
-SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
+SearchQuery::text(
+query,
+false,
+false,
+true,
+Default::default(),
+Default::default()
+)
+.unwrap(),
 cx
 )
 .await
@@ -4475,8 +4489,8 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
 "Unrestricted search with ignored directories should find every file with the query"
 );

-let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
-let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
+let files_to_include = PathMatcher::new(&["/dir/node_modules/prettier/**".to_owned()]).unwrap();
+let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
 assert_eq!(
 search(
@@ -1,7 +1,6 @@
 use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
-use anyhow::{Context, Result};
+use anyhow::Result;
 use client::proto;
-use itertools::Itertools;
 use language::{char_kind, BufferSnapshot};
 use regex::{Captures, Regex, RegexBuilder};
 use smol::future::yield_now;
@@ -19,18 +18,18 @@ static TEXT_REPLACEMENT_SPECIAL_CHARACTERS_REGEX: OnceLock<Regex> = OnceLock::ne
 #[derive(Clone, Debug)]
 pub struct SearchInputs {
 query: Arc<str>,
-files_to_include: Vec<PathMatcher>,
-files_to_exclude: Vec<PathMatcher>,
+files_to_include: PathMatcher,
+files_to_exclude: PathMatcher,
 }

 impl SearchInputs {
 pub fn as_str(&self) -> &str {
 self.query.as_ref()
 }
-pub fn files_to_include(&self) -> &[PathMatcher] {
+pub fn files_to_include(&self) -> &PathMatcher {
 &self.files_to_include
 }
-pub fn files_to_exclude(&self) -> &[PathMatcher] {
+pub fn files_to_exclude(&self) -> &PathMatcher {
 &self.files_to_exclude
 }
 }
@@ -62,8 +61,8 @@ impl SearchQuery {
 whole_word: bool,
 case_sensitive: bool,
 include_ignored: bool,
-files_to_include: Vec<PathMatcher>,
-files_to_exclude: Vec<PathMatcher>,
+files_to_include: PathMatcher,
+files_to_exclude: PathMatcher,
 ) -> Result<Self> {
 let query = query.to_string();
 let search = AhoCorasickBuilder::new()
@@ -89,8 +88,8 @@ impl SearchQuery {
 whole_word: bool,
 case_sensitive: bool,
 include_ignored: bool,
-files_to_include: Vec<PathMatcher>,
-files_to_exclude: Vec<PathMatcher>,
+files_to_include: PathMatcher,
+files_to_exclude: PathMatcher,
 ) -> Result<Self> {
 let mut query = query.to_string();
 let initial_query = Arc::from(query.as_str());
@@ -167,16 +166,8 @@ impl SearchQuery {
 whole_word: self.whole_word(),
 case_sensitive: self.case_sensitive(),
 include_ignored: self.include_ignored(),
-files_to_include: self
-.files_to_include()
-.iter()
-.map(|matcher| matcher.to_string())
-.join(","),
-files_to_exclude: self
-.files_to_exclude()
-.iter()
-.map(|matcher| matcher.to_string())
-.join(","),
+files_to_include: self.files_to_include().sources().join(","),
+files_to_exclude: self.files_to_exclude().sources().join(","),
 }
 }

@@ -377,11 +368,11 @@ impl SearchQuery {
 matches!(self, Self::Regex { .. })
 }

-pub fn files_to_include(&self) -> &[PathMatcher] {
+pub fn files_to_include(&self) -> &PathMatcher {
 self.as_inner().files_to_include()
 }

-pub fn files_to_exclude(&self) -> &[PathMatcher] {
+pub fn files_to_exclude(&self) -> &PathMatcher {
 self.as_inner().files_to_exclude()
 }

@@ -390,17 +381,10 @@ impl SearchQuery {
 Some(file_path) => {
 let mut path = file_path.to_path_buf();
 loop {
-if self
-.files_to_exclude()
-.iter()
-.any(|exclude_glob| exclude_glob.is_match(&path))
-{
+if self.files_to_exclude().is_match(&path) {
 return false;
-} else if self.files_to_include().is_empty()
-|| self
-.files_to_include()
-.iter()
-.any(|include_glob| include_glob.is_match(&path))
+} else if self.files_to_include().sources().is_empty()
+|| self.files_to_include().is_match(&path)
 {
 return true;
 } else if !path.pop() {
@@ -408,7 +392,7 @@ impl SearchQuery {
 }
 }
 }
-None => self.files_to_include().is_empty(),
+None => self.files_to_include().sources().is_empty(),
 }
 }
 pub fn as_inner(&self) -> &SearchInputs {
@@ -418,16 +402,13 @@ impl SearchQuery {
 }
 }

-fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {
-glob_set
+fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<PathMatcher> {
+let globs = glob_set
 .split(',')
 .map(str::trim)
-.filter(|glob_str| !glob_str.is_empty())
-.map(|glob_str| {
-PathMatcher::new(glob_str)
-.with_context(|| format!("deserializing path match glob {glob_str}"))
-})
-.collect()
+.filter_map(|glob_str| (!glob_str.is_empty()).then(|| glob_str.to_owned()))
+.collect::<Vec<_>>();
+Ok(PathMatcher::new(&globs)?)
 }

 #[cfg(test)]
@@ -445,7 +426,7 @@ mod tests {
 "dir/[a-z].txt",
 "../dir/filé",
 ] {
-let path_matcher = PathMatcher::new(valid_path).unwrap_or_else(|e| {
+let path_matcher = PathMatcher::new(&[valid_path.to_owned()]).unwrap_or_else(|e| {
 panic!("Valid path {valid_path} should be accepted, but got: {e}")
 });
 assert!(
@@ -458,7 +439,7 @@ mod tests {
 #[test]
 fn path_matcher_creation_for_globs() {
 for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
-match PathMatcher::new(invalid_glob) {
+match PathMatcher::new(&[invalid_glob.to_owned()]) {
 Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
 Err(_expected) => {}
 }
@@ -471,9 +452,9 @@ mod tests {
 "dir/[a-z].txt",
 "{dir,file}",
 ] {
-match PathMatcher::new(valid_glob) {
+match PathMatcher::new(&[valid_glob.to_owned()]) {
 Ok(_expected) => {}
-Err(e) => panic!("Valid glob {valid_glob} should be accepted, but got: {e}"),
+Err(e) => panic!("Valid glob should be accepted, but got: {e}"),
 }
 }
 }
@@ -927,8 +927,8 @@ impl BufferSearchBar {
 self.search_options.contains(SearchOptions::WHOLE_WORD),
 self.search_options.contains(SearchOptions::CASE_SENSITIVE),
 false,
-Vec::new(),
-Vec::new(),
+Default::default(),
+Default::default(),
 ) {
 Ok(query) => query.with_replacement(self.replacement(cx)),
 Err(_) => {
@@ -944,8 +944,8 @@ impl BufferSearchBar {
 self.search_options.contains(SearchOptions::WHOLE_WORD),
 self.search_options.contains(SearchOptions::CASE_SENSITIVE),
 false,
-Vec::new(),
-Vec::new(),
+Default::default(),
+Default::default(),
 ) {
 Ok(query) => query.with_replacement(self.replacement(cx)),
 Err(_) => {
@@ -3,7 +3,6 @@ use crate::{
 SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleIncludeIgnored, ToggleRegex,
 ToggleReplace, ToggleWholeWord,
 };
-use anyhow::Context as _;
 use collections::{HashMap, HashSet};
 use editor::{
 actions::SelectAll,
@@ -876,7 +875,7 @@ impl ProjectSearchView {
 if should_mark_error {
 cx.notify();
 }
-vec![]
+PathMatcher::default()
 }
 };
 let excluded_files =
@@ -894,7 +893,7 @@ impl ProjectSearchView {
 if should_mark_error {
 cx.notify();
 }
-vec![]
+PathMatcher::default()
 }
 };

@@ -960,15 +959,14 @@ impl ProjectSearchView {
 query
 }

-fn parse_path_matches(text: &str) -> anyhow::Result<Vec<PathMatcher>> {
-text.split(',')
+fn parse_path_matches(text: &str) -> anyhow::Result<PathMatcher> {
+let queries = text
+.split(',')
 .map(str::trim)
 .filter(|maybe_glob_str| !maybe_glob_str.is_empty())
-.map(|maybe_glob_str| {
-PathMatcher::new(maybe_glob_str)
-.with_context(|| format!("parsing {maybe_glob_str} as path matcher"))
-})
-.collect()
+.map(str::to_owned)
+.collect::<Vec<_>>();
+Ok(PathMatcher::new(&queries)?)
 }

 fn select_match(&mut self, direction: Direction, cx: &mut ViewContext<Self>) {
@@ -1024,8 +1024,8 @@ impl SearchableItem for TerminalView {
 query.whole_word(),
 query.case_sensitive(),
 query.include_ignored(),
-query.files_to_include().to_vec(),
-query.files_to_exclude().to_vec(),
+query.files_to_include().clone(),
+query.files_to_exclude().clone(),
 )
 .unwrap()),
 ),
@@ -3,7 +3,7 @@ use std::{
 path::{Path, PathBuf},
 };

-use globset::{Glob, GlobMatcher};
+use globset::{Glob, GlobSet, GlobSetBuilder};
 use serde::{Deserialize, Serialize};

 lazy_static::lazy_static! {
@@ -257,43 +257,51 @@ impl<P> PathLikeWithPosition<P> {
 }
 }

-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct PathMatcher {
-source: String,
-glob: GlobMatcher,
+sources: Vec<String>,
+glob: GlobSet,
 }

-impl std::fmt::Display for PathMatcher {
-fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-self.source.fmt(f)
-}
-}
+// impl std::fmt::Display for PathMatcher {
+// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+// self.sources.fmt(f)
+// }
+// }

 impl PartialEq for PathMatcher {
 fn eq(&self, other: &Self) -> bool {
-self.source.eq(&other.source)
+self.sources.eq(&other.sources)
 }
 }

 impl Eq for PathMatcher {}

 impl PathMatcher {
-pub fn new(source: &str) -> Result<Self, globset::Error> {
-Ok(PathMatcher {
-glob: Glob::new(source)?.compile_matcher(),
-source: String::from(source),
-})
+pub fn new(globs: &[String]) -> Result<Self, globset::Error> {
+let globs = globs
+.into_iter()
+.map(|glob| Glob::new(&glob))
+.collect::<Result<Vec<_>, _>>()?;
+let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect();
+let mut glob_builder = GlobSetBuilder::new();
+for single_glob in globs {
+glob_builder.add(single_glob);
+}
+let glob = glob_builder.build()?;
+Ok(PathMatcher { glob, sources })
 }

-pub fn source(&self) -> &str {
-&self.source
+pub fn sources(&self) -> &[String] {
+&self.sources
 }

 pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
 let other_path = other.as_ref();
-other_path.starts_with(Path::new(&self.source))
-|| other_path.ends_with(Path::new(&self.source))
-|| self.glob.is_match(other_path)
+self.sources.iter().any(|source| {
+let as_bytes = other_path.as_os_str().as_encoded_bytes();
+as_bytes.starts_with(source.as_bytes()) || as_bytes.ends_with(source.as_bytes())
+}) || self.glob.is_match(other_path)
 || self.check_with_end_separator(other_path)
 }

@@ -534,20 +542,20 @@ mod tests {
 #[test]
 fn edge_of_glob() {
 let path = Path::new("/work/node_modules");
-let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
+let path_matcher = PathMatcher::new(&["**/node_modules/**".to_owned()]).unwrap();
 assert!(
 path_matcher.is_match(path),
-"Path matcher {path_matcher} should match {path:?}"
+"Path matcher should match {path:?}"
 );
 }

 #[test]
 fn project_search() {
 let path = Path::new("/Users/someonetoignore/work/zed/zed.dev/node_modules");
-let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
+let path_matcher = PathMatcher::new(&["**/node_modules/**".to_owned()]).unwrap();
 assert!(
 path_matcher.is_match(path),
-"Path matcher {path_matcher} should match {path:?}"
+"Path matcher should match {path:?}"
 );
 }
 }
@@ -1,5 +1,6 @@
-use std::{path::Path, sync::Arc};
+use std::path::Path;

+use anyhow::Context;
 use gpui::AppContext;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
@@ -8,25 +9,19 @@ use util::paths::PathMatcher;

 #[derive(Clone, PartialEq, Eq)]
 pub struct WorktreeSettings {
-pub file_scan_exclusions: Arc<[PathMatcher]>,
-pub private_files: Arc<[PathMatcher]>,
+pub file_scan_exclusions: PathMatcher,
+pub private_files: PathMatcher,
 }

 impl WorktreeSettings {
 pub fn is_path_private(&self, path: &Path) -> bool {
-path.ancestors().any(|ancestor| {
-self.private_files
-.iter()
-.any(|matcher| matcher.is_match(&ancestor))
-})
+path.ancestors()
+.any(|ancestor| self.private_files.is_match(&ancestor))
 }

 pub fn is_path_excluded(&self, path: &Path) -> bool {
-path.ancestors().any(|ancestor| {
-self.file_scan_exclusions
-.iter()
-.any(|matcher| matcher.is_match(&ancestor))
-})
+path.ancestors()
+.any(|ancestor| self.file_scan_exclusions.is_match(&ancestor))
 }
 }

@@ -67,25 +62,12 @@ impl Settings for WorktreeSettings {
 file_scan_exclusions.sort();
 private_files.sort();
 Ok(Self {
-file_scan_exclusions: path_matchers(&file_scan_exclusions, "file_scan_exclusions"),
-private_files: path_matchers(&private_files, "private_files"),
+file_scan_exclusions: path_matchers(&file_scan_exclusions, "file_scan_exclusions")?,
+private_files: path_matchers(&private_files, "private_files")?,
 })
 }
 }

-fn path_matchers(values: &[String], context: &'static str) -> Arc<[PathMatcher]> {
-values
-.iter()
-.filter_map(|pattern| {
-PathMatcher::new(pattern)
-.map(Some)
-.unwrap_or_else(|e| {
-log::error!(
-"Skipping pattern {pattern} in `{}` project settings due to parsing error: {e:#}", context
-);
-None
-})
-})
-.collect::<Vec<_>>()
-.into()
+fn path_matchers(values: &[String], context: &'static str) -> anyhow::Result<PathMatcher> {
+PathMatcher::new(values).with_context(|| format!("Failed to parse globs from {}", context))
 }