mirror of https://github.com/zed-industries/zed.git, synced 2024-11-08 07:35:01 +03:00

Output non-ignored files first

This commit is contained in:
parent 92953fb53d
commit 566857b0b7
@@ -557,6 +557,7 @@ enum SearchMatchCandidate {
     },
     Path {
         worktree_id: WorktreeId,
+        is_ignored: bool,
         path: Arc<Path>,
     },
 }
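
The hunk above adds an `is_ignored` flag to the `Path` candidate so later stages can tell gitignored files apart from tracked ones. Below is a minimal, self-contained sketch of that shape: `WorktreeId` is reduced to a plain integer, the open-buffer variant is omitted, and `path()` is only in the spirit of the `candidate.path()` calls used later in the diff, while `is_ignored()` is a made-up helper.

use std::path::Path;
use std::sync::Arc;

// Simplified stand-in for the worktree id type used in the real enum.
type WorktreeId = usize;

// A reduced version of the candidate enum from the hunk above: only the
// `Path` variant is modeled here, with the new `is_ignored` flag.
enum SearchMatchCandidate {
    Path {
        worktree_id: WorktreeId,
        is_ignored: bool,
        path: Arc<Path>,
    },
}

impl SearchMatchCandidate {
    // Accessor in the spirit of the `candidate.path()` calls later in the
    // diff; the real method also covers the open-buffer variant.
    fn path(&self) -> Arc<Path> {
        match self {
            SearchMatchCandidate::Path { path, .. } => path.clone(),
        }
    }

    // Illustrative helper: read the flag back out of the variant.
    fn is_ignored(&self) -> bool {
        matches!(self, SearchMatchCandidate::Path { is_ignored: true, .. })
    }
}

fn main() {
    let candidate = SearchMatchCandidate::Path {
        worktree_id: 0,
        is_ignored: true,
        path: Arc::from(Path::new("target/debug/build.log")),
    };
    println!("{:?} ignored: {}", candidate.path(), candidate.is_ignored());
}

Storing the flag on the candidate itself, rather than re-querying the worktree later, is what lets the sorting stage further down order ignored results after everything else.
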
@@ -5743,88 +5744,9 @@ impl Project {
                 .log_err();
         }
 
-        // TODO kb parallelize directory traversal
         background
             .scoped(|scope| {
-                if query.include_ignored() {
-                    scope.spawn(async move {
-                        for snapshot in snapshots {
-                            for ignored_entry in snapshot
-                                .entries(query.include_ignored())
-                                .filter(|e| e.is_ignored)
-                            {
-                                let mut ignored_paths_to_process =
-                                    VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
-                                while let Some(ignored_abs_path) =
-                                    ignored_paths_to_process.pop_front()
-                                {
-                                    if !query.file_matches(Some(&ignored_abs_path))
-                                        || snapshot.is_abs_path_excluded(&ignored_abs_path)
-                                    {
-                                        continue;
-                                    }
-                                    if let Some(fs_metadata) = fs
-                                        .metadata(&ignored_abs_path)
-                                        .await
-                                        .with_context(|| {
-                                            format!("fetching fs metadata for {ignored_abs_path:?}")
-                                        })
-                                        .log_err()
-                                        .flatten()
-                                    {
-                                        if fs_metadata.is_dir {
-                                            if let Some(mut subfiles) = fs
-                                                .read_dir(&ignored_abs_path)
-                                                .await
-                                                .with_context(|| {
-                                                    format!(
-                                                        "listing ignored path {ignored_abs_path:?}"
-                                                    )
-                                                })
-                                                .log_err()
-                                            {
-                                                while let Some(subfile) = subfiles.next().await {
-                                                    if let Some(subfile) = subfile.log_err() {
-                                                        ignored_paths_to_process
-                                                            .push_front(subfile);
-                                                    }
-                                                }
-                                            }
-                                        } else if !fs_metadata.is_symlink {
-                                            let matches = if let Some(file) = fs
-                                                .open_sync(&ignored_abs_path)
-                                                .await
-                                                .with_context(|| {
-                                                    format!(
-                                                        "Opening ignored path {ignored_abs_path:?}"
-                                                    )
-                                                })
-                                                .log_err()
-                                            {
-                                                query.detect(file).unwrap_or(false)
-                                            } else {
-                                                false
-                                            };
-                                            if matches {
-                                                let project_path = SearchMatchCandidate::Path {
-                                                    worktree_id: snapshot.id(),
-                                                    path: ignored_entry.path.clone(),
-                                                };
-                                                if matching_paths_tx
-                                                    .send(project_path)
-                                                    .await
-                                                    .is_err()
-                                                {
-                                                    return;
-                                                }
-                                            }
-                                        }
-                                    }
-                                }
-                            }
-                        }
-                    });
-                }
+
                 for worker_ix in 0..workers {
                     let worker_start_ix = worker_ix * paths_per_worker;
                     let worker_end_ix = worker_start_ix + paths_per_worker;
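
The surviving context shows how the non-ignored scan is parallelized: the flat list of candidate paths is cut into `workers` contiguous chunks, and worker N handles the half-open range starting at `N * paths_per_worker`. A rough sketch of that chunking, using `std::thread::scope` in place of the executor's `scoped` helper and made-up inputs (`paths`, `workers`):

use std::thread;

fn main() {
    // Hypothetical inputs: in the real code these come from the worktree
    // snapshots and the executor's worker count.
    let paths: Vec<String> = (0..10).map(|i| format!("src/file_{i}.rs")).collect();
    let workers: usize = 3;

    // Same chunking arithmetic as in the hunk: ceil-divide the paths across
    // workers, then hand worker N the half-open range
    // [N * paths_per_worker, N * paths_per_worker + paths_per_worker).
    let paths_per_worker = (paths.len() + workers - 1) / workers;

    thread::scope(|scope| {
        for worker_ix in 0..workers {
            let worker_start_ix = (worker_ix * paths_per_worker).min(paths.len());
            let worker_end_ix = (worker_start_ix + paths_per_worker).min(paths.len());
            let chunk = &paths[worker_start_ix..worker_end_ix];
            scope.spawn(move || {
                for path in chunk {
                    println!("worker {worker_ix} scanning {path}");
                }
            });
        }
    });
}
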
@@ -5878,6 +5800,7 @@ impl Project {
                             let project_path = SearchMatchCandidate::Path {
                                 worktree_id: snapshot.id(),
                                 path: entry.path.clone(),
+                                is_ignored: entry.is_ignored,
                             };
                             if matching_paths_tx.send(project_path).await.is_err() {
                                 break;
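
Each worker pushes its hits into a bounded channel and stops as soon as `send` fails, i.e. when the receiving end of the search has gone away. A sketch of the same backpressure-plus-early-exit pattern, with std's `sync_channel` standing in for the `smol` channel used here:

use std::sync::mpsc::sync_channel;
use std::thread;

fn main() {
    // Bounded channel: a slow or vanished consumer applies backpressure to
    // the producer, like the bounded smol channel in the diff.
    let (matching_paths_tx, matching_paths_rx) = sync_channel::<String>(4);

    let producer = thread::spawn(move || {
        for i in 0..100 {
            let candidate = format!("src/file_{i}.rs");
            // Mirrors `if matching_paths_tx.send(project_path).await.is_err() { break; }`:
            // stop producing as soon as the receiver is gone.
            if matching_paths_tx.send(candidate).is_err() {
                println!("receiver dropped, stopping the scan");
                break;
            }
        }
    });

    // Consume only a few candidates, then drop the receiver.
    for candidate in matching_paths_rx.iter().take(3) {
        println!("matched {candidate}");
    }
    drop(matching_paths_rx);

    producer.join().unwrap();
}
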
@@ -5890,6 +5813,92 @@ impl Project {
                         }
                     });
                 }
+
+                if query.include_ignored() {
+                    scope.spawn(async move {
+                        for snapshot in snapshots {
+                            for ignored_entry in snapshot
+                                .entries(query.include_ignored())
+                                .filter(|e| e.is_ignored)
+                            {
+                                let mut ignored_paths_to_process =
+                                    VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
+                                while let Some(ignored_abs_path) =
+                                    ignored_paths_to_process.pop_front()
+                                {
+                                    if !query.file_matches(Some(&ignored_abs_path))
+                                        || snapshot.is_abs_path_excluded(&ignored_abs_path)
+                                    {
+                                        continue;
+                                    }
+                                    if let Some(fs_metadata) = fs
+                                        .metadata(&ignored_abs_path)
+                                        .await
+                                        .with_context(|| {
+                                            format!("fetching fs metadata for {ignored_abs_path:?}")
+                                        })
+                                        .log_err()
+                                        .flatten()
+                                    {
+                                        if fs_metadata.is_dir {
+                                            if let Some(mut subfiles) = fs
+                                                .read_dir(&ignored_abs_path)
+                                                .await
+                                                .with_context(|| {
+                                                    format!(
+                                                        "listing ignored path {ignored_abs_path:?}"
+                                                    )
+                                                })
+                                                .log_err()
+                                            {
+                                                while let Some(subfile) = subfiles.next().await {
+                                                    if let Some(subfile) = subfile.log_err() {
+                                                        ignored_paths_to_process.push_back(subfile);
+                                                    }
+                                                }
+                                            }
+                                        } else if !fs_metadata.is_symlink {
+                                            let matches = if let Some(file) = fs
+                                                .open_sync(&ignored_abs_path)
+                                                .await
+                                                .with_context(|| {
+                                                    format!(
+                                                        "Opening ignored path {ignored_abs_path:?}"
+                                                    )
+                                                })
+                                                .log_err()
+                                            {
+                                                query.detect(file).unwrap_or(false)
+                                            } else {
+                                                false
+                                            };
+                                            if matches {
+                                                let project_path = SearchMatchCandidate::Path {
+                                                    worktree_id: snapshot.id(),
+                                                    path: Arc::from(
+                                                        ignored_abs_path
+                                                            .strip_prefix(snapshot.abs_path())
+                                                            .expect(
+                                                                "scanning worktree-related files",
+                                                            ),
+                                                    ),
+                                                    is_ignored: true,
+                                                };
+                                                if matching_paths_tx
+                                                    .send(project_path)
+                                                    .await
+                                                    .is_err()
+                                                {
+                                                    return;
+                                                }
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                    });
+                }
             })
             .await;
     }
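
The block added above is the ignored-files scan, now placed after the worker loop so its results arrive later than the non-ignored ones. It walks each ignored entry breadth-first: directories enqueue their children (`push_back` instead of the earlier `push_front`), regular files are run through the query, and matches are reported relative to the worktree root via `strip_prefix`. A simplified synchronous sketch with `std::fs` and a substring check standing in for the project's async `fs` trait and `query.detect`; `walk_ignored` and `needle` are made-up names:

use std::collections::VecDeque;
use std::fs;
use std::path::{Path, PathBuf};

// Breadth-first walk over an ignored subtree, in the spirit of the loop in
// the hunk above: directories enqueue their children, files are checked for
// the query, and hits are reported relative to the worktree root.
fn walk_ignored(worktree_root: &Path, ignored_entry: &Path, needle: &str) -> Vec<PathBuf> {
    let mut matches = Vec::new();
    let mut to_process = VecDeque::from([worktree_root.join(ignored_entry)]);

    while let Some(abs_path) = to_process.pop_front() {
        let Ok(metadata) = fs::symlink_metadata(&abs_path) else {
            continue; // the real code logs the error and moves on
        };
        if metadata.is_dir() {
            let Ok(children) = fs::read_dir(&abs_path) else { continue };
            for child in children.flatten() {
                // `push_back` keeps the traversal breadth-first, matching the
                // change from `push_front` in the earlier version.
                to_process.push_back(child.path());
            }
        } else if !metadata.file_type().is_symlink() {
            // Stand-in for `query.detect(file)`: a plain substring search.
            if fs::read_to_string(&abs_path)
                .map(|contents| contents.contains(needle))
                .unwrap_or(false)
            {
                // Report the path relative to the worktree, like the
                // `strip_prefix(snapshot.abs_path())` call in the diff.
                if let Ok(relative) = abs_path.strip_prefix(worktree_root) {
                    matches.push(relative.to_path_buf());
                }
            }
        }
    }
    matches
}

fn main() {
    for hit in walk_ignored(Path::new("."), Path::new("target"), "TODO") {
        println!("{}", hit.display());
    }
}
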
@@ -5998,11 +6007,24 @@ impl Project {
         let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
         let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
         cx.spawn(|this, cx| async move {
-            let mut buffers = vec![];
+            let mut buffers = Vec::new();
+            let mut ignored_buffers = Vec::new();
             while let Some(entry) = matching_paths_rx.next().await {
-                buffers.push(entry);
+                if matches!(
+                    entry,
+                    SearchMatchCandidate::Path {
+                        is_ignored: true,
+                        ..
+                    }
+                ) {
+                    ignored_buffers.push(entry);
+                } else {
+                    buffers.push(entry);
+                }
             }
             buffers.sort_by_key(|candidate| candidate.path());
+            ignored_buffers.sort_by_key(|candidate| candidate.path());
+            buffers.extend(ignored_buffers);
             let matching_paths = buffers.clone();
             let _ = sorted_buffers_tx.send(buffers);
             for (index, candidate) in matching_paths.into_iter().enumerate() {
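
This hunk is the core of the commit: as candidates arrive they are split into a non-ignored and an ignored bucket, each bucket is sorted by path, and the ignored bucket is appended last, so non-ignored files are output first. The same ordering, as a standalone sketch over plain `(path, is_ignored)` pairs instead of `SearchMatchCandidate`s read from a channel:

// Reorder search candidates so non-ignored paths come first, each group
// sorted by path — the same shape as the buffers / ignored_buffers split in
// the hunk above, but over plain tuples for brevity.
fn order_candidates(candidates: Vec<(String, bool)>) -> Vec<(String, bool)> {
    let mut buffers = Vec::new();
    let mut ignored_buffers = Vec::new();
    for candidate in candidates {
        if candidate.1 {
            ignored_buffers.push(candidate);
        } else {
            buffers.push(candidate);
        }
    }
    buffers.sort_by_key(|(path, _)| path.clone());
    ignored_buffers.sort_by_key(|(path, _)| path.clone());
    buffers.extend(ignored_buffers);
    buffers
}

fn main() {
    let ordered = order_candidates(vec![
        ("target/out.log".into(), true),
        ("src/main.rs".into(), false),
        ("node_modules/x.js".into(), true),
        ("Cargo.toml".into(), false),
    ]);
    // Prints Cargo.toml and src/main.rs before the two ignored paths.
    for (path, is_ignored) in ordered {
        println!("{path} (ignored: {is_ignored})");
    }
}
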
@@ -6014,7 +6036,9 @@ impl Project {
             cx.spawn(|mut cx| async move {
                 let buffer = match candidate {
                     SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
-                    SearchMatchCandidate::Path { worktree_id, path } => this
+                    SearchMatchCandidate::Path {
+                        worktree_id, path, ..
+                    } => this
                         .update(&mut cx, |this, cx| {
                             this.open_buffer((worktree_id, path), cx)
                         })
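
Since `Path` now carries `is_ignored`, the old `SearchMatchCandidate::Path { worktree_id, path }` pattern would no longer compile; the `..` rest pattern lets the match bind only the fields it needs. A tiny illustration with a made-up enum:

enum Candidate {
    Path {
        worktree_id: usize,
        is_ignored: bool,
        path: String,
    },
}

fn open(candidate: Candidate) {
    match candidate {
        // Without `..`, this pattern would stop compiling the moment the
        // `is_ignored` field is added to the variant.
        Candidate::Path {
            worktree_id, path, ..
        } => println!("opening {path} in worktree {worktree_id}"),
    }
}

fn main() {
    open(Candidate::Path {
        worktree_id: 1,
        is_ignored: false,
        path: "src/lib.rs".to_string(),
    });
}
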
@@ -219,7 +219,7 @@ impl ProjectSearch {
                     this.no_results = Some(true);
                 });
 
-                // TODO kb check for cancellations here and actually stop the search
+                // TODO kb check for cancellations here and actually stop the search?
                 while let Some((buffer, anchors)) = matches.next().await {
                     let mut ranges = this.update(&mut cx, |this, cx| {
                         this.no_results = Some(false);
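
The tweaked TODO still leaves cancellation open: the loop keeps draining matches even if the search has been superseded. One common shape for cooperative cancellation, an atomic flag checked between items, is sketched below; this is only an illustration of the idea in the comment, not what this file does.

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::channel;
use std::sync::Arc;
use std::thread;
use std::time::Duration;

fn main() {
    let cancelled = Arc::new(AtomicBool::new(false));
    let (tx, rx) = channel::<String>();

    // Producer: keeps emitting matches until the flag flips or the receiver
    // goes away.
    let producer_cancelled = Arc::clone(&cancelled);
    let producer = thread::spawn(move || {
        for i in 0.. {
            if producer_cancelled.load(Ordering::Relaxed) {
                break;
            }
            if tx.send(format!("match {i}")).is_err() {
                break;
            }
            thread::sleep(Duration::from_millis(10));
        }
    });

    // Consumer: the equivalent of the `while let Some((buffer, anchors))`
    // loop, but it checks the cancellation flag between matches.
    for (count, m) in rx.iter().enumerate() {
        println!("{m}");
        if count == 4 {
            // e.g. the user started a new search
            cancelled.store(true, Ordering::Relaxed);
            break;
        }
    }

    producer.join().unwrap();
}
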