Mirror of https://github.com/zed-industries/zed.git (synced 2025-01-04 10:12:44 +03:00)
Merge pull request #1235 from zed-industries/diagnostics-in-hidden-worktrees

Allow diagnostics to be published for hidden worktrees

Commit: fc3ec6966b
@@ -2644,9 +2644,6 @@ impl Project {
         let (worktree, relative_path) = self
             .find_local_worktree(&abs_path, cx)
             .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
-        if !worktree.read(cx).is_visible() {
-            return Ok(());
-        }

         let project_path = ProjectPath {
             worktree_id: worktree.read(cx).id(),
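The guard removed above is what previously discarded diagnostics aimed at non-visible worktrees. A minimal standalone model of the before/after behavior, using plain structs in place of Zed's Project and Worktree types (all names here are illustrative, not the real API):

    use std::collections::HashMap;

    // Illustrative stand-ins for Zed's types; not the real Project API.
    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    struct WorktreeId(usize);

    struct Worktree {
        id: WorktreeId,
        visible: bool,
    }

    #[derive(Default)]
    struct DiagnosticStore {
        by_worktree: HashMap<WorktreeId, Vec<String>>,
    }

    impl DiagnosticStore {
        // Old behavior: diagnostics for hidden worktrees were silently dropped.
        fn update_diagnostics_old(&mut self, worktree: &Worktree, message: &str) {
            if !worktree.visible {
                return;
            }
            self.by_worktree
                .entry(worktree.id)
                .or_default()
                .push(message.to_string());
        }

        // New behavior: diagnostics are stored regardless of visibility.
        fn update_diagnostics_new(&mut self, worktree: &Worktree, message: &str) {
            self.by_worktree
                .entry(worktree.id)
                .or_default()
                .push(message.to_string());
        }
    }

    fn main() {
        let hidden = Worktree {
            id: WorktreeId(1),
            visible: false,
        };

        let mut before = DiagnosticStore::default();
        before.update_diagnostics_old(&hidden, "unknown variable 'c'");
        assert!(before.by_worktree.get(&hidden.id).is_none()); // dropped

        let mut after = DiagnosticStore::default();
        after.update_diagnostics_new(&hidden, "unknown variable 'c'");
        assert_eq!(after.by_worktree[&hidden.id].len(), 1); // retained
    }

The real update_diagnostics does considerably more (server ids, versions, events); the point of the sketch is only that visibility no longer short-circuits the update.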
@@ -4011,13 +4008,15 @@ impl Project {
         abs_path: &Path,
         cx: &AppContext,
     ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
-        for tree in self.worktrees(cx) {
-            if let Some(relative_path) = tree
-                .read(cx)
-                .as_local()
-                .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
-            {
-                return Some((tree.clone(), relative_path.into()));
+        for tree in &self.worktrees {
+            if let Some(tree) = tree.upgrade(cx) {
+                if let Some(relative_path) = tree
+                    .read(cx)
+                    .as_local()
+                    .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
+                {
+                    return Some((tree.clone(), relative_path.into()));
+                }
             }
         }
         None
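find_local_worktree now iterates the stored weak worktree handles directly and upgrades each one before matching the path. A rough standalone sketch of that iterate-and-upgrade pattern, using std::rc::Weak in place of gpui's weak model handles (types and setup are illustrative):

    use std::path::{Path, PathBuf};
    use std::rc::{Rc, Weak};

    // Illustrative stand-in for a local worktree: just its root path.
    struct Worktree {
        abs_path: PathBuf,
    }

    // Walk every stored handle, skip worktrees that have already been dropped,
    // and return the first one whose root is a prefix of `abs_path`, together
    // with the path relative to that root.
    fn find_local_worktree(
        worktrees: &[Weak<Worktree>],
        abs_path: &Path,
    ) -> Option<(Rc<Worktree>, PathBuf)> {
        for tree in worktrees {
            if let Some(tree) = tree.upgrade() {
                if let Ok(relative_path) = abs_path.strip_prefix(&tree.abs_path) {
                    return Some((tree.clone(), relative_path.into()));
                }
            }
        }
        None
    }

    fn main() {
        let visible = Rc::new(Worktree {
            abs_path: "/root/dir".into(),
        });
        // A hidden, single-file worktree like the one the new test creates.
        let hidden = Rc::new(Worktree {
            abs_path: "/root/other.rs".into(),
        });
        let handles = vec![Rc::downgrade(&visible), Rc::downgrade(&hidden)];

        let (_, rel) = find_local_worktree(&handles, Path::new("/root/dir/a.rs")).unwrap();
        assert_eq!(rel, PathBuf::from("a.rs"));

        let (tree, rel) = find_local_worktree(&handles, Path::new("/root/other.rs")).unwrap();
        assert_eq!(tree.abs_path, PathBuf::from("/root/other.rs"));
        assert_eq!(rel, PathBuf::from(""));
    }

Dropped worktrees simply fail to upgrade and are skipped, which is the same effect the `if let Some(tree) = tree.upgrade(cx)` arm has in the real code.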
@@ -4258,7 +4257,7 @@ impl Project {
         &'a self,
         cx: &'a AppContext,
     ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
-        self.worktrees(cx).flat_map(move |worktree| {
+        self.visible_worktrees(cx).flat_map(move |worktree| {
             let worktree = worktree.read(cx);
             let worktree_id = worktree.id();
             worktree
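The hunk above switches diagnostic_summaries from worktrees(cx) to visible_worktrees(cx), so diagnostics held by hidden worktrees stay out of the project-wide counts even though they are now stored. A minimal standalone model of that split, with plain structs standing in for Zed's types:

    // Illustrative stand-ins; not Zed's real Project/Worktree API.
    struct Worktree {
        visible: bool,
        error_count: usize,
    }

    // Mirrors the switch from worktrees(..) to visible_worktrees(..): every
    // worktree can hold diagnostics, but only visible ones feed the summary.
    fn summary_error_count(worktrees: &[Worktree]) -> usize {
        worktrees
            .iter()
            .filter(|tree| tree.visible)
            .map(|tree| tree.error_count)
            .sum()
    }

    fn main() {
        let worktrees = vec![
            Worktree {
                visible: true,
                error_count: 0,
            },
            // Hidden single-file worktree holding one error, as in the test below.
            Worktree {
                visible: false,
                error_count: 1,
            },
        ];
        // The hidden worktree's error stays out of the project summary,
        // matching the test's assertion that error_count is 0.
        assert_eq!(summary_error_count(&worktrees), 0);
    }

This matches the new test below, which publishes one error to a hidden single-file worktree and then asserts that diagnostic_summaries is empty and the summary's error_count is 0.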
@@ -6411,6 +6410,80 @@ mod tests {
         });
     }

+    #[gpui::test]
+    async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
+        cx.foreground().forbid_parking();
+
+        let fs = FakeFs::new(cx.background());
+        fs.insert_tree(
+            "/root",
+            json!({
+                "dir": {
+                    "a.rs": "let a = 1;",
+                },
+                "other.rs": "let b = c;"
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
+
+        let (worktree, _) = project
+            .update(cx, |project, cx| {
+                project.find_or_create_local_worktree("/root/other.rs", false, cx)
+            })
+            .await
+            .unwrap();
+        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
+
+        project.update(cx, |project, cx| {
+            project
+                .update_diagnostics(
+                    0,
+                    lsp::PublishDiagnosticsParams {
+                        uri: Url::from_file_path("/root/other.rs").unwrap(),
+                        version: None,
+                        diagnostics: vec![lsp::Diagnostic {
+                            range: lsp::Range::new(
+                                lsp::Position::new(0, 8),
+                                lsp::Position::new(0, 9),
+                            ),
+                            severity: Some(lsp::DiagnosticSeverity::ERROR),
+                            message: "unknown variable 'c'".to_string(),
+                            ..Default::default()
+                        }],
+                    },
+                    &[],
+                    cx,
+                )
+                .unwrap();
+        });
+
+        let buffer = project
+            .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
+            .await
+            .unwrap();
+        buffer.read_with(cx, |buffer, _| {
+            let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
+            assert_eq!(
+                chunks
+                    .iter()
+                    .map(|(s, d)| (s.as_str(), *d))
+                    .collect::<Vec<_>>(),
+                &[
+                    ("let b = ", None),
+                    ("c", Some(DiagnosticSeverity::ERROR)),
+                    (";", None),
+                ]
+            );
+        });
+
+        project.read_with(cx, |project, cx| {
+            assert_eq!(project.diagnostic_summaries(cx).next(), None);
+            assert_eq!(project.diagnostic_summary(cx).error_count, 0);
+        });
+    }
+
     #[gpui::test]
     async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
         cx.foreground().forbid_parking();
@@ -995,9 +995,9 @@ impl Workspace {
         // Sort the paths to ensure we add worktrees for parents before their children.
         abs_paths.sort_unstable();
         cx.spawn(|this, mut cx| async move {
-            let mut entries = Vec::new();
+            let mut project_paths = Vec::new();
             for path in &abs_paths {
-                entries.push(
+                project_paths.push(
                     this.update(&mut cx, |this, cx| {
                         this.project_path_for_path(path, visible, cx)
                     })
@@ -1009,7 +1009,7 @@ impl Workspace {
             let tasks = abs_paths
                 .iter()
                 .cloned()
-                .zip(entries.into_iter())
+                .zip(project_paths.into_iter())
                 .map(|(abs_path, project_path)| {
                     let this = this.clone();
                     cx.spawn(|mut cx| {