Merge branch 'main' into git-menu

Piotr Osiewicz 2023-07-03 16:25:36 +02:00
commit 806268f0db
17 changed files with 882 additions and 196 deletions

Cargo.lock generated
View File

@@ -5035,6 +5035,7 @@ dependencies = [
"language",
"menu",
"postage",
"pretty_assertions",
"project",
"schemars",
"serde",

View File

@@ -101,6 +101,7 @@ time = { version = "0.3", features = ["serde", "serde-well-known"] }
toml = { version = "0.5" }
tree-sitter = "0.20"
unindent = { version = "0.1.7" }
pretty_assertions = "1.3.0"
[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "49226023693107fba9a1191136a4f47f38cdca73" }

View File

@@ -24,9 +24,7 @@
],
"ctrl-shift-down": "editor::AddSelectionBelow",
"ctrl-shift-up": "editor::AddSelectionAbove",
"cmd-shift-backspace": "editor::DeleteToBeginningOfLine",
"cmd-shift-enter": "editor::NewlineAbove",
"cmd-enter": "editor::NewlineBelow"
"cmd-shift-backspace": "editor::DeleteToBeginningOfLine"
}
},
{

View File

@@ -24,9 +24,7 @@
"ctrl-.": "editor::GoToHunk",
"ctrl-,": "editor::GoToPrevHunk",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd",
"cmd-shift-enter": "editor::NewlineAbove",
"cmd-enter": "editor::NewlineBelow"
"ctrl-delete": "editor::DeleteToNextWordEnd"
}
},
{

View File

@@ -12,8 +12,6 @@
"ctrl-shift-d": "editor::DuplicateLine",
"cmd-b": "editor::GoToDefinition",
"cmd-j": "editor::ScrollCursorCenter",
"cmd-alt-enter": "editor::NewlineAbove",
"cmd-enter": "editor::NewlineBelow",
"cmd-shift-l": "editor::SelectLine",
"cmd-shift-t": "outline::Toggle",
"alt-backspace": "editor::DeleteToPreviousWordStart",
@@ -56,7 +54,9 @@
},
{
"context": "Editor && mode == full",
"bindings": {}
"bindings": {
"cmd-alt-enter": "editor::NewlineAbove"
}
},
{
"context": "BufferSearchBar",

View File

@@ -67,7 +67,7 @@ fs = { path = "../fs", features = ["test-support"] }
git = { path = "../git", features = ["test-support"] }
live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
pretty_assertions = "1.3.0"
pretty_assertions.workspace = true
project = { path = "../project", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }

View File

@@ -49,6 +49,19 @@ pub struct InlayProperties<T> {
pub text: T,
}
impl InlayProperties<String> {
pub fn new(position: Anchor, hint: &project::InlayHint) -> Self {
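// Build the inlay text from the LSP hint, honoring the hint's padding flags
// while never double-padding a label that already starts or ends with a space.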
let mut text = hint.text();
if hint.padding_right && !text.ends_with(' ') {
text.push(' ');
}
if hint.padding_left && !text.starts_with(' ') {
text.insert(0, ' ');
}
Self { position, text }
}
}
impl sum_tree::Item for Transform {
type Summary = TransformSummary;
@@ -1095,6 +1108,7 @@ mod tests {
use super::*;
use crate::{InlayId, MultiBuffer};
use gpui::AppContext;
use project::{InlayHint, InlayHintLabel};
use rand::prelude::*;
use settings::SettingsStore;
use std::{cmp::Reverse, env, sync::Arc};
@@ -1102,6 +1116,81 @@ mod tests {
use text::Patch;
use util::post_inc;
#[test]
fn test_inlay_properties_label_padding() {
assert_eq!(
InlayProperties::new(
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String("a".to_string()),
buffer_id: 0,
position: text::Anchor::default(),
padding_left: false,
padding_right: false,
tooltip: None,
kind: None,
},
)
.text,
"a",
"Should not pad label if not requested"
);
assert_eq!(
InlayProperties::new(
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String("a".to_string()),
buffer_id: 0,
position: text::Anchor::default(),
padding_left: true,
padding_right: true,
tooltip: None,
kind: None,
},
)
.text,
" a ",
"Should pad label for every side requested"
);
assert_eq!(
InlayProperties::new(
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String(" a ".to_string()),
buffer_id: 0,
position: text::Anchor::default(),
padding_left: false,
padding_right: false,
tooltip: None,
kind: None,
},
)
.text,
" a ",
"Should not change already padded label"
);
assert_eq!(
InlayProperties::new(
Anchor::min(),
&InlayHint {
label: InlayHintLabel::String(" a ".to_string()),
buffer_id: 0,
position: text::Anchor::default(),
padding_left: true,
padding_right: true,
tooltip: None,
kind: None,
},
)
.text,
" a ",
"Should not change already padded label"
);
}
#[gpui::test]
fn test_basic_inlays(cx: &mut AppContext) {
let buffer = MultiBuffer::build_simple("abcdefghi", cx);

View File

@@ -26,7 +26,7 @@ use aho_corasick::AhoCorasick;
use anyhow::{anyhow, Result};
use blink_manager::BlinkManager;
use client::{ClickhouseEvent, TelemetrySettings};
use clock::ReplicaId;
use clock::{Global, ReplicaId};
use collections::{BTreeMap, Bound, HashMap, HashSet, VecDeque};
use copilot::Copilot;
pub use display_map::DisplayPoint;
@@ -1195,11 +1195,11 @@ enum GotoDefinitionKind {
Type,
}
#[derive(Debug, Copy, Clone)]
#[derive(Debug, Clone)]
enum InlayRefreshReason {
SettingsChange(InlayHintSettings),
NewLinesShown,
ExcerptEdited,
BufferEdited(HashSet<Arc<Language>>),
RefreshRequested,
}
@@ -2617,7 +2617,7 @@ impl Editor {
return;
}
let invalidate_cache = match reason {
let (invalidate_cache, required_languages) = match reason {
InlayRefreshReason::SettingsChange(new_settings) => {
match self.inlay_hint_cache.update_settings(
&self.buffer,
@@ -2633,16 +2633,18 @@
return;
}
ControlFlow::Break(None) => return,
ControlFlow::Continue(()) => InvalidationStrategy::RefreshRequested,
ControlFlow::Continue(()) => (InvalidationStrategy::RefreshRequested, None),
}
}
InlayRefreshReason::NewLinesShown => InvalidationStrategy::None,
InlayRefreshReason::ExcerptEdited => InvalidationStrategy::ExcerptEdited,
InlayRefreshReason::RefreshRequested => InvalidationStrategy::RefreshRequested,
InlayRefreshReason::NewLinesShown => (InvalidationStrategy::None, None),
InlayRefreshReason::BufferEdited(buffer_languages) => {
(InvalidationStrategy::BufferEdited, Some(buffer_languages))
}
InlayRefreshReason::RefreshRequested => (InvalidationStrategy::RefreshRequested, None),
};
self.inlay_hint_cache.refresh_inlay_hints(
self.excerpt_visible_offsets(cx),
self.excerpt_visible_offsets(required_languages.as_ref(), cx),
invalidate_cache,
cx,
)
@@ -2661,8 +2663,9 @@
fn excerpt_visible_offsets(
&self,
restrict_to_languages: Option<&HashSet<Arc<Language>>>,
cx: &mut ViewContext<'_, '_, Editor>,
) -> HashMap<ExcerptId, (ModelHandle<Buffer>, Range<usize>)> {
) -> HashMap<ExcerptId, (ModelHandle<Buffer>, Global, Range<usize>)> {
let multi_buffer = self.buffer().read(cx);
let multi_buffer_snapshot = multi_buffer.snapshot(cx);
let multi_buffer_visible_start = self
@@ -2680,8 +2683,22 @@
.range_to_buffer_ranges(multi_buffer_visible_range, cx)
.into_iter()
.filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty())
.map(|(buffer, excerpt_visible_range, excerpt_id)| {
(excerpt_id, (buffer, excerpt_visible_range))
.filter_map(|(buffer_handle, excerpt_visible_range, excerpt_id)| {
let buffer = buffer_handle.read(cx);
let language = buffer.language()?;
if let Some(restrict_to_languages) = restrict_to_languages {
if !restrict_to_languages.contains(language) {
return None;
}
}
Some((
excerpt_id,
(
buffer_handle,
buffer.version().clone(),
excerpt_visible_range,
),
))
})
.collect()
}
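// A condensed sketch of the filtering above, with plain tuples standing in
// for buffer handles (hypothetical types, for illustration only): keep the
// excerpts whose buffer language appears in the optional restriction set;
// `None` means "no restriction".
fn filter_excerpts_by_language(
    excerpts: Vec<(u64, &'static str)>, // (excerpt_id, language name)
    restrict_to: Option<&std::collections::HashSet<&'static str>>,
) -> Vec<u64> {
    excerpts
        .into_iter()
        .filter_map(|(id, language)| match restrict_to {
            // A restriction set exists and this language is not in it: skip.
            Some(allowed) if !allowed.contains(language) => None,
            _ => Some(id),
        })
        .collect()
}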
@@ -2695,16 +2712,7 @@
let buffer = self.buffer.read(cx).read(cx);
let new_inlays = to_insert
.into_iter()
.map(|(position, id, hint)| {
let mut text = hint.text();
if hint.padding_right {
text.push(' ');
}
if hint.padding_left {
text.insert(0, ' ');
}
(id, InlayProperties { position, text })
})
.map(|(position, id, hint)| (id, InlayProperties::new(position, &hint)))
.collect();
drop(buffer);
self.display_map.update(cx, |display_map, cx| {
@@ -7256,7 +7264,7 @@ impl Editor {
fn on_buffer_event(
&mut self,
_: ModelHandle<MultiBuffer>,
multibuffer: ModelHandle<MultiBuffer>,
event: &multi_buffer::Event,
cx: &mut ViewContext<Self>,
) {
@@ -7268,7 +7276,33 @@
self.update_visible_copilot_suggestion(cx);
}
cx.emit(Event::BufferEdited);
self.refresh_inlays(InlayRefreshReason::ExcerptEdited, cx);
if let Some(project) = &self.project {
let project = project.read(cx);
let languages_affected = multibuffer
.read(cx)
.all_buffers()
.into_iter()
.filter_map(|buffer| {
let buffer = buffer.read(cx);
let language = buffer.language()?;
if project.is_local()
&& project.language_servers_for_buffer(buffer, cx).count() == 0
{
None
} else {
Some(language)
}
})
.cloned()
.collect::<HashSet<_>>();
if !languages_affected.is_empty() {
self.refresh_inlays(
InlayRefreshReason::BufferEdited(languages_affected),
cx,
);
}
}
}
multi_buffer::Event::ExcerptsAdded {
buffer,

View File

@@ -38,7 +38,7 @@ pub struct CachedExcerptHints {
#[derive(Debug, Clone, Copy)]
pub enum InvalidationStrategy {
RefreshRequested,
ExcerptEdited,
BufferEdited,
None,
}
@@ -94,7 +94,7 @@ impl InvalidationStrategy {
fn should_invalidate(&self) -> bool {
matches!(
self,
InvalidationStrategy::RefreshRequested | InvalidationStrategy::ExcerptEdited
InvalidationStrategy::RefreshRequested | InvalidationStrategy::BufferEdited
)
}
}
@@ -197,7 +197,7 @@ impl InlayHintCache {
pub fn refresh_inlay_hints(
&mut self,
mut excerpts_to_query: HashMap<ExcerptId, (ModelHandle<Buffer>, Range<usize>)>,
mut excerpts_to_query: HashMap<ExcerptId, (ModelHandle<Buffer>, Global, Range<usize>)>,
invalidate: InvalidationStrategy,
cx: &mut ViewContext<Editor>,
) {
@@ -342,104 +342,113 @@ impl InlayHintCache {
fn spawn_new_update_tasks(
editor: &mut Editor,
excerpts_to_query: HashMap<ExcerptId, (ModelHandle<Buffer>, Range<usize>)>,
excerpts_to_query: HashMap<ExcerptId, (ModelHandle<Buffer>, Global, Range<usize>)>,
invalidate: InvalidationStrategy,
update_cache_version: usize,
cx: &mut ViewContext<'_, '_, Editor>,
) {
let visible_hints = Arc::new(editor.visible_inlay_hints(cx));
for (excerpt_id, (buffer_handle, excerpt_visible_range)) in excerpts_to_query {
if !excerpt_visible_range.is_empty() {
let buffer = buffer_handle.read(cx);
let buffer_snapshot = buffer.snapshot();
let cached_excerpt_hints = editor.inlay_hint_cache.hints.get(&excerpt_id).cloned();
if let Some(cached_excerpt_hints) = &cached_excerpt_hints {
let new_task_buffer_version = buffer_snapshot.version();
let cached_excerpt_hints = cached_excerpt_hints.read();
let cached_buffer_version = &cached_excerpt_hints.buffer_version;
if cached_excerpt_hints.version > update_cache_version
|| cached_buffer_version.changed_since(new_task_buffer_version)
{
return;
}
if !new_task_buffer_version.changed_since(&cached_buffer_version)
&& !matches!(invalidate, InvalidationStrategy::RefreshRequested)
{
return;
}
for (excerpt_id, (buffer_handle, new_task_buffer_version, excerpt_visible_range)) in
excerpts_to_query
{
if excerpt_visible_range.is_empty() {
continue;
}
let buffer = buffer_handle.read(cx);
let buffer_snapshot = buffer.snapshot();
if buffer_snapshot
.version()
.changed_since(&new_task_buffer_version)
{
continue;
}
let cached_excerpt_hints = editor.inlay_hint_cache.hints.get(&excerpt_id).cloned();
if let Some(cached_excerpt_hints) = &cached_excerpt_hints {
let cached_excerpt_hints = cached_excerpt_hints.read();
let cached_buffer_version = &cached_excerpt_hints.buffer_version;
if cached_excerpt_hints.version > update_cache_version
|| cached_buffer_version.changed_since(&new_task_buffer_version)
{
continue;
}
if !new_task_buffer_version.changed_since(&cached_buffer_version)
&& !matches!(invalidate, InvalidationStrategy::RefreshRequested)
{
continue;
}
};
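// A hypothetical distillation of the guards above, with a boolean standing in
// for the `Global` version-vector comparison (a sketch, not the real types):
// a queued task is skipped when a newer cache update already landed, or when
// nothing changed and no explicit refresh was requested.
fn should_run_task(
    cached_version: usize,           // version stored with the cached hints
    task_version: usize,             // cache version captured at queue time
    buffer_edited_since_cache: bool, // buffer changed after hints were cached?
    refresh_requested: bool,         // InvalidationStrategy::RefreshRequested?
) -> bool {
    if cached_version > task_version {
        return false; // a newer update already superseded this task
    }
    buffer_edited_since_cache || refresh_requested
}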
let buffer_id = buffer.remote_id();
let excerpt_visible_range_start = buffer.anchor_before(excerpt_visible_range.start);
let excerpt_visible_range_end = buffer.anchor_after(excerpt_visible_range.end);
let (multi_buffer_snapshot, full_excerpt_range) =
editor.buffer.update(cx, |multi_buffer, cx| {
let multi_buffer_snapshot = multi_buffer.snapshot(cx);
(
multi_buffer_snapshot,
multi_buffer
.excerpts_for_buffer(&buffer_handle, cx)
.into_iter()
.find(|(id, _)| id == &excerpt_id)
.map(|(_, range)| range.context),
)
});
if let Some(full_excerpt_range) = full_excerpt_range {
let query = ExcerptQuery {
buffer_id,
excerpt_id,
dimensions: ExcerptDimensions {
excerpt_range_start: full_excerpt_range.start,
excerpt_range_end: full_excerpt_range.end,
excerpt_visible_range_start,
excerpt_visible_range_end,
},
cache_version: update_cache_version,
invalidate,
};
let buffer_id = buffer.remote_id();
let excerpt_visible_range_start = buffer.anchor_before(excerpt_visible_range.start);
let excerpt_visible_range_end = buffer.anchor_after(excerpt_visible_range.end);
let (multi_buffer_snapshot, full_excerpt_range) =
editor.buffer.update(cx, |multi_buffer, cx| {
let multi_buffer_snapshot = multi_buffer.snapshot(cx);
(
multi_buffer_snapshot,
multi_buffer
.excerpts_for_buffer(&buffer_handle, cx)
.into_iter()
.find(|(id, _)| id == &excerpt_id)
.map(|(_, range)| range.context),
)
});
if let Some(full_excerpt_range) = full_excerpt_range {
let query = ExcerptQuery {
buffer_id,
excerpt_id,
dimensions: ExcerptDimensions {
excerpt_range_start: full_excerpt_range.start,
excerpt_range_end: full_excerpt_range.end,
excerpt_visible_range_start,
excerpt_visible_range_end,
},
cache_version: update_cache_version,
invalidate,
};
let new_update_task = |is_refresh_after_regular_task| {
new_update_task(
query,
multi_buffer_snapshot,
buffer_snapshot,
Arc::clone(&visible_hints),
cached_excerpt_hints,
is_refresh_after_regular_task,
cx,
)
};
match editor.inlay_hint_cache.update_tasks.entry(excerpt_id) {
hash_map::Entry::Occupied(mut o) => {
let update_task = o.get_mut();
match (update_task.invalidate, invalidate) {
(_, InvalidationStrategy::None) => {}
(
InvalidationStrategy::ExcerptEdited,
InvalidationStrategy::RefreshRequested,
) if !update_task.task.is_running_rx.is_closed() => {
update_task.pending_refresh = Some(query);
}
_ => {
o.insert(UpdateTask {
invalidate,
cache_version: query.cache_version,
task: new_update_task(false),
pending_refresh: None,
});
}
let new_update_task = |is_refresh_after_regular_task| {
new_update_task(
query,
multi_buffer_snapshot,
buffer_snapshot,
Arc::clone(&visible_hints),
cached_excerpt_hints,
is_refresh_after_regular_task,
cx,
)
};
match editor.inlay_hint_cache.update_tasks.entry(excerpt_id) {
hash_map::Entry::Occupied(mut o) => {
let update_task = o.get_mut();
match (update_task.invalidate, invalidate) {
(_, InvalidationStrategy::None) => {}
(
InvalidationStrategy::BufferEdited,
InvalidationStrategy::RefreshRequested,
) if !update_task.task.is_running_rx.is_closed() => {
update_task.pending_refresh = Some(query);
}
_ => {
o.insert(UpdateTask {
invalidate,
cache_version: query.cache_version,
task: new_update_task(false),
pending_refresh: None,
});
}
}
hash_map::Entry::Vacant(v) => {
v.insert(UpdateTask {
invalidate,
cache_version: query.cache_version,
task: new_update_task(false),
pending_refresh: None,
});
}
}
hash_map::Entry::Vacant(v) => {
v.insert(UpdateTask {
invalidate,
cache_version: query.cache_version,
task: new_update_task(false),
pending_refresh: None,
});
}
}
}
@@ -961,6 +970,247 @@ mod tests {
});
}
#[gpui::test]
async fn test_no_hint_updates_for_unrelated_language_files(cx: &mut gpui::TestAppContext) {
let allowed_hint_kinds = HashSet::from_iter([None, Some(InlayHintKind::Type)]);
init_test(cx, |settings| {
settings.defaults.inlay_hints = Some(InlayHintSettings {
enabled: true,
show_type_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Type)),
show_parameter_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Parameter)),
show_other_hints: allowed_hint_kinds.contains(&None),
})
});
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/a",
json!({
"main.rs": "fn main() { a } // and some long comment to ensure inlays are not trimmed out",
"other.md": "Test md file with some text",
}),
)
.await;
let project = Project::test(fs, ["/a".as_ref()], cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let worktree_id = workspace.update(cx, |workspace, cx| {
workspace.project().read_with(cx, |project, cx| {
project.worktrees(cx).next().unwrap().read(cx).id()
})
});
let mut rs_fake_servers = None;
let mut md_fake_servers = None;
for (name, path_suffix) in [("Rust", "rs"), ("Markdown", "md")] {
let mut language = Language::new(
LanguageConfig {
name: name.into(),
path_suffixes: vec![path_suffix.to_string()],
..Default::default()
},
Some(tree_sitter_rust::language()),
);
let fake_servers = language
.set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
name,
capabilities: lsp::ServerCapabilities {
inlay_hint_provider: Some(lsp::OneOf::Left(true)),
..Default::default()
},
..Default::default()
}))
.await;
match name {
"Rust" => rs_fake_servers = Some(fake_servers),
"Markdown" => md_fake_servers = Some(fake_servers),
_ => unreachable!(),
}
project.update(cx, |project, _| {
project.languages().add(Arc::new(language));
});
}
let _rs_buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/a/main.rs", cx)
})
.await
.unwrap();
cx.foreground().run_until_parked();
cx.foreground().start_waiting();
let rs_fake_server = rs_fake_servers.unwrap().next().await.unwrap();
let rs_editor = workspace
.update(cx, |workspace, cx| {
workspace.open_path((worktree_id, "main.rs"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let rs_lsp_request_count = Arc::new(AtomicU32::new(0));
rs_fake_server
.handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
let task_lsp_request_count = Arc::clone(&rs_lsp_request_count);
async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path("/a/main.rs").unwrap(),
);
let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst);
Ok(Some(vec![lsp::InlayHint {
position: lsp::Position::new(0, i),
label: lsp::InlayHintLabel::String(i.to_string()),
kind: None,
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
}]))
}
})
.next()
.await;
cx.foreground().run_until_parked();
rs_editor.update(cx, |editor, cx| {
let expected_layers = vec!["0".to_string()];
assert_eq!(
expected_layers,
cached_hint_labels(editor),
"Should get its first hints when opening the editor"
);
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(
inlay_cache.allowed_hint_kinds, allowed_hint_kinds,
"Cache should use editor settings to get the allowed hint kinds"
);
assert_eq!(
inlay_cache.version, 1,
"Rust editor update the cache version after every cache/view change"
);
});
cx.foreground().run_until_parked();
let _md_buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/a/other.md", cx)
})
.await
.unwrap();
cx.foreground().run_until_parked();
cx.foreground().start_waiting();
let md_fake_server = md_fake_servers.unwrap().next().await.unwrap();
let md_editor = workspace
.update(cx, |workspace, cx| {
workspace.open_path((worktree_id, "other.md"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let md_lsp_request_count = Arc::new(AtomicU32::new(0));
md_fake_server
.handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
let task_lsp_request_count = Arc::clone(&md_lsp_request_count);
async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path("/a/other.md").unwrap(),
);
let i = Arc::clone(&task_lsp_request_count).fetch_add(1, Ordering::SeqCst);
Ok(Some(vec![lsp::InlayHint {
position: lsp::Position::new(0, i),
label: lsp::InlayHintLabel::String(i.to_string()),
kind: None,
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
}]))
}
})
.next()
.await;
cx.foreground().run_until_parked();
md_editor.update(cx, |editor, cx| {
let expected_layers = vec!["0".to_string()];
assert_eq!(
expected_layers,
cached_hint_labels(editor),
"Markdown editor should have a separate verison, repeating Rust editor rules"
);
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(inlay_cache.allowed_hint_kinds, allowed_hint_kinds);
assert_eq!(inlay_cache.version, 1);
});
rs_editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |s| s.select_ranges([13..13]));
editor.handle_input("some rs change", cx);
});
cx.foreground().run_until_parked();
rs_editor.update(cx, |editor, cx| {
let expected_layers = vec!["1".to_string()];
assert_eq!(
expected_layers,
cached_hint_labels(editor),
"Rust inlay cache should change after the edit"
);
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(inlay_cache.allowed_hint_kinds, allowed_hint_kinds);
assert_eq!(
inlay_cache.version, 2,
"Every time hint cache changes, cache version should be incremented"
);
});
md_editor.update(cx, |editor, cx| {
let expected_layers = vec!["0".to_string()];
assert_eq!(
expected_layers,
cached_hint_labels(editor),
"Markdown editor should not be affected by Rust editor changes"
);
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(inlay_cache.allowed_hint_kinds, allowed_hint_kinds);
assert_eq!(inlay_cache.version, 1);
});
md_editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |s| s.select_ranges([13..13]));
editor.handle_input("some md change", cx);
});
cx.foreground().run_until_parked();
md_editor.update(cx, |editor, cx| {
let expected_layers = vec!["1".to_string()];
assert_eq!(
expected_layers,
cached_hint_labels(editor),
"Rust editor should not be affected by Markdown editor changes"
);
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(inlay_cache.allowed_hint_kinds, allowed_hint_kinds);
assert_eq!(inlay_cache.version, 2);
});
rs_editor.update(cx, |editor, cx| {
let expected_layers = vec!["1".to_string()];
assert_eq!(
expected_layers,
cached_hint_labels(editor),
"Markdown editor should also change independently"
);
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(inlay_cache.allowed_hint_kinds, allowed_hint_kinds);
assert_eq!(inlay_cache.version, 2);
});
}
#[gpui::test]
async fn test_hint_setting_changes(cx: &mut gpui::TestAppContext) {
let allowed_hint_kinds = HashSet::from_iter([None, Some(InlayHintKind::Type)]);

View File

@@ -279,6 +279,9 @@ impl Fs for RealFs {
async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> {
let buffer_size = text.summary().len.min(10 * 1024);
if let Some(path) = path.parent() {
self.create_dir(path).await?;
}
let file = smol::fs::File::create(path).await?;
let mut writer = smol::io::BufWriter::with_capacity(buffer_size, file);
for chunk in chunks(text, line_ending) {
@@ -1077,6 +1080,9 @@ impl Fs for FakeFs {
self.simulate_random_delay().await;
let path = normalize_path(path);
let content = chunks(text, line_ending).collect();
if let Some(path) = path.parent() {
self.create_dir(path).await?;
}
self.write_file_internal(path, content)?;
Ok(())
}
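// The same ensure-parents-exist pattern, sketched with std::fs for
// illustration (not this crate's async Fs trait):
fn save_creating_parents(path: &std::path::Path, text: &str) -> std::io::Result<()> {
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent)?; // create any missing ancestors first
    }
    std::fs::write(path, text)
}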

View File

@@ -64,7 +64,7 @@ itertools = "0.10"
[dev-dependencies]
ctor.workspace = true
env_logger.workspace = true
pretty_assertions = "1.3.0"
pretty_assertions.workspace = true
client = { path = "../client", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
db = { path = "../db", features = ["test-support"] }

View File

@@ -981,6 +981,19 @@ impl LocalWorktree {
})
}
/// Find the lowest path in the worktree's data structures that is an ancestor of the given path
fn lowest_ancestor(&self, path: &Path) -> PathBuf {
let mut lowest_ancestor = None;
for path in path.ancestors() {
if self.entry_for_path(path).is_some() {
lowest_ancestor = Some(path.to_path_buf());
break;
}
}
lowest_ancestor.unwrap_or_else(|| PathBuf::from(""))
}
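// A standalone sketch of the ancestor walk above, with a HashSet standing in
// for the worktree's entries (hypothetical, for illustration only).
// `Path::ancestors` yields the path itself and then each parent in turn, so
// the first hit is the deepest path that already exists.
fn lowest_existing_ancestor(
    existing: &std::collections::HashSet<std::path::PathBuf>,
    path: &std::path::Path,
) -> std::path::PathBuf {
    path.ancestors()
        .find(|candidate| existing.contains(*candidate))
        .map(std::path::Path::to_path_buf)
        .unwrap_or_else(|| std::path::PathBuf::from(""))
}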
pub fn create_entry(
&self,
path: impl Into<Arc<Path>>,
@@ -988,6 +1001,7 @@ impl LocalWorktree {
cx: &mut ModelContext<Worktree>,
) -> Task<Result<Entry>> {
let path = path.into();
let lowest_ancestor = self.lowest_ancestor(&path);
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
let write = cx.background().spawn(async move {
@@ -1001,10 +1015,31 @@
cx.spawn(|this, mut cx| async move {
write.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut().unwrap().refresh_entry(path, None, cx)
})
.await
let (result, refreshes) = this.update(&mut cx, |this, cx| {
let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new();
let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
for refresh_path in refresh_paths.ancestors() {
if refresh_path == Path::new("") {
continue;
}
let refresh_full_path = lowest_ancestor.join(refresh_path);
refreshes.push(this.as_local_mut().unwrap().refresh_entry(
refresh_full_path.into(),
None,
cx,
));
}
(
this.as_local_mut().unwrap().refresh_entry(path, None, cx),
refreshes,
)
});
for refresh in refreshes {
refresh.await.log_err();
}
result.await
})
}
@@ -2140,6 +2175,7 @@ impl LocalSnapshot {
impl BackgroundScannerState {
fn should_scan_directory(&self, entry: &Entry) -> bool {
(!entry.is_external && !entry.is_ignored)
|| entry.path.file_name() == Some(&*DOT_GIT)
|| self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning
|| self
.paths_to_scan
@@ -2319,6 +2355,7 @@ impl BackgroundScannerState {
.entry_for_id(entry_id)
.map(|entry| RepositoryWorkDirectory(entry.path.clone())) else { continue };
log::info!("reload git repository {:?}", dot_git_dir);
let repository = repository.repo_ptr.lock();
let branch = repository.branch_name();
repository.reload_index();
@@ -2359,6 +2396,8 @@
}
fn build_repository(&mut self, dot_git_path: Arc<Path>, fs: &dyn Fs) -> Option<()> {
log::info!("build git repository {:?}", dot_git_path);
let work_dir_path: Arc<Path> = dot_git_path.parent().unwrap().into();
// Guard against repositories inside the repository metadata
@@ -3138,8 +3177,6 @@ impl BackgroundScanner {
}
async fn process_events(&mut self, mut abs_paths: Vec<PathBuf>) {
log::debug!("received fs events {:?}", abs_paths);
let root_path = self.state.lock().snapshot.abs_path.clone();
let root_canonical_path = match self.fs.canonicalize(&root_path).await {
Ok(path) => path,
@@ -3150,7 +3187,6 @@
};
let mut relative_paths = Vec::with_capacity(abs_paths.len());
let mut unloaded_relative_paths = Vec::new();
abs_paths.sort_unstable();
abs_paths.dedup_by(|a, b| a.starts_with(&b));
abs_paths.retain(|abs_path| {
@@ -3173,7 +3209,6 @@
});
if !parent_dir_is_loaded {
log::debug!("ignoring event {relative_path:?} within unloaded directory");
unloaded_relative_paths.push(relative_path);
return false;
}
@@ -3182,27 +3217,30 @@
}
});
if !relative_paths.is_empty() {
let (scan_job_tx, scan_job_rx) = channel::unbounded();
self.reload_entries_for_paths(
root_path,
root_canonical_path,
&relative_paths,
abs_paths,
Some(scan_job_tx.clone()),
)
.await;
drop(scan_job_tx);
self.scan_dirs(false, scan_job_rx).await;
let (scan_job_tx, scan_job_rx) = channel::unbounded();
self.update_ignore_statuses(scan_job_tx).await;
self.scan_dirs(false, scan_job_rx).await;
if relative_paths.is_empty() {
return;
}
log::debug!("received fs events {:?}", relative_paths);
let (scan_job_tx, scan_job_rx) = channel::unbounded();
self.reload_entries_for_paths(
root_path,
root_canonical_path,
&relative_paths,
abs_paths,
Some(scan_job_tx.clone()),
)
.await;
drop(scan_job_tx);
self.scan_dirs(false, scan_job_rx).await;
let (scan_job_tx, scan_job_rx) = channel::unbounded();
self.update_ignore_statuses(scan_job_tx).await;
self.scan_dirs(false, scan_job_rx).await;
{
let mut state = self.state.lock();
relative_paths.extend(unloaded_relative_paths);
state.reload_repositories(&relative_paths, self.fs.as_ref());
state.snapshot.completed_scan_id = state.snapshot.scan_id;
for (_, entry_id) in mem::take(&mut state.removed_entry_ids) {
@@ -3610,23 +3648,28 @@
}
}
let fs_entry = state.insert_entry(fs_entry, self.fs.as_ref());
if let Some(scan_queue_tx) = &scan_queue_tx {
let mut ancestor_inodes = state.snapshot.ancestor_inodes_for_path(&path);
if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
ancestor_inodes.insert(metadata.inode);
smol::block_on(scan_queue_tx.send(ScanJob {
abs_path,
path: path.clone(),
ignore_stack,
ancestor_inodes,
is_external: fs_entry.is_external,
scan_queue: scan_queue_tx.clone(),
}))
.unwrap();
if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) {
if state.should_scan_directory(&fs_entry) {
let mut ancestor_inodes =
state.snapshot.ancestor_inodes_for_path(&path);
if !ancestor_inodes.contains(&metadata.inode) {
ancestor_inodes.insert(metadata.inode);
smol::block_on(scan_queue_tx.send(ScanJob {
abs_path,
path: path.clone(),
ignore_stack,
ancestor_inodes,
is_external: fs_entry.is_external,
scan_queue: scan_queue_tx.clone(),
}))
.unwrap();
}
} else {
fs_entry.kind = EntryKind::UnloadedDir;
}
}
state.insert_entry(fs_entry, self.fs.as_ref());
}
Ok(None) => {
self.remove_repo_path(&path, &mut state.snapshot);

View File

@@ -936,6 +936,119 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
);
}
#[gpui::test]
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let fs_fake = FakeFs::new(cx.background());
fs_fake
.insert_tree(
"/root",
json!({
"a": {},
}),
)
.await;
let tree_fake = Worktree::local(
client_fake,
"/root".as_ref(),
true,
fs_fake,
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
let entry = tree_fake
.update(cx, |tree, cx| {
tree.as_local_mut()
.unwrap()
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
.unwrap();
assert!(entry.is_file());
cx.foreground().run_until_parked();
tree_fake.read_with(cx, |tree, _| {
assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
});
let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let fs_real = Arc::new(RealFs);
let temp_root = temp_tree(json!({
"a": {}
}));
let tree_real = Worktree::local(
client_real,
temp_root.path(),
true,
fs_real,
Default::default(),
&mut cx.to_async(),
)
.await
.unwrap();
let entry = tree_real
.update(cx, |tree, cx| {
tree.as_local_mut()
.unwrap()
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
.unwrap();
assert!(entry.is_file());
cx.foreground().run_until_parked();
tree_real.read_with(cx, |tree, _| {
assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
});
// Test the smallest change: the parent directories already exist, so only the file is new
let entry = tree_real
.update(cx, |tree, cx| {
tree.as_local_mut()
.unwrap()
.create_entry("a/b/c/e.txt".as_ref(), false, cx)
})
.await
.unwrap();
assert!(entry.is_file());
cx.foreground().run_until_parked();
tree_real.read_with(cx, |tree, _| {
assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
});
// Test the largest change: the entire directory chain is new
let entry = tree_real
.update(cx, |tree, cx| {
tree.as_local_mut()
.unwrap()
.create_entry("d/e/f/g.txt".as_ref(), false, cx)
})
.await
.unwrap();
assert!(entry.is_file());
cx.foreground().run_until_parked();
tree_real.read_with(cx, |tree, _| {
assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
assert!(tree.entry_for_path("d/").unwrap().is_dir());
});
}
#[gpui::test(iterations = 100)]
async fn test_random_worktree_operations_during_initial_scan(
cx: &mut TestAppContext,
@@ -1654,6 +1767,23 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
}));
const A_TXT: &'static str = "a.txt";
const B_TXT: &'static str = "b.txt";
const E_TXT: &'static str = "c/d/e.txt";
const F_TXT: &'static str = "f.txt";
const DOTGITIGNORE: &'static str = ".gitignore";
const BUILD_FILE: &'static str = "target/build_file";
let project_path = Path::new("project");
// Set up git repository before creating the worktree.
let work_dir = root.path().join("project");
let mut repo = git_init(work_dir.as_path());
repo.add_ignore_rule(IGNORE_RULE).unwrap();
git_add(A_TXT, &repo);
git_add(E_TXT, &repo);
git_add(DOTGITIGNORE, &repo);
git_commit("Initial commit", &repo);
let tree = Worktree::local(
build_client(cx),
root.path(),
@@ -1665,26 +1795,9 @@
.await
.unwrap();
tree.flush_fs_events(cx).await;
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
.await;
const A_TXT: &'static str = "a.txt";
const B_TXT: &'static str = "b.txt";
const E_TXT: &'static str = "c/d/e.txt";
const F_TXT: &'static str = "f.txt";
const DOTGITIGNORE: &'static str = ".gitignore";
const BUILD_FILE: &'static str = "target/build_file";
let project_path: &Path = &Path::new("project");
let work_dir = root.path().join("project");
let mut repo = git_init(work_dir.as_path());
repo.add_ignore_rule(IGNORE_RULE).unwrap();
git_add(Path::new(A_TXT), &repo);
git_add(Path::new(E_TXT), &repo);
git_add(Path::new(DOTGITIGNORE), &repo);
git_commit("Initial commit", &repo);
tree.flush_fs_events(cx).await;
deterministic.run_until_parked();
// Check that the right git state is observed on startup
@@ -1704,39 +1817,39 @@
);
});
// Modify a file in the working copy.
std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
tree.flush_fs_events(cx).await;
deterministic.run_until_parked();
// The worktree detects that the file's git status has changed.
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
assert_eq!(
snapshot.status_for_file(project_path.join(A_TXT)),
Some(GitFileStatus::Modified)
);
});
git_add(Path::new(A_TXT), &repo);
git_add(Path::new(B_TXT), &repo);
// Create a commit in the git repository.
git_add(A_TXT, &repo);
git_add(B_TXT, &repo);
git_commit("Committing modified and added", &repo);
tree.flush_fs_events(cx).await;
deterministic.run_until_parked();
// Check that repo-only changes are tracked
// The worktree detects that the files' git statuses have changed.
tree.read_with(cx, |tree, _cx| {
let snapshot = tree.snapshot();
assert_eq!(
snapshot.status_for_file(project_path.join(F_TXT)),
Some(GitFileStatus::Added)
);
assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
});
// Modify files in the working copy and perform git operations on other files.
git_reset(0, &repo);
git_remove_index(Path::new(B_TXT), &repo);
git_stash(&mut repo);

View File

@@ -27,6 +27,7 @@ serde_derive.workspace = true
serde_json.workspace = true
anyhow.workspace = true
schemars.workspace = true
pretty_assertions.workspace = true
unicase = "2.6"
[dev-dependencies]

View File

@@ -64,7 +64,7 @@ pub struct ProjectPanel {
pending_serialization: Task<Option<()>>,
}
#[derive(Copy, Clone)]
#[derive(Copy, Clone, Debug)]
struct Selection {
worktree_id: WorktreeId,
entry_id: ProjectEntryId,
@@ -547,7 +547,7 @@ impl ProjectPanel {
worktree_id,
entry_id: NEW_ENTRY_ID,
});
let new_path = entry.path.join(&filename);
let new_path = entry.path.join(&filename.trim_start_matches("/"));
if path_already_exists(new_path.as_path()) {
return None;
}
@@ -588,6 +588,7 @@ impl ProjectPanel {
if selection.entry_id == edited_entry_id {
selection.worktree_id = worktree_id;
selection.entry_id = new_entry.id;
this.expand_to_selection(cx);
}
}
this.update_visible_entries(None, cx);
@@ -965,6 +966,24 @@ impl ProjectPanel {
Some((worktree, entry))
}
fn expand_to_selection(&mut self, cx: &mut ViewContext<Self>) -> Option<()> {
let (worktree, entry) = self.selected_entry(cx)?;
let expanded_dir_ids = self.expanded_dir_ids.entry(worktree.id()).or_default();
for path in entry.path.ancestors() {
let Some(entry) = worktree.entry_for_path(path) else {
continue;
};
if entry.is_dir() {
if let Err(idx) = expanded_dir_ids.binary_search(&entry.id) {
expanded_dir_ids.insert(idx, entry.id);
}
}
}
Some(())
}
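// The binary search above is the standard idiom for keeping a Vec sorted and
// duplicate-free: `Err(idx)` carries the insertion point when the value is
// absent, while `Ok(_)` means it is already present. A minimal sketch:
fn insert_sorted_unique(ids: &mut Vec<u64>, id: u64) {
    if let Err(idx) = ids.binary_search(&id) {
        ids.insert(idx, id); // idx is the position that keeps `ids` sorted
    }
}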
fn update_visible_entries(
&mut self,
new_selected_entry: Option<(WorktreeId, ProjectEntryId)>,
@@ -1592,6 +1611,7 @@ impl ClipboardEntry {
mod tests {
use super::*;
use gpui::{TestAppContext, ViewHandle};
use pretty_assertions::assert_eq;
use project::FakeFs;
use serde_json::json;
use settings::SettingsStore;
@@ -2002,6 +2022,133 @@ mod tests {
);
}
#[gpui::test(iterations = 30)]
async fn test_adding_directories_via_file(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root1",
json!({
".dockerignore": "",
".git": {
"HEAD": "",
},
"a": {
"0": { "q": "", "r": "", "s": "" },
"1": { "t": "", "u": "" },
"2": { "v": "", "w": "", "x": "", "y": "" },
},
"b": {
"3": { "Q": "" },
"4": { "R": "", "S": "", "T": "", "U": "" },
},
"C": {
"5": {},
"6": { "V": "", "W": "" },
"7": { "X": "" },
"8": { "Y": {}, "Z": "" }
}
}),
)
.await;
fs.insert_tree(
"/root2",
json!({
"d": {
"9": ""
},
"e": {}
}),
)
.await;
let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
select_path(&panel, "root1", cx);
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v root1 <== selected",
" > .git",
" > a",
" > b",
" > C",
" .dockerignore",
"v root2",
" > d",
" > e",
]
);
// Add a file with the root folder selected. The filename editor is placed
// before the first file in the root folder.
panel.update(cx, |panel, cx| panel.new_file(&NewFile, cx));
cx.read_window(window_id, |cx| {
let panel = panel.read(cx);
assert!(panel.filename_editor.is_focused(cx));
});
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v root1",
" > .git",
" > a",
" > b",
" > C",
" [EDITOR: ''] <== selected",
" .dockerignore",
"v root2",
" > d",
" > e",
]
);
let confirm = panel.update(cx, |panel, cx| {
panel.filename_editor.update(cx, |editor, cx| {
editor.set_text("/bdir1/dir2/the-new-filename", cx)
});
panel.confirm(&Confirm, cx).unwrap()
});
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v root1",
" > .git",
" > a",
" > b",
" > C",
" [PROCESSING: '/bdir1/dir2/the-new-filename'] <== selected",
" .dockerignore",
"v root2",
" > d",
" > e",
]
);
confirm.await.unwrap();
assert_eq!(
visible_entries_as_strings(&panel, 0..13, cx),
&[
"v root1",
" > .git",
" > a",
" > b",
" v bdir1",
" v dir2",
" the-new-filename <== selected",
" > C",
" .dockerignore",
"v root2",
" > d",
" > e",
]
);
}
#[gpui::test]
async fn test_copy_paste(cx: &mut gpui::TestAppContext) {
init_test(cx);

View File

@@ -38,5 +38,5 @@ tree-sitter-json = "*"
gpui = { path = "../gpui", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
indoc.workspace = true
pretty_assertions = "1.3.0"
pretty_assertions.workspace = true
unindent.workspace = true

View File

@@ -2489,7 +2489,12 @@ impl ToOffset for Point {
impl ToOffset for usize {
fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
assert!(*self <= snapshot.len(), "offset {self} is out of range");
assert!(
*self <= snapshot.len(),
"offset {} is out of range, max allowed is {}",
self,
snapshot.len()
);
*self
}
}