From 373a17acf413c293e445bbb7b266dc970079a7ba Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 11:11:38 -0400 Subject: [PATCH 01/96] Add ability to display backgrounds for inlay hints (#18010) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds the ability to display backgrounds for inlay hints within the editor. This is controlled by the new `inlay_hints.show_background` setting. This setting defaults to `false`. To enable the setting, add the following to your `settings.json`: ```json { "inlay_hints": { "enabled": true, "show_background": true } } ``` When enabled, the inlay hint backgrounds will use the `hint.background` color from the theme. | Disabled | Enabled | | -------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------- | | Screenshot 2024-09-17 at 4 21 53 PM | Screenshot 2024-09-17 at 4 21 43 PM | Related issues: - #12485 - #17392 Release Notes: - Added an `inlay_hints.show_background` setting to allow displaying backgrounds for inlay hints in the editor. - This setting defaults to `false`. - If enabled, the inlay hint backgrounds will use the `hint.background` color from the theme. 
--- assets/settings/default.json | 4 ++++ crates/assistant/src/prompt_library.rs | 6 ++---- crates/collab/src/tests/editor_tests.rs | 4 ++++ crates/editor/src/editor.rs | 21 +++++++++++++++------ crates/editor/src/hover_links.rs | 1 + crates/editor/src/hover_popover.rs | 1 + crates/editor/src/inlay_hint_cache.rs | 15 +++++++++++++++ crates/language/src/language_settings.rs | 8 ++++++++ docs/src/configuring-zed.md | 1 + 9 files changed, 51 insertions(+), 10 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 35b2ca20f2..a9e1865258 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -318,6 +318,10 @@ "show_parameter_hints": true, // Corresponds to null/None LSP hint type value. "show_other_hints": true, + // Whether to show a background for inlay hints. + // + // If set to `true`, the background will use the `hint.background` color from the current theme. + "show_background": false, // Time to wait after editing the buffer, before requesting the hints, // set to 0 to disable debouncing. 
"edit_debounce_ms": 700, diff --git a/crates/assistant/src/prompt_library.rs b/crates/assistant/src/prompt_library.rs index c99a7c1521..76ee95d507 100644 --- a/crates/assistant/src/prompt_library.rs +++ b/crates/assistant/src/prompt_library.rs @@ -921,10 +921,8 @@ impl PromptLibrary { scrollbar_width: Pixels::ZERO, syntax: cx.theme().syntax().clone(), status: cx.theme().status().clone(), - inlay_hints_style: HighlightStyle { - color: Some(cx.theme().status().hint), - ..HighlightStyle::default() - }, + inlay_hints_style: + editor::make_inlay_hints_style(cx), suggestions_style: HighlightStyle { color: Some(cx.theme().status().predictive), ..HighlightStyle::default() diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index de03144774..7fb1a49f87 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -1524,6 +1524,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( show_type_hints: true, show_parameter_hints: false, show_other_hints: true, + show_background: false, }) }); }); @@ -1538,6 +1539,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( show_type_hints: true, show_parameter_hints: false, show_other_hints: true, + show_background: false, }) }); }); @@ -1786,6 +1788,7 @@ async fn test_inlay_hint_refresh_is_forwarded( show_type_hints: false, show_parameter_hints: false, show_other_hints: false, + show_background: false, }) }); }); @@ -1800,6 +1803,7 @@ async fn test_inlay_hint_refresh_is_forwarded( show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); }); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 61a59665c1..f797f82832 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -412,6 +412,19 @@ impl Default for EditorStyle { } } +pub fn make_inlay_hints_style(cx: &WindowContext) -> HighlightStyle { + let show_background = all_language_settings(None, cx) + 
.language(None) + .inlay_hints + .show_background; + + HighlightStyle { + color: Some(cx.theme().status().hint), + background_color: show_background.then(|| cx.theme().status().hint_background), + ..HighlightStyle::default() + } +} + type CompletionId = usize; #[derive(Clone, Debug)] @@ -10034,9 +10047,8 @@ impl Editor { syntax: cx.editor_style.syntax.clone(), status: cx.editor_style.status.clone(), inlay_hints_style: HighlightStyle { - color: Some(cx.theme().status().hint), font_weight: Some(FontWeight::BOLD), - ..HighlightStyle::default() + ..make_inlay_hints_style(cx) }, suggestions_style: HighlightStyle { color: Some(cx.theme().status().predictive), @@ -12992,10 +13004,7 @@ impl Render for Editor { scrollbar_width: EditorElement::SCROLLBAR_WIDTH, syntax: cx.theme().syntax().clone(), status: cx.theme().status().clone(), - inlay_hints_style: HighlightStyle { - color: Some(cx.theme().status().hint), - ..HighlightStyle::default() - }, + inlay_hints_style: make_inlay_hints_style(cx), suggestions_style: HighlightStyle { color: Some(cx.theme().status().predictive), ..HighlightStyle::default() diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 3f590273df..ac30b91996 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -1205,6 +1205,7 @@ mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index adbb5899ff..f6eb837ae8 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -1337,6 +1337,7 @@ mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index 62c5cde9d8..24ccf64c4c 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ 
b/crates/editor/src/inlay_hint_cache.rs @@ -1296,6 +1296,7 @@ pub mod tests { show_type_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Type)), show_parameter_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Parameter)), show_other_hints: allowed_hint_kinds.contains(&None), + show_background: false, }) }); @@ -1428,6 +1429,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -1547,6 +1549,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -1777,6 +1780,7 @@ pub mod tests { show_type_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Type)), show_parameter_hints: allowed_hint_kinds.contains(&Some(InlayHintKind::Parameter)), show_other_hints: allowed_hint_kinds.contains(&None), + show_background: false, }) }); @@ -1941,6 +1945,7 @@ pub mod tests { show_parameter_hints: new_allowed_hint_kinds .contains(&Some(InlayHintKind::Parameter)), show_other_hints: new_allowed_hint_kinds.contains(&None), + show_background: false, }) }); cx.executor().run_until_parked(); @@ -1987,6 +1992,7 @@ pub mod tests { show_parameter_hints: another_allowed_hint_kinds .contains(&Some(InlayHintKind::Parameter)), show_other_hints: another_allowed_hint_kinds.contains(&None), + show_background: false, }) }); cx.executor().run_until_parked(); @@ -2047,6 +2053,7 @@ pub mod tests { show_parameter_hints: final_allowed_hint_kinds .contains(&Some(InlayHintKind::Parameter)), show_other_hints: final_allowed_hint_kinds.contains(&None), + show_background: false, }) }); cx.executor().run_until_parked(); @@ -2122,6 +2129,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -2256,6 +2264,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -2551,6 +2560,7 @@ pub mod tests { 
show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -2902,6 +2912,7 @@ pub mod tests { show_type_hints: false, show_parameter_hints: false, show_other_hints: false, + show_background: false, }) }); @@ -3096,6 +3107,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); cx.executor().run_until_parked(); @@ -3131,6 +3143,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -3225,6 +3238,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); @@ -3305,6 +3319,7 @@ pub mod tests { show_type_hints: true, show_parameter_hints: true, show_other_hints: true, + show_background: false, }) }); cx.executor().run_until_parked(); diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index b465173cee..77c9a1d18c 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -741,6 +741,14 @@ pub struct InlayHintSettings { /// Default: true #[serde(default = "default_true")] pub show_other_hints: bool, + /// Whether to show a background for inlay hints. + /// + /// If set to `true`, the background will use the `hint.background` color + /// from the current theme. + /// + /// Default: false + #[serde(default)] + pub show_background: bool, /// Whether or not to debounce inlay hints updates after buffer edits. /// /// Set to 0 to disable debouncing. 
diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 87cd053f1a..382c33c216 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -982,6 +982,7 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files "show_type_hints": true, "show_parameter_hints": true, "show_other_hints": true, + "show_background": false, "edit_debounce_ms": 700, "scroll_debounce_ms": 50 } From a7977aa64dc8a4c600eb167d645de880e9964d68 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 18 Sep 2024 17:18:56 +0200 Subject: [PATCH 02/96] Tweak multibuffer header padding (#18011) --- crates/editor/src/element.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 7e2b3cc63f..47107b9754 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -2079,13 +2079,13 @@ impl EditorElement { .id(("path excerpt header", EntityId::from(block_id))) .w_full() .px(header_padding) + .pt(header_padding) .child( h_flex() .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667))) .id("path header block") .h(2. * cx.line_height()) - .pl(gpui::px(12.)) - .pr(gpui::px(8.)) + .px(gpui::px(12.)) .rounded_md() .shadow_md() .border_1() From eda7e88fd4a9e500c951344f6ddd659ee13ca0fc Mon Sep 17 00:00:00 2001 From: Marek Fajkus Date: Wed, 18 Sep 2024 18:51:11 +0200 Subject: [PATCH 03/96] nix: Fix (potential) glibc errors in dev shell (#17974) Previously the rustc and cargo did were not declared dependencies supplied to devshell. This means that shell relied some impure cargo and rustc version found in the system. This lead to issues with GLIBC version on systems which have different GLIBC version globally. This package exposes nixpkgs rustc and cargo version into the shell preventing issues with incompatibility. 
Release Notes: - N/A --- nix/shell.nix | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/nix/shell.nix b/nix/shell.nix index 03e298e132..476374b67e 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -20,6 +20,8 @@ in wayland xorg.libxcb vulkan-loader + rustc + cargo ]; in pkgs.mkShell.override {inherit stdenv;} { @@ -36,10 +38,7 @@ in inherit buildInputs; shellHook = '' - export LD_LIBRARY_PATH="${pkgs.lib.makeLibraryPath ([ - pkgs.vulkan-loader - ] - ++ buildInputs)}:$LD_LIBRARY_PATH" + export LD_LIBRARY_PATH="${pkgs.lib.makeLibraryPath buildInputs}:$LD_LIBRARY_PATH" export PROTOC="${pkgs.protobuf}/bin/protoc" ''; From 826777a257b8e43900fd1d8cbf7cee3757bbe765 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 18 Sep 2024 11:15:46 -0600 Subject: [PATCH 04/96] Tidy up LSP (#17973) Release Notes: - N/A --- crates/extension/src/extension_lsp_adapter.rs | 2 +- crates/language/src/language.rs | 10 +++++++--- crates/language/src/language_registry.rs | 12 +++++------- crates/project/src/lsp_store.rs | 11 ----------- crates/remote_server/src/headless_project.rs | 6 +----- 5 files changed, 14 insertions(+), 27 deletions(-) diff --git a/crates/extension/src/extension_lsp_adapter.rs b/crates/extension/src/extension_lsp_adapter.rs index f82b6c9e0e..d6125241f1 100644 --- a/crates/extension/src/extension_lsp_adapter.rs +++ b/crates/extension/src/extension_lsp_adapter.rs @@ -38,7 +38,7 @@ impl LspAdapter for ExtensionLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Arc, + _: Option>, delegate: Arc, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 7901a49d00..309a67a1a9 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -208,7 +208,7 @@ impl CachedLspAdapter { pub async fn get_language_server_command( self: Arc, - container_dir: Arc, + container_dir: Option>, delegate: Arc, cx: &mut AsyncAppContext, ) 
-> Result { @@ -294,7 +294,7 @@ pub trait LspAdapter: 'static + Send + Sync { fn get_language_server_command<'a>( self: Arc, - container_dir: Arc, + container_dir: Option>, delegate: Arc, mut cached_binary: futures::lock::MutexGuard<'a, Option>, cx: &'a mut AsyncAppContext, @@ -325,6 +325,10 @@ pub trait LspAdapter: 'static + Send + Sync { return Ok(cached_binary.clone()); } + let Some(container_dir) = container_dir else { + anyhow::bail!("cannot download language servers for remotes (yet)") + }; + if !container_dir.exists() { smol::fs::create_dir_all(&container_dir) .await @@ -1664,7 +1668,7 @@ impl LspAdapter for FakeLspAdapter { fn get_language_server_command<'a>( self: Arc, - _: Arc, + _: Option>, _: Arc, _: futures::lock::MutexGuard<'a, Option>, _: &'a mut AsyncAppContext, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 918da4873f..17ebef50e8 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -869,12 +869,10 @@ impl LanguageRegistry { adapter.name.0 ); - let download_dir = &self - .language_server_download_dir - .clone() - .ok_or_else(|| anyhow!("language server download directory has not been assigned before starting server")) - .log_err()?; - let container_dir: Arc = Arc::from(download_dir.join(adapter.name.0.as_ref())); + let container_dir: Option> = self + .language_server_download_dir + .as_ref() + .map(|dir| Arc::from(dir.join(adapter.name.0.as_ref()))); let root_path = root_path.clone(); let this = Arc::downgrade(self); @@ -969,7 +967,7 @@ impl LanguageRegistry { Some(PendingLanguageServer { server_id, task, - container_dir: Some(container_dir), + container_dir, }) } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 24852afd70..6dd528147b 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4742,17 +4742,6 @@ impl LspStore { .reorder_language_servers(&language, 
enabled_lsp_adapters); } - /* - ssh client owns the lifecycle of the language servers - ssh host actually runs the binaries - - in the future: ssh client will use the local extensions to get the downloads etc. - and send them up over the ssh connection (but today) we'll just the static config - - languages::() <-- registers lsp adapters - on the ssh host we won't have adapters for the LSPs - */ - fn start_language_server_on_ssh_host( &mut self, worktree: &Model, diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index ec26bddfc3..bbd82281d8 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -42,11 +42,7 @@ impl HeadlessProject { } pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { - let mut languages = LanguageRegistry::new(cx.background_executor().clone()); - languages - .set_language_server_download_dir(PathBuf::from("/Users/conrad/what-could-go-wrong")); - - let languages = Arc::new(languages); + let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); let worktree_store = cx.new_model(|_| WorktreeStore::new(true, fs.clone())); let buffer_store = cx.new_model(|cx| { From fb7a7a564a54aa12f0f97a9dd36bf9bf30c16807 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 18 Sep 2024 11:15:54 -0600 Subject: [PATCH 05/96] ssh remoting: open settings locally (#18020) Release Notes: - ssh remoting: Open settings files in a non-remote window. 
--- crates/workspace/src/workspace.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index a7c63c57f6..98ac49992d 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1578,7 +1578,7 @@ impl Workspace { T: 'static, F: 'static + FnOnce(&mut Workspace, &mut ViewContext) -> T, { - if self.project.read(cx).is_local_or_ssh() { + if self.project.read(cx).is_local() { Task::Ready(Some(Ok(callback(self, cx)))) } else { let env = self.project.read(cx).cli_environment(cx); From 772bda54a21079be9c6019f8a8836576add71d8c Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 13:35:55 -0400 Subject: [PATCH 06/96] Move remaining self-hosted jobs to BuildJet (#18018) --- .github/workflows/bump_patch_version.yml | 3 +-- .github/workflows/deploy_collab.yml | 6 ++---- .github/workflows/randomized_tests.yml | 3 +-- .github/workflows/release_nightly.yml | 3 +-- 4 files changed, 5 insertions(+), 10 deletions(-) diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index e2789a7da7..d05da31e6a 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -15,8 +15,7 @@ concurrency: jobs: bump_patch_version: runs-on: - - self-hosted - - test + - buildjet-16vcpu-ubuntu-2204 steps: - name: Checkout code uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 6801be2a54..7abd52e5a6 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -61,8 +61,7 @@ jobs: - style - tests runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH @@ -89,8 +88,7 @@ jobs: needs: - publish runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: 
- name: Sign into Kubernetes diff --git a/.github/workflows/randomized_tests.yml b/.github/workflows/randomized_tests.yml index 8b628fe5a2..57f43d4961 100644 --- a/.github/workflows/randomized_tests.yml +++ b/.github/workflows/randomized_tests.yml @@ -19,8 +19,7 @@ jobs: tests: name: Run randomized tests runs-on: - - self-hosted - - randomized-tests + - buildjet-16vcpu-ubuntu-2204 steps: - name: Install Node uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4 diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index d8e6b6d919..450c63b82f 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -97,8 +97,7 @@ jobs: name: Create a Linux *.tar.gz bundle for x86 if: github.repository_owner == 'zed-industries' runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 needs: tests env: DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} From 97dc1d193f9a4b7cc90a63c7cc01870dbf05db79 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 14:24:09 -0400 Subject: [PATCH 07/96] Use `@tag.doctype` for HTML doctype highlights (#18024) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR updates the following extensions to use the `@tag.doctype` selector for highlighting HTML doctypes: - Astro - Elixir (HEEx) - HTML Additionally, it also changes the base selector for HTML tags from `@keyword` to `@tag`. | Before | After | | ------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | | Screenshot 2024-09-18 at 2 04 41 PM | Screenshot 2024-09-18 at 2 05 00 PM | Extracted this from https://github.com/zed-industries/zed/pull/16723. 
Release Notes: - N/A --------- Co-authored-by: 狐狸 <134658521+Huliiiiii@users.noreply.github.com> --- docs/src/extensions/languages.md | 1 + extensions/astro/languages/astro/highlights.scm | 2 +- extensions/elixir/languages/heex/highlights.scm | 2 +- extensions/html/languages/html/highlights.scm | 4 ++-- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index c003285303..174a27c6e6 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -123,6 +123,7 @@ This query marks strings, object keys, and numbers for highlighting. The followi | @string.special | Captures special strings | | @string.special.symbol | Captures special symbols | | @tag | Captures tags | +| @tag.doctype | Captures doctypes (e.g., in HTML) | | @text.literal | Captures literal text | | @title | Captures titles | | @type | Captures types | diff --git a/extensions/astro/languages/astro/highlights.scm b/extensions/astro/languages/astro/highlights.scm index 491e8cc337..a565e22b6e 100644 --- a/extensions/astro/languages/astro/highlights.scm +++ b/extensions/astro/languages/astro/highlights.scm @@ -1,6 +1,6 @@ (tag_name) @tag (erroneous_end_tag_name) @keyword -(doctype) @constant +(doctype) @tag.doctype (attribute_name) @property (attribute_value) @string (comment) @comment diff --git a/extensions/elixir/languages/heex/highlights.scm b/extensions/elixir/languages/heex/highlights.scm index 5252b71fac..9662c95524 100644 --- a/extensions/elixir/languages/heex/highlights.scm +++ b/extensions/elixir/languages/heex/highlights.scm @@ -27,7 +27,7 @@ "=" @operator ; HEEx inherits the DOCTYPE tag from HTML -(doctype) @constant +(doctype) @tag.doctype (comment) @comment diff --git a/extensions/html/languages/html/highlights.scm b/extensions/html/languages/html/highlights.scm index e2b8e35bf4..6bb0c23374 100644 --- a/extensions/html/languages/html/highlights.scm +++ 
b/extensions/html/languages/html/highlights.scm @@ -1,6 +1,6 @@ -(tag_name) @keyword +(tag_name) @tag (erroneous_end_tag_name) @keyword -(doctype) @constant +(doctype) @tag.doctype (attribute_name) @property (attribute_value) @string (comment) @comment From 30ef7e62bfff3b2e4c5cdbeead502d5a0814c83e Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 14:28:00 -0400 Subject: [PATCH 08/96] Fix arm buildjet (#18023) Run `apt-get update` before `apt-get install` on Linux. Hopefully will fix building on Linux Arm. --- script/linux | 1 + 1 file changed, 1 insertion(+) diff --git a/script/linux b/script/linux index d894d33ea8..eca3bf7f7d 100755 --- a/script/linux +++ b/script/linux @@ -33,6 +33,7 @@ if [[ -n $apt ]]; then elfutils libsqlite3-dev ) + $maysudo "$apt" update $maysudo "$apt" install -y "${deps[@]}" exit 0 fi From 71b6f739cdce3303ba129e41673cb6e38044f279 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 15:08:19 -0400 Subject: [PATCH 09/96] Pin actions/checkout action to 692973e (#18030) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [actions/checkout](https://redirect.github.com/actions/checkout) | action | pinDigest | -> `692973e` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. 
--- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/bump_nightly_tag.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/bump_nightly_tag.yml b/.github/workflows/bump_nightly_tag.yml index 54a3970a1c..0959ae9677 100644 --- a/.github/workflows/bump_nightly_tag.yml +++ b/.github/workflows/bump_nightly_tag.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 with: fetch-depth: 0 From 97f5fcf8e6a42c07d0b12982030b701246ac3d65 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 15:18:29 -0400 Subject: [PATCH 10/96] Fix nightly linux x86 build (#18029) Makes our nightly script for Linux x86 (broken) match the steps for Linux ARM (working). --- .github/workflows/release_nightly.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 450c63b82f..17db66a264 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -113,6 +113,12 @@ jobs: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install Linux dependencies + run: ./script/linux + + - name: Limit target directory size + run: script/clear-target-dir-if-larger-than 100 + - name: Set release channel to nightly run: | set -euo pipefail From 9016de5d6350e0a9bbf6a51076c04acd9b7fba96 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 18 Sep 2024 15:56:40 -0400 Subject: [PATCH 11/96] Update Rust crate anyhow to v1.0.89 (#18031) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | 
|---|---|---|---| | [anyhow](https://redirect.github.com/dtolnay/anyhow) | workspace.dependencies | patch | `1.0.86` -> `1.0.89` | --- ### Release Notes
dtolnay/anyhow (anyhow) ### [`v1.0.89`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.89) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.88...1.0.89) - Make anyhow::Error's `UnwindSafe` and `RefUnwindSafe` impl consistently available between versions of Rust newer and older than 1.72 ([#​386](https://redirect.github.com/dtolnay/anyhow/issues/386)) ### [`v1.0.88`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.88) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.87...1.0.88) - Documentation improvements ### [`v1.0.87`](https://redirect.github.com/dtolnay/anyhow/releases/tag/1.0.87) [Compare Source](https://redirect.github.com/dtolnay/anyhow/compare/1.0.86...1.0.87) - Support more APIs, including `Error::new` and `Error::chain`, in no-std mode on Rust 1.81+ ([#​383](https://redirect.github.com/dtolnay/anyhow/issues/383))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 930415440b..0640aff19c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -263,9 +263,9 @@ checksum = "34cd60c5e3152cef0a592f1b296f1cc93715d89d2551d85315828c3a09575ff4" [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" [[package]] name = "approx" From 2c8a6ee7cc18cb8b3e29fa4c7efa74dde8458f4f Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 18 Sep 2024 23:29:34 +0200 Subject: [PATCH 12/96] remote_server: Remove dependency on libssl and libcrypto (#15446) Fixes: #15599 Release Notes: - N/A --------- Co-authored-by: Mikayla Co-authored-by: Conrad --- Cargo.lock | 176 ++++++++++++++--- Cargo.toml | 7 + crates/auto_update/Cargo.toml | 1 - crates/auto_update/src/auto_update.rs | 3 +- crates/client/Cargo.toml | 4 +- crates/client/src/client.rs | 42 ++-- crates/collab/Cargo.toml | 1 + crates/collab/src/llm.rs | 3 +- crates/collab/src/rpc.rs | 8 +- crates/evals/Cargo.toml | 1 + crates/evals/src/eval.rs | 22 ++- crates/extension/Cargo.toml | 1 + crates/extension/src/extension_builder.rs | 2 + 
crates/extension/src/extension_store.rs | 13 +- crates/extension/src/extension_store_test.rs | 24 ++- crates/extension_cli/Cargo.toml | 2 +- crates/extension_cli/src/main.rs | 10 +- crates/git_hosting_providers/Cargo.toml | 1 - .../src/providers/codeberg.rs | 10 +- .../src/providers/github.rs | 10 +- crates/gpui/Cargo.toml | 6 +- crates/gpui/examples/image/image.rs | 1 + crates/gpui/src/app.rs | 31 ++- crates/gpui/src/elements/img.rs | 7 +- crates/gpui/src/gpui.rs | 1 + crates/http_client/Cargo.toml | 5 +- crates/http_client/src/async_body.rs | 109 +++++++++++ crates/http_client/src/github.rs | 5 +- crates/http_client/src/http_client.rs | 179 +++++++++--------- crates/isahc_http_client/Cargo.toml | 22 +++ crates/isahc_http_client/LICENSE-APACHE | 1 + .../src/isahc_http_client.rs | 93 +++++++++ crates/ollama/Cargo.toml | 1 - crates/ollama/src/ollama.rs | 31 ++- crates/project/src/lsp_store.rs | 12 +- crates/semantic_index/Cargo.toml | 1 + crates/semantic_index/examples/index.rs | 7 +- crates/semantic_index/src/embedding/ollama.rs | 2 +- crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 31 ++- script/bundle-linux | 9 +- 41 files changed, 670 insertions(+), 226 deletions(-) create mode 100644 crates/http_client/src/async_body.rs create mode 100644 crates/isahc_http_client/Cargo.toml create mode 120000 crates/isahc_http_client/LICENSE-APACHE create mode 100644 crates/isahc_http_client/src/isahc_http_client.rs diff --git a/Cargo.lock b/Cargo.lock index 0640aff19c..652c584fd5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -876,6 +876,20 @@ version = "4.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" +[[package]] +name = "async-tls" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfeefd0ca297cbbb3bd34fd6b228401c2a5177038257afd751bc29f0a2da4795" +dependencies = [ + "futures-core", + "futures-io", + "rustls 0.20.9", + 
"rustls-pemfile 1.0.4", + "webpki", + "webpki-roots 0.22.6", +] + [[package]] name = "async-trait" version = "0.1.81" @@ -893,8 +907,8 @@ version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1e9efbe14612da0a19fb983059a0b621e9cf6225d7018ecab4f9988215540dc" dependencies = [ - "async-native-tls", "async-std", + "async-tls", "futures-io", "futures-util", "log", @@ -981,7 +995,6 @@ dependencies = [ "editor", "gpui", "http_client", - "isahc", "log", "markdown_preview", "menu", @@ -1049,7 +1062,7 @@ dependencies = [ "fastrand 2.1.1", "hex", "http 0.2.12", - "ring", + "ring 0.17.8", "time", "tokio", "tracing", @@ -1218,7 +1231,7 @@ dependencies = [ "once_cell", "p256", "percent-encoding", - "ring", + "ring 0.17.8", "sha2", "subtle", "time", @@ -1331,7 +1344,7 @@ dependencies = [ "once_cell", "pin-project-lite", "pin-utils", - "rustls", + "rustls 0.21.12", "tokio", "tracing", ] @@ -2405,6 +2418,8 @@ dependencies = [ "rand 0.8.5", "release_channel", "rpc", + "rustls 0.20.9", + "rustls-native-certs 0.8.0", "schemars", "serde", "serde_json", @@ -2553,6 +2568,7 @@ dependencies = [ "http_client", "hyper", "indoc", + "isahc_http_client", "jsonwebtoken", "language", "language_model", @@ -4015,6 +4031,7 @@ dependencies = [ "git", "gpui", "http_client", + "isahc_http_client", "language", "languages", "node_runtime", @@ -4110,6 +4127,7 @@ dependencies = [ "http_client", "indexed_docs", "isahc", + "isahc_http_client", "language", "log", "lsp", @@ -4148,7 +4166,7 @@ dependencies = [ "env_logger", "extension", "fs", - "http_client", + "isahc_http_client", "language", "log", "rpc", @@ -4395,7 +4413,7 @@ dependencies = [ "futures-core", "futures-sink", "nanorand", - "spin", + "spin 0.9.8", ] [[package]] @@ -4904,7 +4922,6 @@ dependencies = [ "git", "gpui", "http_client", - "isahc", "pretty_assertions", "regex", "serde", @@ -5537,12 +5554,11 @@ dependencies = [ "anyhow", "derive_more", "futures 0.3.30", - "futures-lite 1.13.0", - "http 1.1.0", - 
"isahc", + "http 0.2.12", "log", "serde", "serde_json", + "smol", "url", ] @@ -5604,8 +5620,8 @@ dependencies = [ "http 0.2.12", "hyper", "log", - "rustls", - "rustls-native-certs", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", "tokio", "tokio-rustls", ] @@ -6017,6 +6033,17 @@ dependencies = [ "waker-fn", ] +[[package]] +name = "isahc_http_client" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.30", + "http_client", + "isahc", + "util", +] + [[package]] name = "itertools" version = "0.10.5" @@ -6121,7 +6148,7 @@ dependencies = [ "base64 0.21.7", "js-sys", "pem", - "ring", + "ring 0.17.8", "serde", "serde_json", "simple_asn1", @@ -6372,7 +6399,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin", + "spin 0.9.8", ] [[package]] @@ -7483,7 +7510,6 @@ dependencies = [ "anyhow", "futures 0.3.30", "http_client", - "isahc", "schemars", "serde", "serde_json", @@ -9175,7 +9201,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", @@ -9239,6 +9265,21 @@ dependencies = [ "util", ] +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + [[package]] name = "ring" version = "0.17.8" @@ -9249,8 +9290,8 @@ dependencies = [ "cfg-if", "getrandom 0.2.15", "libc", - "spin", - "untrusted", + "spin 0.9.8", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -9406,7 +9447,7 @@ dependencies = [ "futures 0.3.30", "glob", "rand 0.8.5", - "ring", + "ring 0.17.8", "serde", "serde_json", "shellexpand 3.1.0", @@ -9527,6 +9568,18 @@ dependencies = [ "rustix 0.38.35", ] +[[package]] 
+name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + [[package]] name = "rustls" version = "0.21.12" @@ -9534,7 +9587,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring", + "ring 0.17.8", "rustls-webpki", "sct", ] @@ -9546,7 +9599,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", - "rustls-pemfile", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.1.3", + "rustls-pki-types", "schannel", "security-framework", ] @@ -9560,14 +9626,30 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +dependencies = [ + "base64 0.22.1", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" + [[package]] name = "rustls-webpki" version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -9681,8 
+9763,8 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring", - "untrusted", + "ring 0.17.8", + "untrusted 0.9.0", ] [[package]] @@ -9878,6 +9960,7 @@ dependencies = [ "gpui", "heed", "http_client", + "isahc_http_client", "language", "language_model", "languages", @@ -10437,6 +10520,12 @@ dependencies = [ "smallvec", ] +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "spin" version = "0.9.8" @@ -10559,8 +10648,8 @@ dependencies = [ "paste", "percent-encoding", "rust_decimal", - "rustls", - "rustls-pemfile", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "serde", "serde_json", "sha2", @@ -10573,7 +10662,7 @@ dependencies = [ "tracing", "url", "uuid", - "webpki-roots", + "webpki-roots 0.25.4", ] [[package]] @@ -11705,7 +11794,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.12", "tokio", ] @@ -12232,7 +12321,6 @@ dependencies = [ "http 0.2.12", "httparse", "log", - "native-tls", "rand 0.8.5", "sha1", "thiserror", @@ -12417,6 +12505,12 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "untrusted" version = "0.9.0" @@ -13271,6 +13365,25 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + [[package]] name = "webpki-roots" version = "0.25.4" @@ -14305,6 +14418,7 @@ dependencies = [ "inline_completion_button", "install_cli", "isahc", + "isahc_http_client", "journal", "language", "language_model", diff --git a/Cargo.toml b/Cargo.toml index ec3138179b..2071fdcb6f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,6 +52,7 @@ members = [ "crates/indexed_docs", "crates/inline_completion_button", "crates/install_cli", + "crates/isahc_http_client", "crates/journal", "crates/language", "crates/language_model", @@ -173,6 +174,9 @@ members = [ default-members = ["crates/zed"] [workspace.dependencies] + + + # # Workspace member crates # @@ -212,6 +216,7 @@ file_icons = { path = "crates/file_icons" } fs = { path = "crates/fs" } fsevent = { path = "crates/fsevent" } fuzzy = { path = "crates/fuzzy" } +isahc_http_client = { path = "crates/isahc_http_client" } git = { path = "crates/git" } git_hosting_providers = { path = "crates/git_hosting_providers" } go_to_line = { path = "crates/go_to_line" } @@ -394,6 +399,8 @@ runtimelib = { version = "0.15", default-features = false, features = [ ] } rustc-demangle = "0.1.23" rust-embed = { version = "8.4", features = ["include-exclude"] } +rustls = "0.20.3" +rustls-native-certs = "0.8.0" schemars = { version = "0.8", features = ["impl_json_schema"] } semver = "1.0" serde = { version = "1.0", features = ["derive", "rc"] } diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml index 12e669780d..1e08c9a768 100644 --- a/crates/auto_update/Cargo.toml +++ b/crates/auto_update/Cargo.toml @@ -19,7 +19,6 @@ db.workspace = true editor.workspace = 
true gpui.workspace = true http_client.workspace = true -isahc.workspace = true log.workspace = true markdown_preview.workspace = true menu.workspace = true diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index c0863e41d1..cfda6d6e58 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -9,7 +9,6 @@ use gpui::{ actions, AppContext, AsyncAppContext, Context as _, Global, Model, ModelContext, SemanticVersion, SharedString, Task, View, ViewContext, VisualContext, WindowContext, }; -use isahc::AsyncBody; use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView}; use schemars::JsonSchema; @@ -20,7 +19,7 @@ use smol::{fs, io::AsyncReadExt}; use settings::{Settings, SettingsSources, SettingsStore}; use smol::{fs::File, process::Command}; -use http_client::{HttpClient, HttpClientWithUrl}; +use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use std::{ env::{ diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 82237ebaa5..8ae4f15c97 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -18,7 +18,7 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup [dependencies] anyhow.workspace = true async-recursion = "0.3" -async-tungstenite = { workspace = true, features = ["async-std", "async-native-tls"] } +async-tungstenite = { workspace = true, features = ["async-std", "async-tls"] } chrono = { workspace = true, features = ["serde"] } clock.workspace = true collections.workspace = true @@ -35,6 +35,8 @@ postage.workspace = true rand.workspace = true release_channel.workspace = true rpc = { workspace = true, features = ["gpui"] } +rustls.workspace = true +rustls-native-certs.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/client/src/client.rs 
b/crates/client/src/client.rs index 6e1362c43e..09286300d9 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -240,8 +240,6 @@ pub enum EstablishConnectionError { #[error("{0}")] Other(#[from] anyhow::Error), #[error("{0}")] - Http(#[from] http_client::Error), - #[error("{0}")] InvalidHeaderValue(#[from] async_tungstenite::tungstenite::http::header::InvalidHeaderValue), #[error("{0}")] Io(#[from] std::io::Error), @@ -529,19 +527,13 @@ impl Client { } pub fn production(cx: &mut AppContext) -> Arc { - let user_agent = format!( - "Zed/{} ({}; {})", - AppVersion::global(cx), - std::env::consts::OS, - std::env::consts::ARCH - ); let clock = Arc::new(clock::RealSystemClock); - let http = Arc::new(HttpClientWithUrl::new( + let http = Arc::new(HttpClientWithUrl::new_uri( + cx.http_client(), &ClientSettings::get_global(cx).server_url, - Some(user_agent), - ProxySettings::get_global(cx).proxy.clone(), + cx.http_client().proxy().cloned(), )); - Self::new(clock, http.clone(), cx) + Self::new(clock, http, cx) } pub fn id(&self) -> u64 { @@ -1145,8 +1137,32 @@ impl Client { match url_scheme { Https => { + let client_config = { + let mut root_store = rustls::RootCertStore::empty(); + + let root_certs = rustls_native_certs::load_native_certs(); + for error in root_certs.errors { + log::warn!("error loading native certs: {:?}", error); + } + root_store.add_parsable_certificates( + &root_certs + .certs + .into_iter() + .map(|cert| cert.as_ref().to_owned()) + .collect::>(), + ); + rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(root_store) + .with_no_client_auth() + }; let (stream, _) = - async_tungstenite::async_std::client_async_tls(request, stream).await?; + async_tungstenite::async_tls::client_async_tls_with_connector( + request, + stream, + Some(client_config.into()), + ) + .await?; Ok(Connection::new( stream .map_err(|error| anyhow!(error)) diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 
f8ba847ab2..296809158d 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -36,6 +36,7 @@ envy = "0.4.2" futures.workspace = true google_ai.workspace = true hex.workspace = true +isahc_http_client.workspace = true http_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true diff --git a/crates/collab/src/llm.rs b/crates/collab/src/llm.rs index def4499ae4..53f0bfdfd0 100644 --- a/crates/collab/src/llm.rs +++ b/crates/collab/src/llm.rs @@ -22,7 +22,7 @@ use chrono::{DateTime, Duration, Utc}; use collections::HashMap; use db::{usage_measure::UsageMeasure, ActiveUserCount, LlmDatabase}; use futures::{Stream, StreamExt as _}; -use http_client::IsahcHttpClient; +use isahc_http_client::IsahcHttpClient; use rpc::ListModelsResponse; use rpc::{ proto::Plan, LanguageModelProvider, PerformCompletionParams, EXPIRED_LLM_TOKEN_HEADER_NAME, @@ -72,6 +72,7 @@ impl LlmState { let http_client = IsahcHttpClient::builder() .default_header("User-Agent", user_agent) .build() + .map(IsahcHttpClient::from) .context("failed to construct http client")?; let this = Self { diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 4146eafb87..b2a694027a 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -35,6 +35,8 @@ use chrono::Utc; use collections::{HashMap, HashSet}; pub use connection_pool::{ConnectionPool, ZedVersion}; use core::fmt::{self, Debug, Formatter}; +use http_client::HttpClient; +use isahc_http_client::IsahcHttpClient; use open_ai::{OpenAiEmbeddingModel, OPEN_AI_API_URL}; use sha2::Digest; use supermaven_api::{CreateExternalUserRequest, SupermavenAdminApi}; @@ -45,7 +47,6 @@ use futures::{ stream::FuturesUnordered, FutureExt, SinkExt, StreamExt, TryStreamExt, }; -use http_client::IsahcHttpClient; use prometheus::{register_int_gauge, IntGauge}; use rpc::{ proto::{ @@ -139,7 +140,7 @@ struct Session { connection_pool: Arc>, app_state: Arc, supermaven_client: Option>, - http_client: Arc, + 
http_client: Arc, /// The GeoIP country code for the user. #[allow(unused)] geoip_country_code: Option, @@ -955,9 +956,10 @@ impl Server { tracing::info!("connection opened"); + let user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { - Ok(http_client) => Arc::new(http_client), + Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)), Err(error) => { tracing::error!(?error, "failed to create HTTP client"); return; diff --git a/crates/evals/Cargo.toml b/crates/evals/Cargo.toml index e680e4f504..400ab139aa 100644 --- a/crates/evals/Cargo.toml +++ b/crates/evals/Cargo.toml @@ -24,6 +24,7 @@ feature_flags.workspace = true fs.workspace = true git.workspace = true gpui.workspace = true +isahc_http_client.workspace = true language.workspace = true languages.workspace = true http_client.workspace = true diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index d7e63fafbf..751dcd09aa 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -97,13 +97,14 @@ fn main() -> Result<()> { gpui::App::headless().run(move |cx| { let executor = cx.background_executor().clone(); - + let client = isahc_http_client::IsahcHttpClient::new(None, None); + cx.set_http_client(client.clone()); match cli.command { Commands::Fetch {} => { executor .clone() .spawn(async move { - if let Err(err) = fetch_evaluation_resources(&executor).await { + if let Err(err) = fetch_evaluation_resources(client, &executor).await { eprintln!("Error: {}", err); exit(1); } @@ -127,10 +128,12 @@ fn main() -> Result<()> { Ok(()) } -async fn fetch_evaluation_resources(executor: &BackgroundExecutor) -> Result<()> { - let http_client = http_client::HttpClientWithProxy::new(None, None); - fetch_code_search_net_resources(&http_client).await?; - fetch_eval_repos(executor, &http_client).await?; +async fn fetch_evaluation_resources( + http_client: Arc, + executor: 
&BackgroundExecutor, +) -> Result<()> { + fetch_code_search_net_resources(&*http_client).await?; + fetch_eval_repos(executor, &*http_client).await?; Ok(()) } @@ -239,6 +242,7 @@ async fn run_evaluation( executor: &BackgroundExecutor, cx: &mut AsyncAppContext, ) -> Result<()> { + let mut http_client = None; cx.update(|cx| { let mut store = SettingsStore::new(cx); store @@ -248,15 +252,15 @@ async fn run_evaluation( client::init_settings(cx); language::init(cx); Project::init_settings(cx); + http_client = Some(cx.http_client()); cx.update_flags(false, vec![]); }) .unwrap(); - + let http_client = http_client.unwrap(); let dataset_dir = Path::new(CODESEARCH_NET_DIR); let evaluations_path = dataset_dir.join("evaluations.json"); let repos_dir = Path::new(EVAL_REPOS_DIR); let db_path = Path::new(EVAL_DB_PATH); - let http_client = http_client::HttpClientWithProxy::new(None, None); let api_key = std::env::var("OPENAI_API_KEY").unwrap(); let git_hosting_provider_registry = Arc::new(GitHostingProviderRegistry::new()); let fs = Arc::new(RealFs::new(git_hosting_provider_registry, None)) as Arc; @@ -266,9 +270,9 @@ async fn run_evaluation( Client::new( clock, Arc::new(http_client::HttpClientWithUrl::new( + http_client.clone(), "https://zed.dev", None, - None, )), cx, ) diff --git a/crates/extension/Cargo.toml b/crates/extension/Cargo.toml index 0371b1866d..edf6184d38 100644 --- a/crates/extension/Cargo.toml +++ b/crates/extension/Cargo.toml @@ -57,6 +57,7 @@ task.workspace = true serde_json_lenient.workspace = true [dev-dependencies] +isahc_http_client.workspace = true ctor.workspace = true env_logger.workspace = true parking_lot.workspace = true diff --git a/crates/extension/src/extension_builder.rs b/crates/extension/src/extension_builder.rs index e42929f78e..7380e699f9 100644 --- a/crates/extension/src/extension_builder.rs +++ b/crates/extension/src/extension_builder.rs @@ -246,6 +246,7 @@ impl ExtensionBuilder { .args(scanner_path.exists().then_some(scanner_path)) .output() 
.context("failed to run clang")?; + if !clang_output.status.success() { bail!( "failed to compile {} parser with clang: {}", @@ -431,6 +432,7 @@ impl ExtensionBuilder { let body = BufReader::new(response.body_mut()); let body = GzipDecoder::new(body); let tar = Archive::new(body); + tar.unpack(&tar_out_dir) .await .context("failed to unpack wasi-sdk archive")?; diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index 3ebc4f20d3..bd416f4029 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -190,6 +190,7 @@ pub fn init( None, fs, client.http_client().clone(), + client.http_client().clone(), Some(client.telemetry().clone()), node_runtime, language_registry, @@ -225,6 +226,7 @@ impl ExtensionStore { build_dir: Option, fs: Arc, http_client: Arc, + builder_client: Arc, telemetry: Option>, node_runtime: Arc, language_registry: Arc, @@ -244,12 +246,7 @@ impl ExtensionStore { extension_index: Default::default(), installed_dir, index_path, - builder: Arc::new(ExtensionBuilder::new( - // Construct a real HTTP client for the extension builder, as we - // don't want to use a fake one in the tests. 
- ::http_client::client(None, http_client.proxy().cloned()), - build_dir, - )), + builder: Arc::new(ExtensionBuilder::new(builder_client, build_dir)), outstanding_operations: Default::default(), modified_extensions: Default::default(), reload_complete_senders: Vec::new(), @@ -830,7 +827,6 @@ impl ExtensionStore { let mut extension_manifest = ExtensionManifest::load(fs.clone(), &extension_source_path).await?; let extension_id = extension_manifest.id.clone(); - if !this.update(&mut cx, |this, cx| { match this.outstanding_operations.entry(extension_id.clone()) { btree_map::Entry::Occupied(_) => return false, @@ -854,7 +850,6 @@ impl ExtensionStore { .ok(); } }); - cx.background_executor() .spawn({ let extension_source_path = extension_source_path.clone(); @@ -885,10 +880,8 @@ impl ExtensionStore { bail!("extension {extension_id} is already installed"); } } - fs.create_symlink(output_path, extension_source_path) .await?; - this.update(&mut cx, |this, cx| this.reload(None, cx))? .await; Ok(()) diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 326c713bd5..0fbd00e0b4 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -13,10 +13,12 @@ use futures::{io::BufReader, AsyncReadExt, StreamExt}; use gpui::{Context, SemanticVersion, TestAppContext}; use http_client::{FakeHttpClient, Response}; use indexed_docs::IndexedDocsRegistry; +use isahc_http_client::IsahcHttpClient; use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName}; use node_runtime::FakeNodeRuntime; use parking_lot::Mutex; use project::{Project, DEFAULT_COMPLETION_CONTEXT}; +use release_channel::AppVersion; use serde_json::json; use settings::{Settings as _, SettingsStore}; use snippet_provider::SnippetRegistry; @@ -270,6 +272,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { None, fs.clone(), http_client.clone(), + http_client.clone(), None, 
node_runtime.clone(), language_registry.clone(), @@ -397,6 +400,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { None, fs.clone(), http_client.clone(), + http_client.clone(), None, node_runtime.clone(), language_registry.clone(), @@ -453,6 +457,8 @@ async fn test_extension_store(cx: &mut TestAppContext) { }); } +// TODO remove +#[ignore] #[gpui::test] async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { init_test(cx); @@ -502,7 +508,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { http_request_count: 0, })); - let http_client = FakeHttpClient::create({ + let extension_client = FakeHttpClient::create({ let language_server_version = language_server_version.clone(); move |request| { let language_server_version = language_server_version.clone(); @@ -558,19 +564,33 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice())); encoder.read_to_end(&mut gzipped_bytes).await.unwrap(); Ok(Response::new(gzipped_bytes.into())) + // } else if uri == WASI_ADAPTER_URL { + // let binary_contents = + // include_bytes!("wasi_snapshot_preview1.reactor.wasm").as_slice(); + // Ok(Response::new(binary_contents.into())) } else { Ok(Response::builder().status(404).body("not found".into())?) 
} } } }); + let user_agent = cx.update(|cx| { + format!( + "Zed/{} ({}; {})", + AppVersion::global(cx), + std::env::consts::OS, + std::env::consts::ARCH + ) + }); + let builder_client = IsahcHttpClient::new(None, Some(user_agent)); let extension_store = cx.new_model(|cx| { ExtensionStore::new( extensions_dir.clone(), Some(cache_dir), fs.clone(), - http_client.clone(), + extension_client.clone(), + builder_client, None, node_runtime, language_registry.clone(), diff --git a/crates/extension_cli/Cargo.toml b/crates/extension_cli/Cargo.toml index 54c47f4a82..bc649d8e04 100644 --- a/crates/extension_cli/Cargo.toml +++ b/crates/extension_cli/Cargo.toml @@ -18,7 +18,7 @@ clap = { workspace = true, features = ["derive"] } env_logger.workspace = true extension = { workspace = true, features = ["no-webrtc"] } fs.workspace = true -http_client.workspace = true +isahc_http_client.workspace = true language.workspace = true log.workspace = true rpc.workspace = true diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index 029c560e57..6eaebca2f0 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -7,13 +7,13 @@ use std::{ }; use ::fs::{copy_recursive, CopyOptions, Fs, RealFs}; -use ::http_client::HttpClientWithProxy; use anyhow::{anyhow, bail, Context, Result}; use clap::Parser; use extension::{ extension_builder::{CompileExtensionOptions, ExtensionBuilder}, ExtensionManifest, }; +use isahc_http_client::IsahcHttpClient; use language::LanguageConfig; use theme::ThemeRegistry; use tree_sitter::{Language, Query, WasmStore}; @@ -66,7 +66,13 @@ async fn main() -> Result<()> { std::env::consts::OS, std::env::consts::ARCH ); - let http_client = Arc::new(HttpClientWithProxy::new(Some(user_agent), None)); + let http_client = Arc::new( + IsahcHttpClient::builder() + .default_header("User-Agent", user_agent) + .build() + .map(IsahcHttpClient::from)?, + ); + let builder = ExtensionBuilder::new(http_client, scratch_dir); builder 
.compile_extension( diff --git a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index caca91c1ab..b8ad1ed05d 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -18,7 +18,6 @@ futures.workspace = true git.workspace = true gpui.workspace = true http_client.workspace = true -isahc.workspace = true regex.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/git_hosting_providers/src/providers/codeberg.rs b/crates/git_hosting_providers/src/providers/codeberg.rs index b34d809100..eaadca1ecf 100644 --- a/crates/git_hosting_providers/src/providers/codeberg.rs +++ b/crates/git_hosting_providers/src/providers/codeberg.rs @@ -3,9 +3,7 @@ use std::sync::Arc; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::HttpClient; -use isahc::config::Configurable; -use isahc::{AsyncBody, Request}; +use http_client::{AsyncBody, HttpClient, Request}; use serde::Deserialize; use url::Url; @@ -51,16 +49,14 @@ impl Codeberg { let url = format!("https://codeberg.org/api/v1/repos/{repo_owner}/{repo}/git/commits/{commit}"); - let mut request = Request::get(&url) - .redirect_policy(isahc::config::RedirectPolicy::Follow) - .header("Content-Type", "application/json"); + let mut request = Request::get(&url).header("Content-Type", "application/json"); if let Ok(codeberg_token) = std::env::var("CODEBERG_TOKEN") { request = request.header("Authorization", format!("Bearer {}", codeberg_token)); } let mut response = client - .send(request.body(AsyncBody::default())?) 
+ .send_with_redirect_policy(request.body(AsyncBody::default())?, true) .await .with_context(|| format!("error fetching Codeberg commit details at {:?}", url))?; diff --git a/crates/git_hosting_providers/src/providers/github.rs b/crates/git_hosting_providers/src/providers/github.rs index 103f6ae1ce..be46b51ddf 100644 --- a/crates/git_hosting_providers/src/providers/github.rs +++ b/crates/git_hosting_providers/src/providers/github.rs @@ -3,9 +3,7 @@ use std::sync::{Arc, OnceLock}; use anyhow::{bail, Context, Result}; use async_trait::async_trait; use futures::AsyncReadExt; -use http_client::HttpClient; -use isahc::config::Configurable; -use isahc::{AsyncBody, Request}; +use http_client::{AsyncBody, HttpClient, Request}; use regex::Regex; use serde::Deserialize; use url::Url; @@ -55,16 +53,14 @@ impl Github { ) -> Result> { let url = format!("https://api.github.com/repos/{repo_owner}/{repo}/commits/{commit}"); - let mut request = Request::get(&url) - .redirect_policy(isahc::config::RedirectPolicy::Follow) - .header("Content-Type", "application/json"); + let mut request = Request::get(&url).header("Content-Type", "application/json"); if let Ok(github_token) = std::env::var("GITHUB_TOKEN") { request = request.header("Authorization", format!("Bearer {}", github_token)); } let mut response = client - .send(request.body(AsyncBody::default())?) 
+ .send_with_redirect_policy(request.body(AsyncBody::default())?, true) .await .with_context(|| format!("error fetching GitHub commit details at {:?}", url))?; diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index d0d75b73e9..e2339a38ed 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -11,13 +11,13 @@ license = "Apache-2.0" workspace = true [features] -default = [] +default = ["http_client"] test-support = [ "backtrace", "collections/test-support", "rand", "util/test-support", - "http_client/test-support", + "http_client?/test-support", ] runtime_shaders = [] macos-blade = ["blade-graphics", "blade-macros", "blade-util", "bytemuck"] @@ -40,7 +40,7 @@ derive_more.workspace = true etagere = "0.2" futures.workspace = true gpui_macros.workspace = true -http_client.workspace = true +http_client = { optional = true, workspace = true } image = "0.25.1" itertools.workspace = true linkme = "0.3" diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index ac7af186d3..157dbdf70f 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -131,6 +131,7 @@ fn main() { PathBuf::from_str("crates/gpui/examples/image/app-icon.png").unwrap(), ), remote_resource: "https://picsum.photos/512/512".into(), + asset_resource: "image/color.svg".into(), }) }) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index ee7a6ef191..6cb491b100 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -117,7 +117,7 @@ impl App { Self(AppContext::new( current_platform(false), Arc::new(()), - http_client::client(None, None), + Arc::new(NullHttpClient), )) } @@ -128,7 +128,7 @@ impl App { Self(AppContext::new( current_platform(true), Arc::new(()), - http_client::client(None, None), + Arc::new(NullHttpClient), )) } @@ -142,6 +142,14 @@ impl App { self } + /// Set the http client for the application + pub fn with_http_client(self, http_client: Arc) -> Self { + let mut context_lock = 
self.0.borrow_mut(); + context_lock.http_client = http_client; + drop(context_lock); + self + } + /// Start the application. The provided callback will be called once the /// app is fully launched. pub fn run(self, on_finish_launching: F) @@ -1512,3 +1520,22 @@ pub struct KeystrokeEvent { /// The action that was resolved for the keystroke, if any pub action: Option>, } + +struct NullHttpClient; + +impl HttpClient for NullHttpClient { + fn send_with_redirect_policy( + &self, + _req: http_client::Request, + _follow_redirects: bool, + ) -> futures::future::BoxFuture< + 'static, + Result, anyhow::Error>, + > { + async move { Err(anyhow!("No HttpClient available")) }.boxed() + } + + fn proxy(&self) -> Option<&http_client::Uri> { + None + } +} diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index f1e8bb68e3..63236d5309 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -345,7 +345,10 @@ impl Asset for ImageAsset { let bytes = match source.clone() { UriOrPath::Path(uri) => fs::read(uri.as_ref())?, UriOrPath::Uri(uri) => { - let mut response = client.get(uri.as_ref(), ().into(), true).await?; + let mut response = client + .get(uri.as_ref(), ().into(), true) + .await + .map_err(|e| ImageCacheError::Client(Arc::new(e)))?; let mut body = Vec::new(); response.body_mut().read_to_end(&mut body).await?; if !response.status().is_success() { @@ -429,7 +432,7 @@ impl Asset for ImageAsset { pub enum ImageCacheError { /// An error that occurred while fetching an image from a remote source. #[error("http error: {0}")] - Client(#[from] http_client::Error), + Client(#[from] Arc), /// An error that occurred while reading the image from disk. 
#[error("IO error: {0}")] Io(Arc), diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index a447478a9b..7ba3ce055e 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -128,6 +128,7 @@ pub use executor::*; pub use geometry::*; pub use global::*; pub use gpui_macros::{register_action, test, IntoElement, Render}; +pub use http_client; pub use input::*; pub use interactive::*; use key_dispatch::*; diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index ae017685a9..0244ac4104 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -16,13 +16,12 @@ path = "src/http_client.rs" doctest = true [dependencies] -http = "1.0.0" +http = "0.2" anyhow.workspace = true derive_more.workspace = true futures.workspace = true -isahc.workspace = true log.workspace = true serde.workspace = true serde_json.workspace = true -futures-lite.workspace = true +smol.workspace = true url.workspace = true diff --git a/crates/http_client/src/async_body.rs b/crates/http_client/src/async_body.rs new file mode 100644 index 0000000000..e2544f60fe --- /dev/null +++ b/crates/http_client/src/async_body.rs @@ -0,0 +1,109 @@ +use std::{borrow::Cow, io::Read, pin::Pin, task::Poll}; + +use futures::{AsyncRead, AsyncReadExt}; + +/// Based on the implementation of AsyncBody in +/// https://github.com/sagebind/isahc/blob/5c533f1ef4d6bdf1fd291b5103c22110f41d0bf0/src/body/mod.rs +pub struct AsyncBody(pub Inner); + +pub enum Inner { + /// An empty body. + Empty, + + /// A body stored in memory. + SyncReader(std::io::Cursor>), + + /// An asynchronous reader. + AsyncReader(Pin>), +} + +impl AsyncBody { + /// Create a new empty body. + /// + /// An empty body represents the *absence* of a body, which is semantically + /// different than the presence of a body of zero length. + pub fn empty() -> Self { + Self(Inner::Empty) + } + /// Create a streaming body that reads from the given reader. 
+ pub fn from_reader(read: R) -> Self + where + R: AsyncRead + Send + Sync + 'static, + { + Self(Inner::AsyncReader(Box::pin(read))) + } +} + +impl Default for AsyncBody { + fn default() -> Self { + Self(Inner::Empty) + } +} + +impl From<()> for AsyncBody { + fn from(_: ()) -> Self { + Self(Inner::Empty) + } +} + +impl From> for AsyncBody { + fn from(body: Vec) -> Self { + Self(Inner::SyncReader(std::io::Cursor::new(Cow::Owned(body)))) + } +} + +impl From<&'_ [u8]> for AsyncBody { + fn from(body: &[u8]) -> Self { + body.to_vec().into() + } +} + +impl From for AsyncBody { + fn from(body: String) -> Self { + body.into_bytes().into() + } +} + +impl From<&'_ str> for AsyncBody { + fn from(body: &str) -> Self { + body.as_bytes().into() + } +} + +impl> From> for AsyncBody { + fn from(body: Option) -> Self { + match body { + Some(body) => body.into(), + None => Self(Inner::Empty), + } + } +} + +impl std::io::Read for AsyncBody { + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + match &mut self.0 { + Inner::Empty => Ok(0), + Inner::SyncReader(cursor) => cursor.read(buf), + Inner::AsyncReader(async_reader) => smol::block_on(async_reader.read(buf)), + } + } +} + +impl futures::AsyncRead for AsyncBody { + fn poll_read( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut [u8], + ) -> std::task::Poll> { + // SAFETY: Standard Enum pin projection + let inner = unsafe { &mut self.get_unchecked_mut().0 }; + match inner { + Inner::Empty => Poll::Ready(Ok(0)), + // Blocking call is over an in-memory buffer + Inner::SyncReader(cursor) => Poll::Ready(cursor.read(buf)), + Inner::AsyncReader(async_reader) => { + AsyncRead::poll_read(async_reader.as_mut(), cx, buf) + } + } + } +} diff --git a/crates/http_client/src/github.rs b/crates/http_client/src/github.rs index a64a5bae5c..70587fa3ce 100644 --- a/crates/http_client/src/github.rs +++ b/crates/http_client/src/github.rs @@ -34,7 +34,7 @@ pub async fn latest_github_release( ) -> Result { let mut response = 
http .get( - &format!("https://api.github.com/repos/{repo_name_with_owner}/releases"), + format!("https://api.github.com/repos/{repo_name_with_owner}/releases").as_str(), Default::default(), true, ) @@ -91,13 +91,14 @@ pub async fn get_release_by_tag_name( .context("error fetching latest release")?; let mut body = Vec::new(); + let status = response.status(); response .body_mut() .read_to_end(&mut body) .await .context("error reading latest release")?; - if response.status().is_client_error() { + if status.is_client_error() { let text = String::from_utf8_lossy(body.as_slice()); bail!( "status error {}, response: {text:?}", diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 7ea0029d79..d78b2dd23c 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -1,47 +1,48 @@ +mod async_body; pub mod github; pub use anyhow::{anyhow, Result}; +pub use async_body::{AsyncBody, Inner}; use derive_more::Deref; +pub use http::{self, Method, Request, Response, StatusCode, Uri}; + use futures::future::BoxFuture; -use futures_lite::FutureExt; -use isahc::config::{Configurable, RedirectPolicy}; -pub use isahc::http; -pub use isahc::{ - http::{Method, StatusCode, Uri}, - AsyncBody, Error, HttpClient as IsahcHttpClient, Request, Response, -}; +use http::request::Builder; #[cfg(feature = "test-support")] use std::fmt; -use std::{ - sync::{Arc, Mutex}, - time::Duration, -}; +use std::sync::{Arc, Mutex}; pub use url::Url; -pub trait HttpClient: Send + Sync { +pub trait HttpClient: 'static + Send + Sync { fn send( + &self, + req: http::Request, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.send_with_redirect_policy(req, false) + } + + // TODO: Make a better API for this + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>>; + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>>; fn get<'a>( &'a self, uri: &str, body: AsyncBody, 
follow_redirects: bool, - ) -> BoxFuture<'a, Result, Error>> { - let request = isahc::Request::builder() - .redirect_policy(if follow_redirects { - RedirectPolicy::Follow - } else { - RedirectPolicy::None - }) - .method(Method::GET) - .uri(uri) - .body(body); + ) -> BoxFuture<'a, Result, anyhow::Error>> { + let request = Builder::new().uri(uri).body(body); + match request { - Ok(request) => self.send(request), - Err(error) => async move { Err(error.into()) }.boxed(), + Ok(request) => Box::pin(async move { + self.send_with_redirect_policy(request, follow_redirects) + .await + .map_err(Into::into) + }), + Err(e) => Box::pin(async move { Err(e.into()) }), } } @@ -49,15 +50,16 @@ pub trait HttpClient: Send + Sync { &'a self, uri: &str, body: AsyncBody, - ) -> BoxFuture<'a, Result, Error>> { - let request = isahc::Request::builder() - .method(Method::POST) + ) -> BoxFuture<'a, Result, anyhow::Error>> { + let request = Builder::new() .uri(uri) + .method(Method::POST) .header("Content-Type", "application/json") .body(body); + match request { - Ok(request) => self.send(request), - Err(error) => async move { Err(error.into()) }.boxed(), + Ok(request) => Box::pin(async move { self.send(request).await.map_err(Into::into) }), + Err(e) => Box::pin(async move { Err(e.into()) }), } } @@ -74,29 +76,28 @@ pub struct HttpClientWithProxy { impl HttpClientWithProxy { /// Returns a new [`HttpClientWithProxy`] with the given proxy URL. 
- pub fn new(user_agent: Option, proxy_url: Option) -> Self { - let proxy_url = proxy_url - .and_then(|input| { - input - .parse::() - .inspect_err(|e| log::error!("Error parsing proxy settings: {}", e)) - .ok() - }) + pub fn new(client: Arc, proxy_url: Option) -> Self { + let proxy_uri = proxy_url + .and_then(|proxy| proxy.parse().ok()) .or_else(read_proxy_from_env); + Self::new_uri(client, proxy_uri) + } + pub fn new_uri(client: Arc, proxy_uri: Option) -> Self { Self { - client: client(user_agent, proxy_url.clone()), - proxy: proxy_url, + client, + proxy: proxy_uri, } } } impl HttpClient for HttpClientWithProxy { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -105,11 +106,12 @@ impl HttpClient for HttpClientWithProxy { } impl HttpClient for Arc { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -123,14 +125,35 @@ pub struct HttpClientWithUrl { client: HttpClientWithProxy, } +impl std::ops::Deref for HttpClientWithUrl { + type Target = HttpClientWithProxy; + + fn deref(&self) -> &Self::Target { + &self.client + } +} + impl HttpClientWithUrl { /// Returns a new [`HttpClientWithUrl`] with the given base URL. 
pub fn new( + client: Arc, base_url: impl Into, - user_agent: Option, proxy_url: Option, ) -> Self { - let client = HttpClientWithProxy::new(user_agent, proxy_url); + let client = HttpClientWithProxy::new(client, proxy_url); + + Self { + base_url: Mutex::new(base_url.into()), + client, + } + } + + pub fn new_uri( + client: Arc, + base_url: impl Into, + proxy_uri: Option, + ) -> Self { + let client = HttpClientWithProxy::new_uri(client, proxy_uri); Self { base_url: Mutex::new(base_url.into()), @@ -195,11 +218,12 @@ impl HttpClientWithUrl { } impl HttpClient for Arc { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -208,11 +232,12 @@ impl HttpClient for Arc { } impl HttpClient for HttpClientWithUrl { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { - self.client.send(req) + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.client.send_with_redirect_policy(req, follow_redirects) } fn proxy(&self) -> Option<&Uri> { @@ -220,26 +245,7 @@ impl HttpClient for HttpClientWithUrl { } } -pub fn client(user_agent: Option, proxy: Option) -> Arc { - let mut builder = isahc::HttpClient::builder() - // Some requests to Qwen2 models on Runpod can take 32+ seconds, - // especially if there's a cold boot involved. We may need to have - // those requests use a different http client, because global timeouts - // of 50 and 60 seconds, respectively, would be very high! 
- .connect_timeout(Duration::from_secs(5)) - .low_speed_timeout(100, Duration::from_secs(30)) - .proxy(proxy.clone()); - if let Some(user_agent) = user_agent { - builder = builder.default_header("User-Agent", user_agent); - } - - Arc::new(HttpClientWithProxy { - client: Arc::new(builder.build().unwrap()), - proxy, - }) -} - -fn read_proxy_from_env() -> Option { +pub fn read_proxy_from_env() -> Option { const ENV_VARS: &[&str] = &[ "ALL_PROXY", "all_proxy", @@ -258,23 +264,9 @@ fn read_proxy_from_env() -> Option { None } -impl HttpClient for isahc::HttpClient { - fn send( - &self, - req: Request, - ) -> BoxFuture<'static, Result, Error>> { - let client = self.clone(); - Box::pin(async move { client.send_async(req).await }) - } - - fn proxy(&self) -> Option<&Uri> { - None - } -} - #[cfg(feature = "test-support")] type FakeHttpHandler = Box< - dyn Fn(Request) -> BoxFuture<'static, Result, Error>> + dyn Fn(Request) -> BoxFuture<'static, Result, anyhow::Error>> + Send + Sync + 'static, @@ -289,7 +281,7 @@ pub struct FakeHttpClient { impl FakeHttpClient { pub fn create(handler: F) -> Arc where - Fut: futures::Future, Error>> + Send + 'static, + Fut: futures::Future, anyhow::Error>> + Send + 'static, F: Fn(Request) -> Fut + Send + Sync + 'static, { Arc::new(HttpClientWithUrl { @@ -331,12 +323,13 @@ impl fmt::Debug for FakeHttpClient { #[cfg(feature = "test-support")] impl HttpClient for FakeHttpClient { - fn send( + fn send_with_redirect_policy( &self, req: Request, - ) -> BoxFuture<'static, Result, Error>> { + _follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { let future = (self.handler)(req); - Box::pin(async move { future.await.map(Into::into) }) + future } fn proxy(&self) -> Option<&Uri> { diff --git a/crates/isahc_http_client/Cargo.toml b/crates/isahc_http_client/Cargo.toml new file mode 100644 index 0000000000..b90163ef74 --- /dev/null +++ b/crates/isahc_http_client/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "isahc_http_client" +version 
= "0.1.0" +edition = "2021" +publish = false +license = "Apache-2.0" + +[lints] +workspace = true + +[features] +test-support = [] + +[lib] +path = "src/isahc_http_client.rs" + +[dependencies] +http_client.workspace = true +isahc.workspace = true +futures.workspace = true +anyhow.workspace = true +util.workspace = true diff --git a/crates/isahc_http_client/LICENSE-APACHE b/crates/isahc_http_client/LICENSE-APACHE new file mode 120000 index 0000000000..1cd601d0a3 --- /dev/null +++ b/crates/isahc_http_client/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/crates/isahc_http_client/src/isahc_http_client.rs b/crates/isahc_http_client/src/isahc_http_client.rs new file mode 100644 index 0000000000..6c40b9f53b --- /dev/null +++ b/crates/isahc_http_client/src/isahc_http_client.rs @@ -0,0 +1,93 @@ +use std::{mem, sync::Arc, time::Duration}; + +use futures::future::BoxFuture; +use isahc::config::RedirectPolicy; +use util::maybe; + +pub use isahc::config::Configurable; +pub struct IsahcHttpClient(isahc::HttpClient); + +pub use http_client::*; + +impl IsahcHttpClient { + pub fn new(proxy: Option, user_agent: Option) -> Arc { + let mut builder = isahc::HttpClient::builder() + .connect_timeout(Duration::from_secs(5)) + .low_speed_timeout(100, Duration::from_secs(5)) + .proxy(proxy.clone()); + if let Some(agent) = user_agent { + builder = builder.default_header("User-Agent", agent); + } + Arc::new(IsahcHttpClient(builder.build().unwrap())) + } + pub fn builder() -> isahc::HttpClientBuilder { + isahc::HttpClientBuilder::new() + } +} + +impl From for IsahcHttpClient { + fn from(client: isahc::HttpClient) -> Self { + Self(client) + } +} + +impl HttpClient for IsahcHttpClient { + fn proxy(&self) -> Option<&Uri> { + None + } + + fn send_with_redirect_policy( + &self, + req: http_client::http::Request, + follow_redirects: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> + { + let req = maybe!({ + let (mut parts, body) = req.into_parts(); 
+ let mut builder = isahc::Request::builder() + .method(parts.method) + .uri(parts.uri) + .version(parts.version); + + let headers = builder.headers_mut()?; + mem::swap(headers, &mut parts.headers); + + let extensions = builder.extensions_mut()?; + mem::swap(extensions, &mut parts.extensions); + + let isahc_body = match body.0 { + http_client::Inner::Empty => isahc::AsyncBody::empty(), + http_client::Inner::AsyncReader(reader) => isahc::AsyncBody::from_reader(reader), + http_client::Inner::SyncReader(reader) => { + isahc::AsyncBody::from_bytes_static(reader.into_inner()) + } + }; + + builder + .redirect_policy(if follow_redirects { + RedirectPolicy::Follow + } else { + RedirectPolicy::None + }) + .body(isahc_body) + .ok() + }); + + let client = self.0.clone(); + + Box::pin(async move { + match req { + Some(req) => client + .send_async(req) + .await + .map_err(Into::into) + .map(|response| { + let (parts, body) = response.into_parts(); + let body = http_client::AsyncBody::from_reader(body); + http_client::Response::from_parts(parts, body) + }), + None => Err(anyhow::anyhow!("Request was malformed")), + } + }) + } +} diff --git a/crates/ollama/Cargo.toml b/crates/ollama/Cargo.toml index 76a8b1a8c1..34d8802b97 100644 --- a/crates/ollama/Cargo.toml +++ b/crates/ollama/Cargo.toml @@ -19,7 +19,6 @@ schemars = ["dep:schemars"] anyhow.workspace = true futures.workspace = true http_client.workspace = true -isahc.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true serde_json.workspace = true diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index a65d6eaf90..972520e61f 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -1,7 +1,6 @@ use anyhow::{anyhow, Context, Result}; use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; -use isahc::config::Configurable; +use http_client::{http, 
AsyncBody, HttpClient, Method, Request as HttpRequest}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use serde_json::{value::RawValue, Value}; @@ -262,18 +261,14 @@ pub async fn stream_chat_completion( client: &dyn HttpClient, api_url: &str, request: ChatRequest, - low_speed_timeout: Option, + _: Option, ) -> Result>> { let uri = format!("{api_url}/api/chat"); - let mut request_builder = HttpRequest::builder() + let request_builder = http::Request::builder() .method(Method::POST) .uri(uri) .header("Content-Type", "application/json"); - if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); - }; - let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; let mut response = client.send(request).await?; if response.status().is_success() { @@ -305,18 +300,14 @@ pub async fn stream_chat_completion( pub async fn get_models( client: &dyn HttpClient, api_url: &str, - low_speed_timeout: Option, + _: Option, ) -> Result> { let uri = format!("{api_url}/api/tags"); - let mut request_builder = HttpRequest::builder() + let request_builder = HttpRequest::builder() .method(Method::GET) .uri(uri) .header("Accept", "application/json"); - if let Some(low_speed_timeout) = low_speed_timeout { - request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); - }; - let request = request_builder.body(AsyncBody::default())?; let mut response = client.send(request).await?; @@ -354,13 +345,13 @@ pub async fn preload_model(client: Arc, api_url: &str, model: &s let mut response = match client.send(request).await { Ok(response) => response, - Err(err) => { + Err(error) => { // Be ok with a timeout during preload of the model - if err.is_timeout() { - return Ok(()); - } else { - return Err(err.into()); - } + // if err.is_timeout() { + // return Ok(()); + // } else { + return Err(error); + //} } }; diff --git a/crates/project/src/lsp_store.rs 
b/crates/project/src/lsp_store.rs index 6dd528147b..58d9ba8926 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -26,7 +26,7 @@ use gpui::{ AppContext, AsyncAppContext, Context, Entity, EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel, }; -use http_client::{AsyncBody, Error, HttpClient, Request, Response, Uri}; +use http_client::{AsyncBody, HttpClient, Request, Response, Uri}; use language::{ language_settings::{ all_language_settings, language_settings, AllLanguageSettings, LanguageSettings, @@ -7339,7 +7339,7 @@ impl HttpClient for BlockedHttpClient { fn send( &self, _req: Request, - ) -> BoxFuture<'static, Result, Error>> { + ) -> BoxFuture<'static, Result, anyhow::Error>> { Box::pin(async { Err(std::io::Error::new( std::io::ErrorKind::PermissionDenied, @@ -7352,6 +7352,14 @@ impl HttpClient for BlockedHttpClient { fn proxy(&self) -> Option<&Uri> { None } + + fn send_with_redirect_policy( + &self, + req: Request, + _: bool, + ) -> BoxFuture<'static, Result, anyhow::Error>> { + self.send(req) + } } struct SshLspAdapterDelegate { diff --git a/crates/semantic_index/Cargo.toml b/crates/semantic_index/Cargo.toml index c8dbb6a9f5..691d6e57f6 100644 --- a/crates/semantic_index/Cargo.toml +++ b/crates/semantic_index/Cargo.toml @@ -51,6 +51,7 @@ workspace.workspace = true worktree.workspace = true [dev-dependencies] +isahc_http_client.workspace = true env_logger.workspace = true client = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index 977473d1dc..0cc3f9f317 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -2,6 +2,7 @@ use client::Client; use futures::channel::oneshot; use gpui::App; use http_client::HttpClientWithUrl; +use isahc_http_client::IsahcHttpClient; use language::language_settings::AllLanguageSettings; 
use project::Project; use semantic_index::{OpenAiEmbeddingModel, OpenAiEmbeddingProvider, SemanticDb}; @@ -26,8 +27,12 @@ fn main() { }); let clock = Arc::new(FakeSystemClock::default()); - let http = Arc::new(HttpClientWithUrl::new("http://localhost:11434", None, None)); + let http = Arc::new(HttpClientWithUrl::new( + IsahcHttpClient::new(None, None), + "http://localhost:11434", + None, + )); let client = client::Client::new(clock, http.clone(), cx); Client::set_global(client.clone(), cx); diff --git a/crates/semantic_index/src/embedding/ollama.rs b/crates/semantic_index/src/embedding/ollama.rs index 09d33c584a..6d3fa67902 100644 --- a/crates/semantic_index/src/embedding/ollama.rs +++ b/crates/semantic_index/src/embedding/ollama.rs @@ -1,5 +1,5 @@ use anyhow::{Context as _, Result}; -use futures::{future::BoxFuture, AsyncReadExt, FutureExt}; +use futures::{future::BoxFuture, AsyncReadExt as _, FutureExt}; use http_client::HttpClient; use serde::{Deserialize, Serialize}; use std::sync::Arc; diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 28d2c7f825..7fa9602a14 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -47,6 +47,7 @@ file_finder.workspace = true file_icons.workspace = true fs.workspace = true futures.workspace = true +isahc_http_client.workspace = true git.workspace = true git_hosting_providers.workspace = true go_to_line.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index eb6d2853fd..d3a722ec65 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -11,7 +11,7 @@ use assistant::PromptBuilder; use chrono::Offset; use clap::{command, Parser}; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; -use client::{parse_zed_link, Client, DevServerToken, UserStore}; +use client::{parse_zed_link, Client, DevServerToken, ProxySettings, UserStore}; use collab_ui::channel_view::ChannelView; use db::kvp::KEY_VALUE_STORE; use editor::Editor; @@ -23,6 +23,8 @@ use gpui::{ Action, App, AppContext, 
AsyncAppContext, Context, DismissEvent, Global, Task, UpdateGlobal as _, VisualContext, }; +use http_client::{read_proxy_from_env, Uri}; +use isahc_http_client::IsahcHttpClient; use language::LanguageRegistry; use log::LevelFilter; @@ -327,7 +329,10 @@ fn main() { init_logger(); log::info!("========== starting zed =========="); - let app = App::new().with_assets(Assets); + + let app = App::new() + .with_assets(Assets) + .with_http_client(IsahcHttpClient::new(None, None)); let (installation_id, existing_installation_id_found) = app .background_executor() @@ -436,6 +441,26 @@ fn main() { if let Some(build_sha) = option_env!("ZED_COMMIT_SHA") { AppCommitSha::set_global(AppCommitSha(build_sha.into()), cx); } + settings::init(cx); + client::init_settings(cx); + let user_agent = format!( + "Zed/{} ({}; {})", + AppVersion::global(cx), + std::env::consts::OS, + std::env::consts::ARCH + ); + let proxy_str = ProxySettings::get_global(cx).proxy.to_owned(); + let proxy_url = proxy_str + .as_ref() + .and_then(|input| { + input + .parse::() + .inspect_err(|e| log::error!("Error parsing proxy settings: {}", e)) + .ok() + }) + .or_else(read_proxy_from_env); + let http = IsahcHttpClient::new(proxy_url, Some(user_agent)); + cx.set_http_client(http); ::set_global(fs.clone(), cx); @@ -444,11 +469,9 @@ fn main() { OpenListener::set_global(cx, open_listener.clone()); - settings::init(cx); handle_settings_file_changes(user_settings_file_rx, cx, handle_settings_changed); handle_keymap_file_changes(user_keymap_file_rx, cx, handle_keymap_changed); - client::init_settings(cx); let client = Client::production(cx); cx.set_http_client(client.http_client().clone()); let mut languages = LanguageRegistry::new(cx.background_executor().clone()); diff --git a/script/bundle-linux b/script/bundle-linux index 029d748f4f..deecd0984b 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -43,7 +43,10 @@ script/generate-licenses # Build binary in release mode export RUSTFLAGS="${RUSTFLAGS:-} -C 
link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib" -cargo build --release --target "${target_triple}" --package zed --package cli --package remote_server +cargo build --release --target "${target_triple}" --package zed --package cli +# Build remote_server in separate invocation to prevent feature unification from other crates +# from influencing dynamic libraries required by it. +cargo build --release --target "${target_triple}" --package remote_server # Strip the binary of all debug symbols # Later, we probably want to do something like this: https://github.com/GabrielMajeri/separate-symbols @@ -51,6 +54,10 @@ strip --strip-debug "${target_dir}/${target_triple}/release/zed" strip --strip-debug "${target_dir}/${target_triple}/release/cli" strip --strip-debug "${target_dir}/${target_triple}/release/remote_server" + +# Ensure that remote_server does not depend on libssl nor libcrypto, as we got rid of these deps. +ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl' + suffix="" if [ "$channel" != "stable" ]; then suffix="-$channel" From a62e8f6396bf41176ddd00cbc705b699d71fe6cf Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 18 Sep 2024 18:05:30 -0400 Subject: [PATCH 13/96] ci: Explicitly set cache-provider for swatinem/rust-cache (#18034) - Switches the Cache Dependencies step (`swatinem/rust-cache`) of Linux tests to use buildjet as `cache-provider`. Explicitly add 'github' (the default cache provider) to other uses of `swatinem/rust-cache` for consistency. 
Release Notes: - N/A --- .github/workflows/ci.yml | 2 ++ .github/workflows/publish_extension_cli.yml | 1 + 2 files changed, 3 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c55a3a9907..f059b47004 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -115,6 +115,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "buildjet" - name: Install Linux dependencies run: ./script/linux @@ -143,6 +144,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "github" - name: cargo clippy # Windows can't run shell scripts, so we need to use `cargo xtask`. diff --git a/.github/workflows/publish_extension_cli.yml b/.github/workflows/publish_extension_cli.yml index 698a09ad00..7c47ec5ded 100644 --- a/.github/workflows/publish_extension_cli.yml +++ b/.github/workflows/publish_extension_cli.yml @@ -24,6 +24,7 @@ jobs: uses: swatinem/rust-cache@23bce251a8cd2ffc3c1075eaa2367cf899916d84 # v2 with: save-if: ${{ github.ref == 'refs/heads/main' }} + cache-provider: "github" - name: Configure linux shell: bash -euxo pipefail {0} From 2cd9a88f53954051f639b120940c06d7bebcf250 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 19:39:15 -0400 Subject: [PATCH 14/96] Clean up after `isahc_http_client` introduction (#18045) This PR does some clean up after #15446. 
Release Notes: - N/A --- Cargo.toml | 5 +---- crates/collab/Cargo.toml | 2 +- crates/collab/src/rpc.rs | 1 - crates/extension/src/extension_store.rs | 4 ++++ crates/isahc_http_client/Cargo.toml | 4 ++-- crates/zed/Cargo.toml | 2 +- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 2071fdcb6f..c72fec020f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -174,9 +174,6 @@ members = [ default-members = ["crates/zed"] [workspace.dependencies] - - - # # Workspace member crates # @@ -216,7 +213,6 @@ file_icons = { path = "crates/file_icons" } fs = { path = "crates/fs" } fsevent = { path = "crates/fsevent" } fuzzy = { path = "crates/fuzzy" } -isahc_http_client = { path = "crates/isahc_http_client" } git = { path = "crates/git" } git_hosting_providers = { path = "crates/git_hosting_providers" } go_to_line = { path = "crates/go_to_line" } @@ -231,6 +227,7 @@ image_viewer = { path = "crates/image_viewer" } indexed_docs = { path = "crates/indexed_docs" } inline_completion_button = { path = "crates/inline_completion_button" } install_cli = { path = "crates/install_cli" } +isahc_http_client = { path = "crates/isahc_http_client" } journal = { path = "crates/journal" } language = { path = "crates/language" } language_model = { path = "crates/language_model" } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 296809158d..ad43d2d1f0 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -36,8 +36,8 @@ envy = "0.4.2" futures.workspace = true google_ai.workspace = true hex.workspace = true -isahc_http_client.workspace = true http_client.workspace = true +isahc_http_client.workspace = true jsonwebtoken.workspace = true live_kit_server.workspace = true log.workspace = true diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index b2a694027a..bc0f827e78 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -956,7 +956,6 @@ impl Server { tracing::info!("connection opened"); - let 
user_agent = format!("Zed Server/{}", env!("CARGO_PKG_VERSION")); let http_client = match IsahcHttpClient::builder().default_header("User-Agent", user_agent).build() { Ok(http_client) => Arc::new(IsahcHttpClient::from(http_client)), diff --git a/crates/extension/src/extension_store.rs b/crates/extension/src/extension_store.rs index bd416f4029..8dbd618a25 100644 --- a/crates/extension/src/extension_store.rs +++ b/crates/extension/src/extension_store.rs @@ -827,6 +827,7 @@ impl ExtensionStore { let mut extension_manifest = ExtensionManifest::load(fs.clone(), &extension_source_path).await?; let extension_id = extension_manifest.id.clone(); + if !this.update(&mut cx, |this, cx| { match this.outstanding_operations.entry(extension_id.clone()) { btree_map::Entry::Occupied(_) => return false, @@ -850,6 +851,7 @@ impl ExtensionStore { .ok(); } }); + cx.background_executor() .spawn({ let extension_source_path = extension_source_path.clone(); @@ -880,8 +882,10 @@ impl ExtensionStore { bail!("extension {extension_id} is already installed"); } } + fs.create_symlink(output_path, extension_source_path) .await?; + this.update(&mut cx, |this, cx| this.reload(None, cx))? 
.await; Ok(()) diff --git a/crates/isahc_http_client/Cargo.toml b/crates/isahc_http_client/Cargo.toml index b90163ef74..82f7621bf8 100644 --- a/crates/isahc_http_client/Cargo.toml +++ b/crates/isahc_http_client/Cargo.toml @@ -15,8 +15,8 @@ test-support = [] path = "src/isahc_http_client.rs" [dependencies] +anyhow.workspace = true +futures.workspace = true http_client.workspace = true isahc.workspace = true -futures.workspace = true -anyhow.workspace = true util.workspace = true diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 7fa9602a14..645d12fc76 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -47,7 +47,6 @@ file_finder.workspace = true file_icons.workspace = true fs.workspace = true futures.workspace = true -isahc_http_client.workspace = true git.workspace = true git_hosting_providers.workspace = true go_to_line.workspace = true @@ -58,6 +57,7 @@ image_viewer.workspace = true inline_completion_button.workspace = true install_cli.workspace = true isahc.workspace = true +isahc_http_client.workspace = true journal.workspace = true language.workspace = true language_model.workspace = true From 106ca5076fd8d485a9016fa202d618efb66e40dc Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 18 Sep 2024 16:43:59 -0700 Subject: [PATCH 15/96] Fix leak of LMDB connection in semantic index (#17992) Apparently, to close LMDB's file descriptors when using the `heed` library, you need to explicitly call `prepare_for_closing`. 
Release Notes: - N/A --------- Co-authored-by: Richard Feldman Co-authored-by: Jason --- crates/evals/src/eval.rs | 9 +++++++++ crates/semantic_index/src/semantic_index.rs | 12 +++++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 751dcd09aa..708cfa7511 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -446,6 +446,15 @@ async fn run_evaluation( println!("{}", serde_json::to_string(&query_results).unwrap()); } + + user_store + .update(cx, |_, _| { + drop(semantic_index); + drop(project); + drop(worktree); + drop(project_index); + }) + .unwrap(); } eprint!( diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index 3435d0a9ca..6c97ece024 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -25,7 +25,7 @@ pub use summary_index::FileSummary; pub struct SemanticDb { embedding_provider: Arc, - db_connection: heed::Env, + db_connection: Option, project_indices: HashMap, Model>, } @@ -70,7 +70,7 @@ impl SemanticDb { .ok(); Ok(SemanticDb { - db_connection, + db_connection: Some(db_connection), embedding_provider, project_indices: HashMap::default(), }) @@ -148,7 +148,7 @@ impl SemanticDb { let project_index = cx.new_model(|cx| { ProjectIndex::new( project.clone(), - self.db_connection.clone(), + self.db_connection.clone().unwrap(), self.embedding_provider.clone(), cx, ) @@ -171,6 +171,12 @@ impl SemanticDb { } } +impl Drop for SemanticDb { + fn drop(&mut self) { + self.db_connection.take().unwrap().prepare_for_closing(); + } +} + #[cfg(test)] mod tests { use super::*; From eef44aff7f9b17f1ea38cbc64ac52bbbd435ef10 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 18 Sep 2024 19:48:34 -0400 Subject: [PATCH 16/96] extension: Re-enable `test_extension_store_with_test_extension` test (#18046) The `test_extension_store_with_test_extension` test was disabled in 
#15446, which got merged before re-enabling the test. This PR re-enables that test. Release Notes: - N/A --- crates/extension/src/extension_store_test.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/crates/extension/src/extension_store_test.rs b/crates/extension/src/extension_store_test.rs index 0fbd00e0b4..4bdafaa32c 100644 --- a/crates/extension/src/extension_store_test.rs +++ b/crates/extension/src/extension_store_test.rs @@ -457,8 +457,6 @@ async fn test_extension_store(cx: &mut TestAppContext) { }); } -// TODO remove -#[ignore] #[gpui::test] async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { init_test(cx); @@ -564,10 +562,6 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice())); encoder.read_to_end(&mut gzipped_bytes).await.unwrap(); Ok(Response::new(gzipped_bytes.into())) - // } else if uri == WASI_ADAPTER_URL { - // let binary_contents = - // include_bytes!("wasi_snapshot_preview1.reactor.wasm").as_slice(); - // Ok(Response::new(binary_contents.into())) } else { Ok(Response::builder().status(404).body("not found".into())?) 
} From b43b800a54919103062e8fd7f5ff82c80026f211 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 18 Sep 2024 18:07:39 -0600 Subject: [PATCH 17/96] More assistant events (#18032) Release Notes: - N/A --- crates/assistant/src/context.rs | 3 +- crates/assistant/src/inline_assistant.rs | 29 +++++++++++++++++++ .../src/terminal_inline_assistant.rs | 1 + crates/client/src/telemetry.rs | 8 +++-- crates/collab/src/api/events.rs | 2 ++ .../telemetry_events/src/telemetry_events.rs | 28 +++++++++++++++++- 6 files changed, 66 insertions(+), 5 deletions(-) diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index d55b1aee08..d72b04e3cd 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -46,7 +46,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use telemetry_events::AssistantKind; +use telemetry_events::{AssistantKind, AssistantPhase}; use text::BufferSnapshot; use util::{post_inc, ResultExt, TryFutureExt}; use uuid::Uuid; @@ -2134,6 +2134,7 @@ impl Context { telemetry.report_assistant_event( Some(this.id.0.clone()), AssistantKind::Panel, + AssistantPhase::Response, model.telemetry_id(), response_latency, error_message, diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index eb1bc1eee8..c9360213ae 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -174,6 +174,18 @@ impl InlineAssistant { initial_prompt: Option, cx: &mut WindowContext, ) { + if let Some(telemetry) = self.telemetry.as_ref() { + if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { + telemetry.report_assistant_event( + None, + telemetry_events::AssistantKind::Inline, + telemetry_events::AssistantPhase::Invoked, + model.telemetry_id(), + None, + None, + ); + } + } let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx); let mut selections = Vec::>::new(); @@ -708,6 +720,22 @@ impl InlineAssistant { } pub fn 
finish_assist(&mut self, assist_id: InlineAssistId, undo: bool, cx: &mut WindowContext) { + if let Some(telemetry) = self.telemetry.as_ref() { + if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { + telemetry.report_assistant_event( + None, + telemetry_events::AssistantKind::Inline, + if undo { + telemetry_events::AssistantPhase::Rejected + } else { + telemetry_events::AssistantPhase::Accepted + }, + model.telemetry_id(), + None, + None, + ); + } + } if let Some(assist) = self.assists.get(&assist_id) { let assist_group_id = assist.group_id; if self.assist_groups[&assist_group_id].linked { @@ -2558,6 +2586,7 @@ impl Codegen { telemetry.report_assistant_event( None, telemetry_events::AssistantKind::Inline, + telemetry_events::AssistantPhase::Response, model_telemetry_id, response_latency, error_message, diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index 06661944d9..caf819bae5 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ -1066,6 +1066,7 @@ impl Codegen { telemetry.report_assistant_event( None, telemetry_events::AssistantKind::Inline, + telemetry_events::AssistantPhase::Response, model_telemetry_id, response_latency, error_message, diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index b415cae14c..46304819a4 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -16,9 +16,9 @@ use std::io::Write; use std::{env, mem, path::PathBuf, sync::Arc, time::Duration}; use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System}; use telemetry_events::{ - ActionEvent, AppEvent, AssistantEvent, AssistantKind, CallEvent, CpuEvent, EditEvent, - EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, - MemoryEvent, ReplEvent, SettingEvent, + ActionEvent, AppEvent, AssistantEvent, AssistantKind, AssistantPhase, 
CallEvent, CpuEvent, + EditEvent, EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, + InlineCompletionEvent, MemoryEvent, ReplEvent, SettingEvent, }; use tempfile::NamedTempFile; #[cfg(not(debug_assertions))] @@ -391,6 +391,7 @@ impl Telemetry { self: &Arc, conversation_id: Option, kind: AssistantKind, + phase: AssistantPhase, model: String, response_latency: Option, error_message: Option, @@ -398,6 +399,7 @@ impl Telemetry { let event = Event::Assistant(AssistantEvent { conversation_id, kind, + phase, model: model.to_string(), response_latency, error_message, diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 30ed10a76f..45c25d261e 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -834,6 +834,7 @@ pub struct AssistantEventRow { // AssistantEventRow conversation_id: String, kind: String, + phase: String, model: String, response_latency_in_ms: Option, error_message: Option, @@ -866,6 +867,7 @@ impl AssistantEventRow { time: time.timestamp_millis(), conversation_id: event.conversation_id.unwrap_or_default(), kind: event.kind.to_string(), + phase: event.phase.to_string(), model: event.model, response_latency_in_ms: event .response_latency diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index 87ecfb76b6..eb84322e83 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -44,7 +44,6 @@ pub enum AssistantKind { Panel, Inline, } - impl Display for AssistantKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( @@ -58,6 +57,31 @@ impl Display for AssistantKind { } } +#[derive(Default, Clone, Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum AssistantPhase { + #[default] + Response, + Invoked, + Accepted, + Rejected, +} + +impl Display for AssistantPhase { + fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + match self { + Self::Response => "response", + Self::Invoked => "invoked", + Self::Accepted => "accepted", + Self::Rejected => "rejected", + } + ) + } +} + #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(tag = "type")] pub enum Event { @@ -121,6 +145,8 @@ pub struct AssistantEvent { pub conversation_id: Option, /// The kind of assistant (Panel, Inline) pub kind: AssistantKind, + #[serde(default)] + pub phase: AssistantPhase, /// Name of the AI model used (gpt-4o, claude-3-5-sonnet, etc) pub model: String, pub response_latency: Option, From 43e005e936e13947ed99799375bcbfa35703b8cd Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 19 Sep 2024 02:19:58 +0200 Subject: [PATCH 18/96] chore: Remove commented out code following 15446 (#18047) Closes #ISSUE Release Notes: - N/A --- crates/ollama/src/ollama.rs | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index 972520e61f..51c4829048 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -343,17 +343,7 @@ pub async fn preload_model(client: Arc, api_url: &str, model: &s }), )?))?; - let mut response = match client.send(request).await { - Ok(response) => response, - Err(error) => { - // Be ok with a timeout during preload of the model - // if err.is_timeout() { - // return Ok(()); - // } else { - return Err(error); - //} - } - }; + let mut response = client.send(request).await?; if response.status().is_success() { Ok(()) From c3f47b8040a83b6414b8a28399628370fb7224f4 Mon Sep 17 00:00:00 2001 From: hekmyr <163496286+hekmyr@users.noreply.github.com> Date: Thu, 19 Sep 2024 02:28:31 +0200 Subject: [PATCH 19/96] vim: Fix increment/decrement command (#17644) Improving vim increment and decrement command. 
Closes: #16672 ## Release Notes: - vim: Improved edge-case handling for ctrl-a/ctrl-x --------- Co-authored-by: Conrad Irwin --- crates/vim/src/normal/increment.rs | 241 ++++++++++++++++-- ...st_increment_bin_wrapping_and_padding.json | 10 + .../test_data/test_increment_hex_casing.json | 5 + ...st_increment_hex_wrapping_and_padding.json | 10 + .../vim/test_data/test_increment_inline.json | 10 + .../test_data/test_increment_sign_change.json | 6 + .../test_data/test_increment_wrapping.json | 13 + 7 files changed, 273 insertions(+), 22 deletions(-) create mode 100644 crates/vim/test_data/test_increment_bin_wrapping_and_padding.json create mode 100644 crates/vim/test_data/test_increment_hex_casing.json create mode 100644 crates/vim/test_data/test_increment_hex_wrapping_and_padding.json create mode 100644 crates/vim/test_data/test_increment_inline.json create mode 100644 crates/vim/test_data/test_increment_sign_change.json create mode 100644 crates/vim/test_data/test_increment_wrapping.json diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 8786eae872..6d66e380c3 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -28,18 +28,18 @@ pub fn register(editor: &mut Editor, cx: &mut ViewContext) { vim.record_current_action(cx); let count = vim.take_count(cx).unwrap_or(1); let step = if action.step { 1 } else { 0 }; - vim.increment(count as i32, step, cx) + vim.increment(count as i64, step, cx) }); Vim::action(editor, cx, |vim, action: &Decrement, cx| { vim.record_current_action(cx); let count = vim.take_count(cx).unwrap_or(1); let step = if action.step { -1 } else { 0 }; - vim.increment(-(count as i32), step, cx) + vim.increment(-(count as i64), step, cx) }); } impl Vim { - fn increment(&mut self, mut delta: i32, step: i32, cx: &mut ViewContext) { + fn increment(&mut self, mut delta: i64, step: i32, cx: &mut ViewContext) { self.store_visual_marks(cx); self.update_editor(cx, |vim, editor, cx| { let 
mut edits = Vec::new(); @@ -60,23 +60,14 @@ impl Vim { }; if let Some((range, num, radix)) = find_number(&snapshot, start) { - if let Ok(val) = i32::from_str_radix(&num, radix) { - let result = val + delta; - delta += step; - let replace = match radix { - 10 => format!("{}", result), - 16 => { - if num.to_ascii_lowercase() == num { - format!("{:x}", result) - } else { - format!("{:X}", result) - } - } - 2 => format!("{:b}", result), - _ => unreachable!(), - }; - edits.push((range.clone(), replace)); - } + let replace = match radix { + 10 => increment_decimal_string(&num, delta), + 16 => increment_hex_string(&num, delta), + 2 => increment_binary_string(&num, delta), + _ => unreachable!(), + }; + delta += step as i64; + edits.push((range.clone(), replace)); if selection.is_empty() { new_anchors.push((false, snapshot.anchor_after(range.end))) } @@ -107,6 +98,70 @@ impl Vim { } } +fn increment_decimal_string(mut num: &str, mut delta: i64) -> String { + let mut negative = false; + if num.chars().next() == Some('-') { + negative = true; + delta = 0 - delta; + num = &num[1..]; + } + let result = if let Ok(value) = u64::from_str_radix(num, 10) { + let wrapped = value.wrapping_add_signed(delta); + if delta < 0 && wrapped > value { + negative = !negative; + (u64::MAX - wrapped).wrapping_add(1) + } else if delta > 0 && wrapped < value { + negative = !negative; + u64::MAX - wrapped + } else { + wrapped + } + } else { + u64::MAX + }; + + if result == 0 || !negative { + format!("{}", result) + } else { + format!("-{}", result) + } +} + +fn increment_hex_string(num: &str, delta: i64) -> String { + let result = if let Ok(val) = u64::from_str_radix(&num, 16) { + val.wrapping_add_signed(delta) + } else { + u64::MAX + }; + if should_use_lowercase(num) { + format!("{:0width$x}", result, width = num.len()) + } else { + format!("{:0width$X}", result, width = num.len()) + } +} + +fn should_use_lowercase(num: &str) -> bool { + let mut use_uppercase = false; + for ch in num.chars() { + if 
ch.is_ascii_lowercase() { + return true; + } + if ch.is_ascii_uppercase() { + use_uppercase = true; + } + } + !use_uppercase +} + +fn increment_binary_string(num: &str, delta: i64) -> String { + let result = if let Ok(val) = u64::from_str_radix(&num, 2) { + val.wrapping_add_signed(delta) + } else { + u64::MAX + }; + format!("{:0width$b}", result, width = num.len()) +} + fn find_number( snapshot: &MultiBufferSnapshot, start: Point, @@ -114,10 +169,10 @@ fn find_number( let mut offset = start.to_offset(snapshot); let ch0 = snapshot.chars_at(offset).next(); - if ch0.as_ref().is_some_and(char::is_ascii_digit) || matches!(ch0, Some('-' | 'b' | 'x')) { + if ch0.as_ref().is_some_and(char::is_ascii_hexdigit) || matches!(ch0, Some('-' | 'b' | 'x')) { // go backwards to the start of any number the selection is within for ch in snapshot.reversed_chars_at(offset) { - if ch.is_ascii_digit() || ch == '-' || ch == 'b' || ch == 'x' { + if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' { offset -= ch.len_utf8(); continue; } @@ -158,6 +213,8 @@ fn find_number( begin = Some(offset); } num.push(ch); + println!("pushing {}", ch); + println!(); } else if begin.is_some() { end = Some(offset); break; @@ -250,6 +307,146 @@ mod test { "}); } + #[gpui::test] + async fn test_increment_sign_change(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + ˇ0 + "}) + .await; + cx.simulate_shared_keystrokes("ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + -ˇ1 + "}); + cx.simulate_shared_keystrokes("2 ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + ˇ1 + "}); + } + + #[gpui::test] + async fn test_increment_bin_wrapping_and_padding(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! 
{" + 0b111111111111111111111111111111111111111111111111111111111111111111111ˇ1 + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000000000000000000000000000000000000000000000000000000000000000000ˇ0 + "}); + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000000000000000000000000000000000000000000000000000000000000000000ˇ1 + "}); + cx.simulate_shared_keystrokes("2 ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + 0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1 + "}); + } + + #[gpui::test] + async fn test_increment_hex_wrapping_and_padding(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + 0xfffffffffffffffffffˇf + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000fffffffffffffffˇf + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000000000000000000ˇ0 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000000000000000000ˇ1 + "}); + cx.simulate_shared_keystrokes("2 ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + 0x0000fffffffffffffffˇf + "}); + } + + #[gpui::test] + async fn test_increment_wrapping(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + 1844674407370955161ˇ9 + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! 
{" + 1844674407370955161ˇ5 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + -1844674407370955161ˇ5 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + -1844674407370955161ˇ4 + "}); + cx.simulate_shared_keystrokes("3 ctrl-x").await; + cx.shared_state().await.assert_eq(indoc! {" + 1844674407370955161ˇ4 + "}); + cx.simulate_shared_keystrokes("2 ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + -1844674407370955161ˇ5 + "}); + } + + #[gpui::test] + async fn test_increment_inline(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + inline0x3ˇ9u32 + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + inline0x3ˇau32 + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + inline0x3ˇbu32 + "}); + cx.simulate_shared_keystrokes("l l l ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + inline0x3bu3ˇ3 + "}); + } + + #[gpui::test] + async fn test_increment_hex_casing(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_shared_state(indoc! {" + 0xFˇa + "}) + .await; + + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! {" + 0xfˇb + "}); + cx.simulate_shared_keystrokes("ctrl-a").await; + cx.shared_state().await.assert_eq(indoc! 
{" + 0xfˇc + "}); + } + #[gpui::test] async fn test_increment_radix(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json b/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json new file mode 100644 index 0000000000..4f1a6aa1d3 --- /dev/null +++ b/crates/vim/test_data/test_increment_bin_wrapping_and_padding.json @@ -0,0 +1,10 @@ +{"Put":{"state":"0b111111111111111111111111111111111111111111111111111111111111111111111ˇ1\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ0\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0b000000000000000000000000000000000000000000000000000000000000000000000ˇ1\n","mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-x"} +{"Get":{"state":"0b000000111111111111111111111111111111111111111111111111111111111111111ˇ1\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_hex_casing.json b/crates/vim/test_data/test_increment_hex_casing.json new file mode 100644 index 0000000000..951906fa25 --- /dev/null +++ b/crates/vim/test_data/test_increment_hex_casing.json @@ -0,0 +1,5 @@ +{"Put":{"state":"0xFˇa\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0xfˇb\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0xfˇc\n","mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json b/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json new file mode 100644 index 0000000000..23a5611264 --- /dev/null +++ b/crates/vim/test_data/test_increment_hex_wrapping_and_padding.json @@ -0,0 +1,10 @@ +{"Put":{"state":"0xfffffffffffffffffffˇf\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}} +{"Key":"ctrl-a"} 
+{"Get":{"state":"0x0000000000000000000ˇ0\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"0x0000000000000000000ˇ1\n","mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-x"} +{"Get":{"state":"0x0000fffffffffffffffˇf\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_inline.json b/crates/vim/test_data/test_increment_inline.json new file mode 100644 index 0000000000..98c4fc2805 --- /dev/null +++ b/crates/vim/test_data/test_increment_inline.json @@ -0,0 +1,10 @@ +{"Put":{"state":"inline0x3ˇ9u32\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"inline0x3ˇau32\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"inline0x3ˇbu32\n", "mode":"Normal"}} +{"Key":"l"} +{"Key":"l"} +{"Key":"l"} +{"Key":"ctrl-a"} +{"Get":{"state":"inline0x3bu3ˇ3\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_sign_change.json b/crates/vim/test_data/test_increment_sign_change.json new file mode 100644 index 0000000000..1f4edd57b4 --- /dev/null +++ b/crates/vim/test_data/test_increment_sign_change.json @@ -0,0 +1,6 @@ +{"Put":{"state":"ˇ0\n"}} +{"Key":"ctrl-x"} +{"Get":{"state":"-ˇ1\n","mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-a"} +{"Get":{"state":"ˇ1\n", "mode":"Normal"}} diff --git a/crates/vim/test_data/test_increment_wrapping.json b/crates/vim/test_data/test_increment_wrapping.json new file mode 100644 index 0000000000..9f84c8cb11 --- /dev/null +++ b/crates/vim/test_data/test_increment_wrapping.json @@ -0,0 +1,13 @@ +{"Put":{"state":"1844674407370955161ˇ9\n"}} +{"Key":"ctrl-a"} +{"Get":{"state":"1844674407370955161ˇ5\n","mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}} +{"Key":"ctrl-a"} +{"Get":{"state":"-1844674407370955161ˇ4\n", "mode":"Normal"}} +{"Key":"3"} +{"Key":"ctrl-x"} +{"Get":{"state":"1844674407370955161ˇ4\n", "mode":"Normal"}} +{"Key":"2"} +{"Key":"ctrl-a"} +{"Get":{"state":"-1844674407370955161ˇ5\n", "mode":"Normal"}} From 1b612108bae7e4c7ac194e5803b4144fbc218df6 Mon Sep 17 00:00:00 2001 
From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 19 Sep 2024 11:40:01 +0200 Subject: [PATCH 20/96] linux: Fix invalid check for denylisted dependencies (#18050) Closes #ISSUE Release Notes: - N/A --- script/bundle-linux | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/bundle-linux b/script/bundle-linux index deecd0984b..c519f3b9ab 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -56,7 +56,7 @@ strip --strip-debug "${target_dir}/${target_triple}/release/remote_server" # Ensure that remote_server does not depend on libssl nor libcrypto, as we got rid of these deps. -ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl' +! ldd "${target_dir}/${target_triple}/release/remote_server" | grep -q 'libcrypto\|libssl' suffix="" if [ "$channel" != "stable" ]; then From 5e6d1814e5c69d4e2e50d01744f5effe2b92ce70 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 19 Sep 2024 12:22:10 +0200 Subject: [PATCH 21/96] Add stray UI tweaks on the task picker (#18059) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR adds tiny UI tweaks to the task picker. Just making sure it is consistent with other pickers throughout Zed. 
| Before | After | |--------|--------| | Screenshot 2024-09-19 at 12 07 44 PM | Screenshot 2024-09-19 at 12 07 09 PM | Release Notes: - N/A --- crates/tasks_ui/src/modal.rs | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 1255d3a94e..931a0b09c3 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -410,7 +410,7 @@ impl PickerDelegate for TasksModalDelegate { Some( ListItem::new(SharedString::from(format!("tasks-modal-{ix}"))) - .inset(false) + .inset(true) .start_slot::(icon) .end_slot::(history_run_icon) .spacing(ListItemSpacing::Sparse) @@ -448,7 +448,7 @@ impl PickerDelegate for TasksModalDelegate { picker.refresh(cx); })) .tooltip(|cx| { - Tooltip::text("Delete previously scheduled task", cx) + Tooltip::text("Delete Previously Scheduled Task", cx) }), ); item.end_hover_slot(delete_button) @@ -499,7 +499,7 @@ impl PickerDelegate for TasksModalDelegate { .last_scheduled_task(None) .is_some() { - Some(("Rerun last task", Rerun::default().boxed_clone())) + Some(("Rerun Last Task", Rerun::default().boxed_clone())) } else { None }; @@ -511,6 +511,8 @@ impl PickerDelegate for TasksModalDelegate { .justify_between() .rounded_b_md() .bg(cx.theme().colors().ghost_element_selected) + .border_t_1() + .border_color(cx.theme().colors().border_variant) .child( left_button .map(|(label, action)| { @@ -535,9 +537,9 @@ impl PickerDelegate for TasksModalDelegate { .boxed_clone(); this.children(KeyBinding::for_action(&*action, cx).map(|keybind| { let spawn_oneshot_label = if current_modifiers.secondary() { - "Spawn oneshot without history" + "Spawn Oneshot Without History" } else { - "Spawn oneshot" + "Spawn Oneshot" }; Button::new("spawn-onehshot", spawn_oneshot_label) @@ -549,9 +551,9 @@ impl PickerDelegate for TasksModalDelegate { this.children(KeyBinding::for_action(&menu::SecondaryConfirm, cx).map( |keybind| { let label = if is_recent_selected { - 
"Rerun without history" + "Rerun Without History" } else { - "Spawn without history" + "Spawn Without History" }; Button::new("spawn", label) .label_size(LabelSize::Small) From ca4980df02aa2618ebcb3969963c6fdc8ac23fd7 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Thu, 19 Sep 2024 07:20:27 -0400 Subject: [PATCH 22/96] Add system_id (#18040) This PR adds `system_id` to telemetry, which is contained within a new `global` database (accessible by any release channel of Zed on a single system). This will help us get a more accurate understanding of user count, instead of relying on `installationd_id`, which is different per release channel. This doesn't solve the problem of a user with multiple machines, but it gets us closer. Release Notes: - N/A --- crates/client/src/telemetry.rs | 17 ++-- crates/collab/src/api/events.rs | 25 ++++-- crates/db/src/db.rs | 68 ++++++++------ crates/db/src/kvp.rs | 30 +++++++ crates/feedback/src/feedback_modal.rs | 6 +- .../telemetry_events/src/telemetry_events.rs | 8 +- crates/zed/src/main.rs | 88 ++++++++++++++----- crates/zed/src/reliability.rs | 4 +- 8 files changed, 184 insertions(+), 62 deletions(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 46304819a4..6c1803df3d 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -37,9 +37,10 @@ pub struct Telemetry { struct TelemetryState { settings: TelemetrySettings, - metrics_id: Option>, // Per logged-in user + system_id: Option>, // Per system installation_id: Option>, // Per app installation (different for dev, nightly, preview, and stable) session_id: Option, // Per app launch + metrics_id: Option>, // Per logged-in user release_channel: Option<&'static str>, architecture: &'static str, events_queue: Vec, @@ -191,9 +192,10 @@ impl Telemetry { settings: *TelemetrySettings::get_global(cx), architecture: env::consts::ARCH, release_channel, + system_id: None, installation_id: None, - metrics_id: None, session_id: 
None, + metrics_id: None, events_queue: Vec::new(), flush_events_task: None, log_file: None, @@ -283,11 +285,13 @@ impl Telemetry { pub fn start( self: &Arc, + system_id: Option, installation_id: Option, session_id: String, cx: &mut AppContext, ) { let mut state = self.state.lock(); + state.system_id = system_id.map(|id| id.into()); state.installation_id = installation_id.map(|id| id.into()); state.session_id = Some(session_id); state.app_version = release_channel::AppVersion::global(cx).to_string(); @@ -637,9 +641,10 @@ impl Telemetry { let state = this.state.lock(); let request_body = EventRequestBody { + system_id: state.system_id.as_deref().map(Into::into), installation_id: state.installation_id.as_deref().map(Into::into), - metrics_id: state.metrics_id.as_deref().map(Into::into), session_id: state.session_id.clone(), + metrics_id: state.metrics_id.as_deref().map(Into::into), is_staff: state.is_staff, app_version: state.app_version.clone(), os_name: state.os_name.clone(), @@ -711,6 +716,7 @@ mod tests { Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), )); let http = FakeHttpClient::with_200_response(); + let system_id = Some("system_id".to_string()); let installation_id = Some("installation_id".to_string()); let session_id = "session_id".to_string(); @@ -718,7 +724,7 @@ mod tests { let telemetry = Telemetry::new(clock.clone(), http, cx); telemetry.state.lock().max_queue_size = 4; - telemetry.start(installation_id, session_id, cx); + telemetry.start(system_id, installation_id, session_id, cx); assert!(is_empty_state(&telemetry)); @@ -796,13 +802,14 @@ mod tests { Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(), )); let http = FakeHttpClient::with_200_response(); + let system_id = Some("system_id".to_string()); let installation_id = Some("installation_id".to_string()); let session_id = "session_id".to_string(); cx.update(|cx| { let telemetry = Telemetry::new(clock.clone(), http, cx); telemetry.state.lock().max_queue_size = 4; - 
telemetry.start(installation_id, session_id, cx); + telemetry.start(system_id, installation_id, session_id, cx); assert!(is_empty_state(&telemetry)); diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 45c25d261e..1be8f9c37b 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -149,7 +149,8 @@ pub async fn post_crash( installation_id = %installation_id, description = %description, backtrace = %summary, - "crash report"); + "crash report" + ); if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { let payload = slack::WebhookBody::new(|w| { @@ -627,7 +628,9 @@ where #[derive(Serialize, Debug, clickhouse::Row)] pub struct EditorEventRow { + system_id: String, installation_id: String, + session_id: Option, metrics_id: String, operation: String, app_version: String, @@ -647,7 +650,6 @@ pub struct EditorEventRow { historical_event: bool, architecture: String, is_staff: Option, - session_id: Option, major: Option, minor: Option, patch: Option, @@ -677,9 +679,10 @@ impl EditorEventRow { os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), architecture: body.architecture.clone(), + system_id: body.system_id.clone().unwrap_or_default(), installation_id: body.installation_id.clone().unwrap_or_default(), - metrics_id: body.metrics_id.clone().unwrap_or_default(), session_id: body.session_id.clone(), + metrics_id: body.metrics_id.clone().unwrap_or_default(), is_staff: body.is_staff, time: time.timestamp_millis(), operation: event.operation, @@ -699,6 +702,7 @@ impl EditorEventRow { #[derive(Serialize, Debug, clickhouse::Row)] pub struct InlineCompletionEventRow { installation_id: String, + session_id: Option, provider: String, suggestion_accepted: bool, app_version: String, @@ -713,7 +717,6 @@ pub struct InlineCompletionEventRow { city: String, time: i64, is_staff: Option, - session_id: Option, major: Option, minor: Option, patch: Option, @@ -879,7 +882,9 
@@ impl AssistantEventRow { #[derive(Debug, clickhouse::Row, Serialize)] pub struct CpuEventRow { + system_id: Option, installation_id: Option, + session_id: Option, is_staff: Option, usage_as_percentage: f32, core_count: u32, @@ -888,7 +893,6 @@ pub struct CpuEventRow { os_name: String, os_version: String, time: i64, - session_id: Option, // pub normalized_cpu_usage: f64, MATERIALIZED major: Option, minor: Option, @@ -917,6 +921,7 @@ impl CpuEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -940,6 +945,7 @@ pub struct MemoryEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -971,6 +977,7 @@ impl MemoryEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -994,6 +1001,7 @@ pub struct AppEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1024,6 +1032,7 @@ impl AppEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1046,6 +1055,7 @@ pub struct SettingEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1076,6 +1086,7 @@ impl 
SettingEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1099,6 +1110,7 @@ pub struct ExtensionEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1134,6 +1146,7 @@ impl ExtensionEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1224,6 +1237,7 @@ pub struct EditEventRow { os_version: String, // ClientEventBase + system_id: Option, installation_id: Option, // Note: This column name has a typo in the ClickHouse table. 
#[serde(rename = "sesssion_id")] @@ -1261,6 +1275,7 @@ impl EditEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), + system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs index 768f382203..4d87222c77 100644 --- a/crates/db/src/db.rs +++ b/crates/db/src/db.rs @@ -11,16 +11,14 @@ pub use smol; pub use sqlez; pub use sqlez_macros; -use release_channel::ReleaseChannel; pub use release_channel::RELEASE_CHANNEL; use sqlez::domain::Migrator; use sqlez::thread_safe_connection::ThreadSafeConnection; use sqlez_macros::sql; -use std::env; use std::future::Future; use std::path::Path; -use std::sync::atomic::{AtomicBool, Ordering}; -use std::sync::LazyLock; +use std::sync::{atomic::Ordering, LazyLock}; +use std::{env, sync::atomic::AtomicBool}; use util::{maybe, ResultExt}; const CONNECTION_INITIALIZE_QUERY: &str = sql!( @@ -47,16 +45,12 @@ pub static ALL_FILE_DB_FAILED: LazyLock = LazyLock::new(|| AtomicBoo /// This will retry a couple times if there are failures. If opening fails once, the db directory /// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created. /// In either case, static variables are set so that the user can be notified. 
-pub async fn open_db( - db_dir: &Path, - release_channel: &ReleaseChannel, -) -> ThreadSafeConnection { +pub async fn open_db(db_dir: &Path, scope: &str) -> ThreadSafeConnection { if *ZED_STATELESS { return open_fallback_db().await; } - let release_channel_name = release_channel.dev_name(); - let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name))); + let main_db_dir = db_dir.join(format!("0-{}", scope)); let connection = maybe!(async { smol::fs::create_dir_all(&main_db_dir) @@ -118,7 +112,7 @@ pub async fn open_test_db(db_name: &str) -> ThreadSafeConnection /// Implements a basic DB wrapper for a given domain #[macro_export] macro_rules! define_connection { - (pub static ref $id:ident: $t:ident<()> = $migrations:expr;) => { + (pub static ref $id:ident: $t:ident<()> = $migrations:expr; $($global:ident)?) => { pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>); impl ::std::ops::Deref for $t { @@ -139,18 +133,23 @@ macro_rules! define_connection { } } - use std::sync::LazyLock; #[cfg(any(test, feature = "test-support"))] - pub static $id: LazyLock<$t> = LazyLock::new(|| { + pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { $t($crate::smol::block_on($crate::open_test_db(stringify!($id)))) }); #[cfg(not(any(test, feature = "test-support")))] - pub static $id: LazyLock<$t> = LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL))) + pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { + let db_dir = $crate::database_dir(); + let scope = if false $(|| stringify!($global) == "global")? { + "global" + } else { + $crate::RELEASE_CHANNEL.dev_name() + }; + $t($crate::smol::block_on($crate::open_db(db_dir, scope))) }); }; - (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr;) => { + (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr; $($global:ident)?) 
=> { pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>); impl ::std::ops::Deref for $t { @@ -178,7 +177,13 @@ macro_rules! define_connection { #[cfg(not(any(test, feature = "test-support")))] pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_db($crate::database_dir(), &$crate::RELEASE_CHANNEL))) + let db_dir = $crate::database_dir(); + let scope = if false $(|| stringify!($global) == "global")? { + "global" + } else { + $crate::RELEASE_CHANNEL.dev_name() + }; + $t($crate::smol::block_on($crate::open_db(db_dir, scope))) }); }; } @@ -225,7 +230,11 @@ mod tests { .prefix("DbTests") .tempdir() .unwrap(); - let _bad_db = open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let _bad_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; } /// Test that DB exists but corrupted (causing recreate) @@ -262,13 +271,19 @@ mod tests { .tempdir() .unwrap(); { - let corrupt_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let corrupt_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!(corrupt_db.persistent()); } - let good_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let good_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!( good_db.select_row::("SELECT * FROM test2").unwrap()() .unwrap() @@ -311,8 +326,11 @@ mod tests { .unwrap(); { // Setup the bad database - let corrupt_db = - open_db::(tempdir.path(), &release_channel::ReleaseChannel::Dev).await; + let corrupt_db = open_db::( + tempdir.path(), + &release_channel::ReleaseChannel::Dev.dev_name(), + ) + .await; assert!(corrupt_db.persistent()); } @@ -323,7 +341,7 @@ mod tests { let guard = thread::spawn(move || { let good_db = smol::block_on(open_db::( tmp_path.as_path(), - 
&release_channel::ReleaseChannel::Dev, + &release_channel::ReleaseChannel::Dev.dev_name(), )); assert!( good_db.select_row::("SELECT * FROM test2").unwrap()() diff --git a/crates/db/src/kvp.rs b/crates/db/src/kvp.rs index 0b0cdd9aa1..c9d994d34d 100644 --- a/crates/db/src/kvp.rs +++ b/crates/db/src/kvp.rs @@ -60,3 +60,33 @@ mod tests { assert_eq!(db.read_kvp("key-1").unwrap(), None); } } + +define_connection!(pub static ref GLOBAL_KEY_VALUE_STORE: GlobalKeyValueStore<()> = + &[sql!( + CREATE TABLE IF NOT EXISTS kv_store( + key TEXT PRIMARY KEY, + value TEXT NOT NULL + ) STRICT; + )]; + global +); + +impl GlobalKeyValueStore { + query! { + pub fn read_kvp(key: &str) -> Result> { + SELECT value FROM kv_store WHERE key = (?) + } + } + + query! { + pub async fn write_kvp(key: String, value: String) -> Result<()> { + INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?)) + } + } + + query! { + pub async fn delete_kvp(key: String) -> Result<()> { + DELETE FROM kv_store WHERE key = (?) + } + } +} diff --git a/crates/feedback/src/feedback_modal.rs b/crates/feedback/src/feedback_modal.rs index 7369bcd853..a4a07ad2ad 100644 --- a/crates/feedback/src/feedback_modal.rs +++ b/crates/feedback/src/feedback_modal.rs @@ -44,8 +44,8 @@ const FEEDBACK_SUBMISSION_ERROR_TEXT: &str = struct FeedbackRequestBody<'a> { feedback_text: &'a str, email: Option, - metrics_id: Option>, installation_id: Option>, + metrics_id: Option>, system_specs: SystemSpecs, is_staff: bool, } @@ -296,16 +296,16 @@ impl FeedbackModal { } let telemetry = zed_client.telemetry(); - let metrics_id = telemetry.metrics_id(); let installation_id = telemetry.installation_id(); + let metrics_id = telemetry.metrics_id(); let is_staff = telemetry.is_staff(); let http_client = zed_client.http_client(); let feedback_endpoint = http_client.build_url("/api/feedback"); let request = FeedbackRequestBody { feedback_text, email, - metrics_id, installation_id, + metrics_id, system_specs, is_staff: is_staff.unwrap_or(false), 
}; diff --git a/crates/telemetry_events/src/telemetry_events.rs b/crates/telemetry_events/src/telemetry_events.rs index eb84322e83..d6e737b929 100644 --- a/crates/telemetry_events/src/telemetry_events.rs +++ b/crates/telemetry_events/src/telemetry_events.rs @@ -5,12 +5,14 @@ use std::{fmt::Display, sync::Arc, time::Duration}; #[derive(Serialize, Deserialize, Debug)] pub struct EventRequestBody { + /// Identifier unique to each system Zed is installed on + pub system_id: Option, /// Identifier unique to each Zed installation (differs for stable, preview, dev) pub installation_id: Option, /// Identifier unique to each logged in Zed user (randomly generated on first sign in) - pub metrics_id: Option, /// Identifier unique to each Zed session (differs for each time you open Zed) pub session_id: Option, + pub metrics_id: Option, /// True for Zed staff, otherwise false pub is_staff: Option, /// Zed version number @@ -34,6 +36,7 @@ pub struct EventWrapper { pub signed_in: bool, /// Duration between this event's timestamp and the timestamp of the first event in the current batch pub milliseconds_since_first_event: i64, + /// The event itself #[serde(flatten)] pub event: Event, } @@ -245,8 +248,11 @@ pub struct Panic { pub architecture: String, /// The time the panic occurred (UNIX millisecond timestamp) pub panicked_on: i64, + /// Identifier unique to each system Zed is installed on #[serde(skip_serializing_if = "Option::is_none")] + pub system_id: Option, /// Identifier unique to each Zed installation (differs for stable, preview, dev) + #[serde(skip_serializing_if = "Option::is_none")] pub installation_id: Option, /// Identifier unique to each Zed session (differs for each time you open Zed) pub session_id: String, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index d3a722ec65..c127a975a9 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -13,7 +13,7 @@ use clap::{command, Parser}; use cli::FORCE_CLI_MODE_ENV_VAR_NAME; use 
client::{parse_zed_link, Client, DevServerToken, ProxySettings, UserStore}; use collab_ui::channel_view::ChannelView; -use db::kvp::KEY_VALUE_STORE; +use db::kvp::{GLOBAL_KEY_VALUE_STORE, KEY_VALUE_STORE}; use editor::Editor; use env_logger::Builder; use fs::{Fs, RealFs}; @@ -334,19 +334,17 @@ fn main() { .with_assets(Assets) .with_http_client(IsahcHttpClient::new(None, None)); - let (installation_id, existing_installation_id_found) = app - .background_executor() - .block(installation_id()) - .ok() - .unzip(); - + let system_id = app.background_executor().block(system_id()).ok(); + let installation_id = app.background_executor().block(installation_id()).ok(); + let session_id = Uuid::new_v4().to_string(); let session = app.background_executor().block(Session::new()); - let app_version = AppVersion::init(env!("CARGO_PKG_VERSION")); + reliability::init_panic_hook( - installation_id.clone(), app_version, - session.id().to_owned(), + system_id.as_ref().map(|id| id.to_string()), + installation_id.as_ref().map(|id| id.to_string()), + session_id.clone(), ); let (open_listener, mut open_rx) = OpenListener::new(); @@ -491,14 +489,26 @@ fn main() { client::init(&client, cx); language::init(cx); let telemetry = client.telemetry(); - telemetry.start(installation_id.clone(), session.id().to_owned(), cx); - telemetry.report_app_event( - match existing_installation_id_found { - Some(false) => "first open", - _ => "open", - } - .to_string(), + telemetry.start( + system_id.as_ref().map(|id| id.to_string()), + installation_id.as_ref().map(|id| id.to_string()), + session_id, + cx, ); + if let (Some(system_id), Some(installation_id)) = (&system_id, &installation_id) { + match (&system_id, &installation_id) { + (IdType::New(_), IdType::New(_)) => { + telemetry.report_app_event("first open".to_string()); + telemetry.report_app_event("first open for release channel".to_string()); + } + (IdType::Existing(_), IdType::New(_)) => { + telemetry.report_app_event("first open for release 
channel".to_string()); + } + (_, IdType::Existing(_)) => { + telemetry.report_app_event("open".to_string()); + } + } + } let app_session = cx.new_model(|cx| AppSession::new(session, cx)); let app_state = Arc::new(AppState { @@ -514,7 +524,11 @@ fn main() { AppState::set_global(Arc::downgrade(&app_state), cx); auto_update::init(client.http_client(), cx); - reliability::init(client.http_client(), installation_id, cx); + reliability::init( + client.http_client(), + installation_id.clone().map(|id| id.to_string()), + cx, + ); let prompt_builder = init_common(app_state.clone(), cx); let args = Args::parse(); @@ -755,7 +769,23 @@ async fn authenticate(client: Arc, cx: &AsyncAppContext) -> Result<()> { Ok::<_, anyhow::Error>(()) } -async fn installation_id() -> Result<(String, bool)> { +async fn system_id() -> Result { + let key_name = "system_id".to_string(); + + if let Ok(Some(system_id)) = GLOBAL_KEY_VALUE_STORE.read_kvp(&key_name) { + return Ok(IdType::Existing(system_id)); + } + + let system_id = Uuid::new_v4().to_string(); + + GLOBAL_KEY_VALUE_STORE + .write_kvp(key_name, system_id.clone()) + .await?; + + Ok(IdType::New(system_id)) +} + +async fn installation_id() -> Result { let legacy_key_name = "device_id".to_string(); let key_name = "installation_id".to_string(); @@ -765,11 +795,11 @@ async fn installation_id() -> Result<(String, bool)> { .write_kvp(key_name, installation_id.clone()) .await?; KEY_VALUE_STORE.delete_kvp(legacy_key_name).await?; - return Ok((installation_id, true)); + return Ok(IdType::Existing(installation_id)); } if let Ok(Some(installation_id)) = KEY_VALUE_STORE.read_kvp(&key_name) { - return Ok((installation_id, true)); + return Ok(IdType::Existing(installation_id)); } let installation_id = Uuid::new_v4().to_string(); @@ -778,7 +808,7 @@ async fn installation_id() -> Result<(String, bool)> { .write_kvp(key_name, installation_id.clone()) .await?; - Ok((installation_id, false)) + Ok(IdType::New(installation_id)) } async fn 
restore_or_create_workspace( @@ -1087,6 +1117,20 @@ struct Args { dev_server_token: Option, } +#[derive(Clone, Debug)] +enum IdType { + New(String), + Existing(String), +} + +impl ToString for IdType { + fn to_string(&self) -> String { + match self { + IdType::New(id) | IdType::Existing(id) => id.clone(), + } + } +} + fn parse_url_arg(arg: &str, cx: &AppContext) -> Result { match std::fs::canonicalize(Path::new(&arg)) { Ok(path) => Ok(format!( diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 188cf417f7..9e811d7c9a 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -28,8 +28,9 @@ use crate::stdout_is_a_pty; static PANIC_COUNT: AtomicU32 = AtomicU32::new(0); pub fn init_panic_hook( - installation_id: Option, app_version: SemanticVersion, + system_id: Option, + installation_id: Option, session_id: String, ) { let is_pty = stdout_is_a_pty(); @@ -102,6 +103,7 @@ pub fn init_panic_hook( architecture: env::consts::ARCH.into(), panicked_on: Utc::now().timestamp_millis(), backtrace, + system_id: system_id.clone(), installation_id: installation_id.clone(), session_id: session_id.clone(), }; From 1723713dc292074e558935b9f5c81eac7938c396 Mon Sep 17 00:00:00 2001 From: thataboy Date: Thu, 19 Sep 2024 04:43:49 -0700 Subject: [PATCH 23/96] Add ability to copy assistant code block to clipboard or insert into editor, without manual selection (#17853) Some notes: - You can put the cursor on the start or end line with triple backticks, it doesn't actually have to be inside the block. - Placing the cursor outside of a code block does nothing. - Code blocks are determined by counting triple backticks pairs from either start or end of buffer, and nothing else. - If you manually select something, the selection takes precedence over any code blocks. 
Release Notes: - Added the ability to copy surrounding code blocks in the assistant panel into the clipboard, or inserting them directly into the editor, without manually selecting. Place cursor anywhere in a code block (marked by triple backticks) and use the `assistant::CopyCode` action (`cmd-k c` / `ctrl-k c`) to copy to the clipboard, or the `assistant::InsertIntoEditor` action (`cmd-<` / `ctrl-<`) to insert into editor. --------- Co-authored-by: Thorsten Ball Co-authored-by: Bennet --- Cargo.lock | 2 + assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 1 + crates/assistant/Cargo.toml | 2 + crates/assistant/src/assistant.rs | 1 + crates/assistant/src/assistant_panel.rs | 218 ++++++++++++++++++++++-- 6 files changed, 207 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 652c584fd5..a37a5350f5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -402,6 +402,7 @@ dependencies = [ "indoc", "language", "language_model", + "languages", "log", "markdown", "menu", @@ -436,6 +437,7 @@ dependencies = [ "text", "theme", "toml 0.8.19", + "tree-sitter-md", "ui", "unindent", "util", diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 02fc6d8e04..542f6c2df4 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -166,6 +166,7 @@ { "context": "AssistantPanel", "bindings": { + "ctrl-k c": "assistant::CopyCode", "ctrl-g": "search::SelectNextMatch", "ctrl-shift-g": "search::SelectPrevMatch", "alt-m": "assistant::ToggleModelSelector", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 33536cc9ff..77fac3254b 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -188,6 +188,7 @@ { "context": "AssistantPanel", "bindings": { + "cmd-k c": "assistant::CopyCode", "cmd-g": "search::SelectNextMatch", "cmd-shift-g": "search::SelectPrevMatch", "alt-m": "assistant::ToggleModelSelector", diff --git 
a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml index b700702062..9f715d8224 100644 --- a/crates/assistant/Cargo.toml +++ b/crates/assistant/Cargo.toml @@ -94,9 +94,11 @@ editor = { workspace = true, features = ["test-support"] } env_logger.workspace = true language = { workspace = true, features = ["test-support"] } language_model = { workspace = true, features = ["test-support"] } +languages = { workspace = true, features = ["test-support"] } log.workspace = true project = { workspace = true, features = ["test-support"] } rand.workspace = true serde_json_lenient.workspace = true text = { workspace = true, features = ["test-support"] } +tree-sitter-md.workspace = true unindent.workspace = true diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index af7f03ebb3..d7466878c9 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -58,6 +58,7 @@ actions!( [ Assist, Split, + CopyCode, CycleMessageRole, QuoteSelection, InsertIntoEditor, diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 5d06720fe0..094d187df2 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -12,11 +12,11 @@ use crate::{ slash_command_picker, terminal_inline_assistant::TerminalInlineAssistant, Assist, CacheStatus, ConfirmCommand, Content, Context, ContextEvent, ContextId, ContextStore, - ContextStoreEvent, CycleMessageRole, DeployHistory, DeployPromptLibrary, InlineAssistId, - InlineAssistant, InsertDraggedFiles, InsertIntoEditor, Message, MessageId, MessageMetadata, - MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, PendingSlashCommand, - PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, SavedContextMetadata, Split, - ToggleFocus, ToggleModelSelector, WorkflowStepResolution, + ContextStoreEvent, CopyCode, CycleMessageRole, DeployHistory, DeployPromptLibrary, + InlineAssistId, InlineAssistant, 
InsertDraggedFiles, InsertIntoEditor, Message, MessageId, + MessageMetadata, MessageStatus, ModelPickerDelegate, ModelSelector, NewContext, + PendingSlashCommand, PendingSlashCommandStatus, QuoteSelection, RemoteContextMetadata, + SavedContextMetadata, Split, ToggleFocus, ToggleModelSelector, WorkflowStepResolution, }; use anyhow::{anyhow, Result}; use assistant_slash_command::{SlashCommand, SlashCommandOutputSection}; @@ -45,7 +45,8 @@ use gpui::{ }; use indexed_docs::IndexedDocsStore; use language::{ - language_settings::SoftWrap, Capability, LanguageRegistry, LspAdapterDelegate, Point, ToOffset, + language_settings::SoftWrap, BufferSnapshot, Capability, LanguageRegistry, LspAdapterDelegate, + ToOffset, }; use language_model::{ provider::cloud::PROVIDER_ID, LanguageModelProvider, LanguageModelProviderId, @@ -56,6 +57,7 @@ use multi_buffer::MultiBufferRow; use picker::{Picker, PickerDelegate}; use project::lsp_store::LocalLspAdapterDelegate; use project::{Project, Worktree}; +use rope::Point; use search::{buffer_search::DivRegistrar, BufferSearchBar}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings}; @@ -81,9 +83,10 @@ use util::{maybe, ResultExt}; use workspace::{ dock::{DockPosition, Panel, PanelEvent}, item::{self, FollowableItem, Item, ItemHandle}, + notifications::NotificationId, pane::{self, SaveIntent}, searchable::{SearchEvent, SearchableItem}, - DraggedSelection, Pane, Save, ShowConfiguration, ToggleZoom, ToolbarItemEvent, + DraggedSelection, Pane, Save, ShowConfiguration, Toast, ToggleZoom, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, }; use workspace::{searchable::SearchableItemHandle, DraggedTab}; @@ -105,6 +108,7 @@ pub fn init(cx: &mut AppContext) { .register_action(AssistantPanel::inline_assist) .register_action(ContextEditor::quote_selection) .register_action(ContextEditor::insert_selection) + .register_action(ContextEditor::copy_code) .register_action(ContextEditor::insert_dragged_files) 
.register_action(AssistantPanel::show_configuration) .register_action(AssistantPanel::create_new_context); @@ -3100,6 +3104,40 @@ impl ContextEditor { }); } + /// Returns either the selected text, or the content of the Markdown code + /// block surrounding the cursor. + fn get_selection_or_code_block( + context_editor_view: &View, + cx: &mut ViewContext, + ) -> Option<(String, bool)> { + let context_editor = context_editor_view.read(cx).editor.read(cx); + + if context_editor.selections.newest::(cx).is_empty() { + let snapshot = context_editor.buffer().read(cx).snapshot(cx); + let (_, _, snapshot) = snapshot.as_singleton()?; + + let head = context_editor.selections.newest::(cx).head(); + let offset = snapshot.point_to_offset(head); + + let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; + let text = snapshot + .text_for_range(surrounding_code_block_range) + .collect::(); + + (!text.is_empty()).then_some((text, true)) + } else { + let anchor = context_editor.selections.newest_anchor(); + let text = context_editor + .buffer() + .read(cx) + .read(cx) + .text_for_range(anchor.range()) + .collect::(); + + (!text.is_empty()).then_some((text, false)) + } + } + fn insert_selection( workspace: &mut Workspace, _: &InsertIntoEditor, @@ -3118,17 +3156,7 @@ impl ContextEditor { return; }; - let context_editor = context_editor_view.read(cx).editor.read(cx); - let anchor = context_editor.selections.newest_anchor(); - let text = context_editor - .buffer() - .read(cx) - .read(cx) - .text_for_range(anchor.range()) - .collect::(); - - // If nothing is selected, don't delete the current selection; instead, be a no-op. 
- if !text.is_empty() { + if let Some((text, _)) = Self::get_selection_or_code_block(&context_editor_view, cx) { active_editor_view.update(cx, |editor, cx| { editor.insert(&text, cx); editor.focus(cx); @@ -3136,6 +3164,36 @@ impl ContextEditor { } } + fn copy_code(workspace: &mut Workspace, _: &CopyCode, cx: &mut ViewContext) { + let result = maybe!({ + let panel = workspace.panel::(cx)?; + let context_editor_view = panel.read(cx).active_context_editor(cx)?; + Self::get_selection_or_code_block(&context_editor_view, cx) + }); + let Some((text, is_code_block)) = result else { + return; + }; + + cx.write_to_clipboard(ClipboardItem::new_string(text)); + + struct CopyToClipboardToast; + workspace.show_toast( + Toast::new( + NotificationId::unique::(), + format!( + "{} copied to clipboard.", + if is_code_block { + "Code block" + } else { + "Selection" + } + ), + ) + .autohide(), + cx, + ); + } + fn insert_dragged_files( workspace: &mut Workspace, action: &InsertDraggedFiles, @@ -4215,6 +4273,48 @@ impl ContextEditor { } } +/// Returns the contents of the *outermost* fenced code block that contains the given offset. +fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Option> { + const CODE_BLOCK_NODE: &'static str = "fenced_code_block"; + const CODE_BLOCK_CONTENT: &'static str = "code_fence_content"; + + let layer = snapshot.syntax_layers().next()?; + + let root_node = layer.node(); + let mut cursor = root_node.walk(); + + // Go to the first child for the given offset + while cursor.goto_first_child_for_byte(offset).is_some() { + // If we're at the end of the node, go to the next one. + // Example: if you have a fenced-code-block, and you're on the start of the line + // right after the closing ```, you want to skip the fenced-code-block and + // go to the next sibling. + if cursor.node().end_byte() == offset { + cursor.goto_next_sibling(); + } + + if cursor.node().start_byte() > offset { + break; + } + + // We found the fenced code block. 
+ if cursor.node().kind() == CODE_BLOCK_NODE { + // Now we need to find the child node that contains the code. + cursor.goto_first_child(); + loop { + if cursor.node().kind() == CODE_BLOCK_CONTENT { + return Some(cursor.node().byte_range()); + } + if !cursor.goto_next_sibling() { + break; + } + } + } + } + + None +} + fn render_fold_icon_button( editor: WeakView, icon: IconName, @@ -5497,3 +5597,85 @@ fn configuration_error(cx: &AppContext) -> Option { None } + +#[cfg(test)] +mod tests { + use super::*; + use gpui::{AppContext, Context}; + use language::Buffer; + use unindent::Unindent; + + #[gpui::test] + fn test_find_code_blocks(cx: &mut AppContext) { + let markdown = languages::language("markdown", tree_sitter_md::LANGUAGE.into()); + + let buffer = cx.new_model(|cx| { + let text = r#" + line 0 + line 1 + ```rust + fn main() {} + ``` + line 5 + line 6 + line 7 + ```go + func main() {} + ``` + line 11 + ``` + this is plain text code block + ``` + + ```go + func another() {} + ``` + line 19 + "# + .unindent(); + let mut buffer = Buffer::local(text, cx); + buffer.set_language(Some(markdown.clone()), cx); + buffer + }); + let snapshot = buffer.read(cx).snapshot(); + + let code_blocks = vec![ + Point::new(3, 0)..Point::new(4, 0), + Point::new(9, 0)..Point::new(10, 0), + Point::new(13, 0)..Point::new(14, 0), + Point::new(17, 0)..Point::new(18, 0), + ] + .into_iter() + .map(|range| snapshot.point_to_offset(range.start)..snapshot.point_to_offset(range.end)) + .collect::>(); + + let expected_results = vec![ + (0, None), + (1, None), + (2, Some(code_blocks[0].clone())), + (3, Some(code_blocks[0].clone())), + (4, Some(code_blocks[0].clone())), + (5, None), + (6, None), + (7, None), + (8, Some(code_blocks[1].clone())), + (9, Some(code_blocks[1].clone())), + (10, Some(code_blocks[1].clone())), + (11, None), + (12, Some(code_blocks[2].clone())), + (13, Some(code_blocks[2].clone())), + (14, Some(code_blocks[2].clone())), + (15, None), + (16, Some(code_blocks[3].clone())), + 
(17, Some(code_blocks[3].clone())), + (18, Some(code_blocks[3].clone())), + (19, None), + ]; + + for (row, expected) in expected_results { + let offset = snapshot.point_to_offset(Point::new(row, 0)); + let range = find_surrounding_code_block(&snapshot, offset); + assert_eq!(range, expected, "unexpected result on row {:?}", row); + } + } +} From 23e1faa48524f55c5a9a2c7be084d730667abecb Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 19 Sep 2024 14:43:56 +0200 Subject: [PATCH 24/96] assistant panel: Fix copying code when trailing newline is missing (#18067) Follow-up to #17853. Apparently tree-sitter-md extends the range of the content node to include the backticks when there is no newline. Release Notes: - N/A Co-authored-by: Bennet --- crates/assistant/src/assistant_panel.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 094d187df2..da176ebeee 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -3110,6 +3110,8 @@ impl ContextEditor { context_editor_view: &View, cx: &mut ViewContext, ) -> Option<(String, bool)> { + const CODE_FENCE_DELIMITER: &'static str = "```"; + let context_editor = context_editor_view.read(cx).editor.read(cx); if context_editor.selections.newest::(cx).is_empty() { @@ -3120,10 +3122,17 @@ impl ContextEditor { let offset = snapshot.point_to_offset(head); let surrounding_code_block_range = find_surrounding_code_block(snapshot, offset)?; - let text = snapshot + let mut text = snapshot .text_for_range(surrounding_code_block_range) .collect::(); + // If there is no newline trailing the closing three-backticks, then + // tree-sitter-md extends the range of the content node to include + // the backticks. 
+ if text.ends_with(CODE_FENCE_DELIMITER) { + text.drain((text.len() - CODE_FENCE_DELIMITER.len())..); + } + (!text.is_empty()).then_some((text, true)) } else { let anchor = context_editor.selections.newest_anchor(); From 4338ff6be496edcdd86d5b97284f3a5ba9e140c2 Mon Sep 17 00:00:00 2001 From: Casey Watson Date: Thu, 19 Sep 2024 07:01:28 -0600 Subject: [PATCH 25/96] terminal: Add ability to open file from Git diff (#17446) - strip "a/" and "b/" prefix for potential paths. Release Notes: - Allow clicking on filepaths when using `git diff` inside the built-in terminal --- crates/terminal_view/src/terminal_view.rs | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index f19bfa7010..e0b92035d1 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -58,6 +58,8 @@ const REGEX_SPECIAL_CHARS: &[char] = &[ const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); +const GIT_DIFF_PATH_PREFIXES: &[char] = &['a', 'b']; + ///Event to transmit the scroll from the element to the view #[derive(Clone, Debug, PartialEq)] pub struct ScrollTerminal(pub i32); @@ -826,6 +828,19 @@ fn possible_open_targets( { potential_cwd_and_workspace_paths.insert(potential_worktree_path); } + + for prefix in GIT_DIFF_PATH_PREFIXES { + let prefix_str = &prefix.to_string(); + if maybe_path.starts_with(prefix_str) { + let stripped = maybe_path.strip_prefix(prefix_str).unwrap_or(&maybe_path); + for potential_worktree_path in workspace + .worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path().join(&stripped)) + { + potential_cwd_and_workspace_paths.insert(potential_worktree_path); + } + } + } }); } From 3d5c023fdae99907cf7cf9e67f7ae20bd7bd080c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 19 Sep 2024 09:55:51 -0400 Subject: [PATCH 26/96] ci: Move collab deploys back to DigitalOcean runners (#18071) This PR moves the collab 
deployment steps in CI back to the DigitalOcean runners temporarily, so that we can deploy collab. Release Notes: - N/A --- .github/workflows/deploy_collab.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 7abd52e5a6..6801be2a54 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -61,7 +61,8 @@ jobs: - style - tests runs-on: - - buildjet-16vcpu-ubuntu-2204 + - self-hosted + - deploy steps: - name: Add Rust to the PATH run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH @@ -88,7 +89,8 @@ jobs: needs: - publish runs-on: - - buildjet-16vcpu-ubuntu-2204 + - self-hosted + - deploy steps: - name: Sign into Kubernetes From d91e62524f7c6437349426687ded6d1182ad7346 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Thu, 19 Sep 2024 22:41:42 +0800 Subject: [PATCH 27/96] assistant: Fix offset calculation not in char boundary (#18069) Closes #17825 Release Notes: - N/A --- crates/assistant/src/prompts.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 83e894f797..ae2ab4787e 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -220,7 +220,8 @@ impl PromptBuilder { let before_range = 0..range.start; let truncated_before = if before_range.len() > MAX_CTX { is_truncated = true; - range.start - MAX_CTX..range.start + let start = buffer.clip_offset(range.start - MAX_CTX, text::Bias::Right); + start..range.start } else { before_range }; @@ -228,7 +229,8 @@ impl PromptBuilder { let after_range = range.end..buffer.len(); let truncated_after = if after_range.len() > MAX_CTX { is_truncated = true; - range.end..range.end + MAX_CTX + let end = buffer.clip_offset(range.end + MAX_CTX, text::Bias::Left); + range.end..end } else { after_range }; From d2894ce9c99586e9cd0588fa9b4db27bbd64f0ca Mon Sep 17 00:00:00 2001 From: Piotr 
Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Thu, 19 Sep 2024 17:00:26 +0200 Subject: [PATCH 28/96] pane: Do not autopin new item created as a neighbour of pinned tab (#18072) When I used editor::NewFile or ProjectSearch from a pinned tab, the resulting new tab would be pinned (and the last pinned tab would be pushed off). This PR fixes it by always storing new tabs outside of the pinned area if there's no destination index for the new tab. Release Notes: - Fixed tab bar not preserving pinned tab state when an editor::NewFile action is executed. --- crates/workspace/src/pane.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 09b4683c0c..a5f83f961f 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -831,13 +831,14 @@ impl Pane { } } } - // If no destination index is specified, add or move the item after the active item. + // If no destination index is specified, add or move the item after the + // active item (or at the start of tab bar, if the active item is pinned) let mut insertion_index = { cmp::min( if let Some(destination_index) = destination_index { destination_index } else { - self.active_item_index + 1 + cmp::max(self.active_item_index + 1, self.pinned_count()) }, self.items.len(), ) From a944bb2f24bea7e492ced79fe0e92a7205d6f42e Mon Sep 17 00:00:00 2001 From: Joseph T Lyons Date: Thu, 19 Sep 2024 11:02:44 -0400 Subject: [PATCH 29/96] v0.155.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a37a5350f5..ca5d68881f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14375,7 +14375,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.154.0" +version = "0.155.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 645d12fc76..ad2e7cd48c 100644 --- a/crates/zed/Cargo.toml 
+++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.154.0" +version = "0.155.0" publish = false license = "GPL-3.0-or-later" authors = ["Zed Team "] From 7d0a7541bfd1ca44a7511ec077067902b0e461ef Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 19 Sep 2024 11:45:06 -0400 Subject: [PATCH 30/96] ci: Fix collab deploys (#18077) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR fixes issues with deploying collab. We reverted 4882a75971abafa89467e779466749086d7d3f96—as the DigitalOcean runners are gone now—and moved back to BuildJet. We needed to make some changes to the deployment jobs to setup `doctl`. This PR also adds an automatic bump of the `collab-staging` tag on merges to `main`. This should help catch issues with collab deploys earlier. Release Notes: - N/A --------- Co-authored-by: Conrad --- .github/workflows/bump_collab_staging.yml | 23 +++++++++++++++++++++++ .github/workflows/deploy_collab.yml | 23 ++++++++++++++++------- 2 files changed, 39 insertions(+), 7 deletions(-) create mode 100644 .github/workflows/bump_collab_staging.yml diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml new file mode 100644 index 0000000000..89cc7c4848 --- /dev/null +++ b/.github/workflows/bump_collab_staging.yml @@ -0,0 +1,23 @@ +name: Bump collab-staging Tag + +on: + push: + branches: + - main + +jobs: + update-collab-staging-tag: + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + with: + fetch-depth: 0 + + - name: Update collab-staging tag + run: | + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f collab-staging + git push origin collab-staging --force diff --git a/.github/workflows/deploy_collab.yml 
b/.github/workflows/deploy_collab.yml index 6801be2a54..c4193adcd2 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -8,7 +8,6 @@ on: env: DOCKER_BUILDKIT: 1 - DIGITALOCEAN_ACCESS_TOKEN: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} jobs: style: @@ -61,11 +60,12 @@ jobs: - style - tests runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH + - name: Install doctl + uses: digitalocean/action-doctl@v2 + with: + token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} - name: Sign into DigitalOcean docker registry run: doctl registry login @@ -89,10 +89,19 @@ jobs: needs: - publish runs-on: - - self-hosted - - deploy + - buildjet-16vcpu-ubuntu-2204 steps: + - name: Checkout repo + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + with: + clean: false + + - name: Install doctl + uses: digitalocean/action-doctl@v2 + with: + token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }} + - name: Sign into Kubernetes run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }} From e9f2e72ff03c60f8a9a1ce9612cc51a368813cf0 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Thu, 19 Sep 2024 17:51:28 +0200 Subject: [PATCH 31/96] Workspace persistence for SSH projects (#17996) TODOs: - [x] Add tests to `workspace/src/persistence.rs` - [x] Add a icon for ssh projects - [x] Fix all `TODO` comments - [x] Use `port` if it's passed in the ssh connection options In next PRs: - Make sure unsaved buffers are persisted/restored, along with other items/layout - Handle multiple paths/worktrees correctly Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner --- Cargo.lock | 1 + crates/recent_projects/src/dev_servers.rs | 7 +- crates/recent_projects/src/recent_projects.rs | 108 +++-- crates/recent_projects/src/ssh_connections.rs | 67 ++-- crates/remote/src/ssh_session.rs | 5 + crates/sqlez/src/bindable.rs | 16 + 
crates/sqlez/src/typed_statements.rs | 2 +- crates/workspace/Cargo.toml | 1 + crates/workspace/src/persistence.rs | 374 +++++++++++++++--- crates/workspace/src/persistence/model.rs | 66 +++- crates/workspace/src/workspace.rs | 80 +++- crates/zed/src/main.rs | 6 +- 12 files changed, 592 insertions(+), 141 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ca5d68881f..16ee627d2c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14096,6 +14096,7 @@ dependencies = [ "parking_lot", "postage", "project", + "remote", "schemars", "serde", "serde_json", diff --git a/crates/recent_projects/src/dev_servers.rs b/crates/recent_projects/src/dev_servers.rs index 491f378f30..af5f51f14f 100644 --- a/crates/recent_projects/src/dev_servers.rs +++ b/crates/recent_projects/src/dev_servers.rs @@ -39,7 +39,6 @@ use ui::{ RadioWithLabel, Tooltip, }; use ui_input::{FieldLabelLayout, TextField}; -use util::paths::PathWithPosition; use util::ResultExt; use workspace::notifications::NotifyResultExt; use workspace::OpenOptions; @@ -987,11 +986,7 @@ impl DevServerProjects { cx.spawn(|_, mut cx| async move { let result = open_ssh_project( server.into(), - project - .paths - .into_iter() - .map(|path| PathWithPosition::from_path(PathBuf::from(path))) - .collect(), + project.paths.into_iter().map(PathBuf::from).collect(), app_state, OpenOptions::default(), &mut cx, diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 182cec4614..cb3d3ab659 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -2,6 +2,7 @@ mod dev_servers; pub mod disconnected_overlay; mod ssh_connections; mod ssh_remotes; +use remote::SshConnectionOptions; pub use ssh_connections::open_ssh_project; use client::{DevServerProjectId, ProjectId}; @@ -32,8 +33,8 @@ use ui::{ }; use util::{paths::PathExt, ResultExt}; use workspace::{ - AppState, CloseIntent, ModalView, SerializedWorkspaceLocation, Workspace, 
WorkspaceId, - WORKSPACE_DB, + AppState, CloseIntent, ModalView, OpenOptions, SerializedWorkspaceLocation, Workspace, + WorkspaceId, WORKSPACE_DB, }; #[derive(PartialEq, Clone, Deserialize, Default)] @@ -172,7 +173,7 @@ pub struct RecentProjectsDelegate { create_new_window: bool, // Flag to reset index when there is a new query vs not reset index when user delete an item reset_selected_match_index: bool, - has_any_dev_server_projects: bool, + has_any_non_local_projects: bool, } impl RecentProjectsDelegate { @@ -185,16 +186,16 @@ impl RecentProjectsDelegate { create_new_window, render_paths, reset_selected_match_index: true, - has_any_dev_server_projects: false, + has_any_non_local_projects: false, } } pub fn set_workspaces(&mut self, workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation)>) { self.workspaces = workspaces; - self.has_any_dev_server_projects = self + self.has_any_non_local_projects = !self .workspaces .iter() - .any(|(_, location)| matches!(location, SerializedWorkspaceLocation::DevServer(_))); + .all(|(_, location)| matches!(location, SerializedWorkspaceLocation::Local(_, _))); } } impl EventEmitter for RecentProjectsDelegate {} @@ -258,6 +259,23 @@ impl PickerDelegate for RecentProjectsDelegate { dev_server_project.paths.join("") ) } + SerializedWorkspaceLocation::Ssh(ssh_project) => { + format!( + "{}{}{}{}", + ssh_project.host, + ssh_project + .port + .as_ref() + .map(|port| port.to_string()) + .unwrap_or_default(), + ssh_project.path, + ssh_project + .user + .as_ref() + .map(|user| user.to_string()) + .unwrap_or_default() + ) + } }; StringMatchCandidate::new(id, combined_string) @@ -364,6 +382,33 @@ impl PickerDelegate for RecentProjectsDelegate { }; open_dev_server_project(replace_current_window, dev_server_project.id, project_id, cx) } + SerializedWorkspaceLocation::Ssh(ssh_project) => { + let app_state = workspace.app_state().clone(); + + let replace_window = if replace_current_window { + cx.window_handle().downcast::() + } else { + None + 
}; + + let open_options = OpenOptions { + replace_window, + ..Default::default() + }; + + let connection_options = SshConnectionOptions { + host: ssh_project.host.clone(), + username: ssh_project.user.clone(), + port: ssh_project.port, + password: None, + }; + + let paths = vec![PathBuf::from(ssh_project.path.clone())]; + + cx.spawn(|_, mut cx| async move { + open_ssh_project(connection_options, paths, app_state, open_options, &mut cx).await + }) + } } } }) @@ -392,7 +437,6 @@ impl PickerDelegate for RecentProjectsDelegate { let (_, location) = self.workspaces.get(hit.candidate_id)?; - let is_remote = matches!(location, SerializedWorkspaceLocation::DevServer(_)); let dev_server_status = if let SerializedWorkspaceLocation::DevServer(dev_server_project) = location { let store = dev_server_projects::Store::global(cx).read(cx); @@ -416,6 +460,9 @@ impl PickerDelegate for RecentProjectsDelegate { .filter_map(|i| paths.paths().get(*i).cloned()) .collect(), ), + SerializedWorkspaceLocation::Ssh(ssh_project) => { + Arc::new(vec![PathBuf::from(ssh_project.ssh_url())]) + } SerializedWorkspaceLocation::DevServer(dev_server_project) => { Arc::new(vec![PathBuf::from(format!( "{}:{}", @@ -457,29 +504,34 @@ impl PickerDelegate for RecentProjectsDelegate { h_flex() .flex_grow() .gap_3() - .when(self.has_any_dev_server_projects, |this| { - this.child(if is_remote { - // if disabled, Color::Disabled - let indicator_color = match dev_server_status { - Some(DevServerStatus::Online) => Color::Created, - Some(DevServerStatus::Offline) => Color::Hidden, - _ => unreachable!(), - }; - IconWithIndicator::new( - Icon::new(IconName::Server).color(Color::Muted), - Some(Indicator::dot()), - ) - .indicator_color(indicator_color) - .indicator_border_color(if selected { - Some(cx.theme().colors().element_selected) - } else { - None - }) - .into_any_element() - } else { - Icon::new(IconName::Screen) + .when(self.has_any_non_local_projects, |this| { + this.child(match location { + 
SerializedWorkspaceLocation::Local(_, _) => { + Icon::new(IconName::Screen) + .color(Color::Muted) + .into_any_element() + } + SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Screen) .color(Color::Muted) + .into_any_element(), + SerializedWorkspaceLocation::DevServer(_) => { + let indicator_color = match dev_server_status { + Some(DevServerStatus::Online) => Color::Created, + Some(DevServerStatus::Offline) => Color::Hidden, + _ => unreachable!(), + }; + IconWithIndicator::new( + Icon::new(IconName::Server).color(Color::Muted), + Some(Indicator::dot()), + ) + .indicator_color(indicator_color) + .indicator_border_color(if selected { + Some(cx.theme().colors().element_selected) + } else { + None + }) .into_any_element() + } }) }) .child({ diff --git a/crates/recent_projects/src/ssh_connections.rs b/crates/recent_projects/src/ssh_connections.rs index 8da4284b7f..ad23a5c896 100644 --- a/crates/recent_projects/src/ssh_connections.rs +++ b/crates/recent_projects/src/ssh_connections.rs @@ -19,7 +19,6 @@ use ui::{ h_flex, v_flex, FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label, LabelCommon, Styled, StyledExt as _, ViewContext, VisualContext, WindowContext, }; -use util::paths::PathWithPosition; use workspace::{AppState, ModalView, Workspace}; #[derive(Deserialize)] @@ -358,24 +357,29 @@ pub fn connect_over_ssh( pub async fn open_ssh_project( connection_options: SshConnectionOptions, - paths: Vec, + paths: Vec, app_state: Arc, - _open_options: workspace::OpenOptions, + open_options: workspace::OpenOptions, cx: &mut AsyncAppContext, ) -> Result<()> { let options = cx.update(|cx| (app_state.build_window_options)(None, cx))?; - let window = cx.open_window(options, |cx| { - let project = project::Project::local( - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - None, - cx, - ); - cx.new_view(|cx| Workspace::new(None, project, 
app_state.clone(), cx)) - })?; + + let window = if let Some(window) = open_options.replace_window { + window + } else { + cx.open_window(options, |cx| { + let project = project::Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + cx, + ); + cx.new_view(|cx| Workspace::new(None, project, app_state.clone(), cx)) + })? + }; let result = window .update(cx, |workspace, cx| { @@ -387,40 +391,17 @@ pub async fn open_ssh_project( .read(cx) .prompt .clone(); - connect_over_ssh(connection_options, ui, cx) + connect_over_ssh(connection_options.clone(), ui, cx) })? .await; if result.is_err() { window.update(cx, |_, cx| cx.remove_window()).ok(); } - let session = result?; - let project = cx.update(|cx| { - project::Project::ssh( - session, - app_state.client.clone(), - app_state.node_runtime.clone(), - app_state.user_store.clone(), - app_state.languages.clone(), - app_state.fs.clone(), - cx, - ) - })?; - - for path in paths { - project - .update(cx, |project, cx| { - project.find_or_create_worktree(&path.path, true, cx) - })? - .await?; - } - - window.update(cx, |_, cx| { - cx.replace_root_view(|cx| Workspace::new(None, project, app_state, cx)) - })?; - window.update(cx, |_, cx| cx.activate_window())?; - - Ok(()) + cx.update(|cx| { + workspace::open_ssh_project(window, connection_options, session, app_state, paths, cx) + })? 
+ .await } diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 7556b38f3e..4aab731e64 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -33,6 +33,11 @@ use std::{ }; use tempfile::TempDir; +#[derive( + Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, +)] +pub struct SshProjectId(pub u64); + #[derive(Clone)] pub struct SshSocket { connection_options: SshConnectionOptions, diff --git a/crates/sqlez/src/bindable.rs b/crates/sqlez/src/bindable.rs index e8b9679936..8cf4329f92 100644 --- a/crates/sqlez/src/bindable.rs +++ b/crates/sqlez/src/bindable.rs @@ -196,6 +196,22 @@ impl Column for u32 { } } +impl StaticColumnCount for u16 {} +impl Bind for u16 { + fn bind(&self, statement: &Statement, start_index: i32) -> Result { + (*self as i64) + .bind(statement, start_index) + .with_context(|| format!("Failed to bind usize at index {start_index}")) + } +} + +impl Column for u16 { + fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { + let result = statement.column_int64(start_index)?; + Ok((result as u16, start_index + 1)) + } +} + impl StaticColumnCount for usize {} impl Bind for usize { fn bind(&self, statement: &Statement, start_index: i32) -> Result { diff --git a/crates/sqlez/src/typed_statements.rs b/crates/sqlez/src/typed_statements.rs index d7f25cde51..95f4f829ec 100644 --- a/crates/sqlez/src/typed_statements.rs +++ b/crates/sqlez/src/typed_statements.rs @@ -74,7 +74,7 @@ impl Connection { } /// Prepare a statement which takes a binding and selects a single row - /// from the database. WIll return none if no rows are returned and will + /// from the database. Will return none if no rows are returned and will /// error if more than 1 row is returned. 
/// /// Note: If there are multiple statements that depend upon each other diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 7f5c1ccce8..1b998eeabe 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -51,6 +51,7 @@ postage.workspace = true project.workspace = true dev_server_projects.workspace = true task.workspace = true +remote.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index 88ede4228d..034328a30b 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -7,6 +7,7 @@ use client::DevServerProjectId; use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql}; use gpui::{point, size, Axis, Bounds, WindowBounds, WindowId}; +use remote::ssh_session::SshProjectId; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, @@ -20,7 +21,7 @@ use crate::WorkspaceId; use model::{ GroupId, LocalPaths, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup, - SerializedWorkspace, + SerializedSshProject, SerializedWorkspace, }; use self::model::{ @@ -354,7 +355,17 @@ define_connection! 
{ ), sql!( ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0; - ) + ), + sql!( + CREATE TABLE ssh_projects ( + id INTEGER PRIMARY KEY, + host TEXT NOT NULL, + port INTEGER, + path TEXT NOT NULL, + user TEXT + ); + ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE; + ), ]; } @@ -374,7 +385,6 @@ impl WorkspaceDb { workspace_id, local_paths, local_paths_order, - dev_server_project_id, window_bounds, display, centered_layout, @@ -384,7 +394,6 @@ impl WorkspaceDb { WorkspaceId, Option, Option, - Option, Option, Option, Option, @@ -396,7 +405,6 @@ impl WorkspaceDb { workspace_id, local_paths, local_paths_order, - dev_server_project_id, window_state, window_x, window_y, @@ -422,28 +430,13 @@ impl WorkspaceDb { .warn_on_err() .flatten()?; - let location = if let Some(dev_server_project_id) = dev_server_project_id { - let dev_server_project: SerializedDevServerProject = self - .select_row_bound(sql! { - SELECT id, path, dev_server_name - FROM dev_server_projects - WHERE id = ? 
- }) - .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) - .context("No remote project found") - .warn_on_err() - .flatten()?; - SerializedWorkspaceLocation::DevServer(dev_server_project) - } else if let Some(local_paths) = local_paths { - match local_paths_order { - Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), - None => { - let order = LocalPathsOrder::default_for_paths(&local_paths); - SerializedWorkspaceLocation::Local(local_paths, order) - } + let local_paths = local_paths?; + let location = match local_paths_order { + Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), + None => { + let order = LocalPathsOrder::default_for_paths(&local_paths); + SerializedWorkspaceLocation::Local(local_paths, order) } - } else { - return None; }; Some(SerializedWorkspace { @@ -470,8 +463,6 @@ impl WorkspaceDb { // and we've grabbed the most recent workspace let ( workspace_id, - local_paths, - local_paths_order, dev_server_project_id, window_bounds, display, @@ -480,8 +471,6 @@ impl WorkspaceDb { window_id, ): ( WorkspaceId, - Option, - Option, Option, Option, Option, @@ -492,8 +481,6 @@ impl WorkspaceDb { .select_row_bound(sql! { SELECT workspace_id, - local_paths, - local_paths_order, dev_server_project_id, window_state, window_x, @@ -520,29 +507,20 @@ impl WorkspaceDb { .warn_on_err() .flatten()?; - let location = if let Some(dev_server_project_id) = dev_server_project_id { - let dev_server_project: SerializedDevServerProject = self - .select_row_bound(sql! { - SELECT id, path, dev_server_name - FROM dev_server_projects - WHERE id = ? 
- }) - .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) - .context("No remote project found") - .warn_on_err() - .flatten()?; - SerializedWorkspaceLocation::DevServer(dev_server_project) - } else if let Some(local_paths) = local_paths { - match local_paths_order { - Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), - None => { - let order = LocalPathsOrder::default_for_paths(&local_paths); - SerializedWorkspaceLocation::Local(local_paths, order) - } - } - } else { - return None; - }; + let dev_server_project_id = dev_server_project_id?; + + let dev_server_project: SerializedDevServerProject = self + .select_row_bound(sql! { + SELECT id, path, dev_server_name + FROM dev_server_projects + WHERE id = ? + }) + .and_then(|mut prepared_statement| (prepared_statement)(dev_server_project_id)) + .context("No remote project found") + .warn_on_err() + .flatten()?; + + let location = SerializedWorkspaceLocation::DevServer(dev_server_project); Some(SerializedWorkspace { id: workspace_id, @@ -560,6 +538,62 @@ impl WorkspaceDb { }) } + pub(crate) fn workspace_for_ssh_project( + &self, + ssh_project: &SerializedSshProject, + ) -> Option { + let (workspace_id, window_bounds, display, centered_layout, docks, window_id): ( + WorkspaceId, + Option, + Option, + Option, + DockStructure, + Option, + ) = self + .select_row_bound(sql! { + SELECT + workspace_id, + window_state, + window_x, + window_y, + window_width, + window_height, + display, + centered_layout, + left_dock_visible, + left_dock_active_panel, + left_dock_zoom, + right_dock_visible, + right_dock_active_panel, + right_dock_zoom, + bottom_dock_visible, + bottom_dock_active_panel, + bottom_dock_zoom, + window_id + FROM workspaces + WHERE ssh_project_id = ? 
+ }) + .and_then(|mut prepared_statement| (prepared_statement)(ssh_project.id.0)) + .context("No workspaces found") + .warn_on_err() + .flatten()?; + + Some(SerializedWorkspace { + id: workspace_id, + location: SerializedWorkspaceLocation::Ssh(ssh_project.clone()), + center_group: self + .get_center_pane_group(workspace_id) + .context("Getting center group") + .log_err()?, + window_bounds, + centered_layout: centered_layout.unwrap_or(false), + display, + docks, + session_id: None, + window_id, + }) + } + /// Saves a workspace using the worktree roots. Will garbage collect any workspaces /// that used this workspace previously pub(crate) async fn save_workspace(&self, workspace: SerializedWorkspace) { @@ -674,6 +708,49 @@ impl WorkspaceDb { workspace.docks, )) .context("Updating workspace")?; + }, + SerializedWorkspaceLocation::Ssh(ssh_project) => { + conn.exec_bound(sql!( + DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ? + ))?((ssh_project.id.0, workspace.id)) + .context("clearing out old locations")?; + + // Upsert + conn.exec_bound(sql!( + INSERT INTO workspaces( + workspace_id, + ssh_project_id, + left_dock_visible, + left_dock_active_panel, + left_dock_zoom, + right_dock_visible, + right_dock_active_panel, + right_dock_zoom, + bottom_dock_visible, + bottom_dock_active_panel, + bottom_dock_zoom, + timestamp + ) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP) + ON CONFLICT DO + UPDATE SET + ssh_project_id = ?2, + left_dock_visible = ?3, + left_dock_active_panel = ?4, + left_dock_zoom = ?5, + right_dock_visible = ?6, + right_dock_active_panel = ?7, + right_dock_zoom = ?8, + bottom_dock_visible = ?9, + bottom_dock_active_panel = ?10, + bottom_dock_zoom = ?11, + timestamp = CURRENT_TIMESTAMP + ))?(( + workspace.id, + ssh_project.id.0, + workspace.docks, + )) + .context("Updating workspace")?; } } @@ -688,6 +765,46 @@ impl WorkspaceDb { .await; } + pub(crate) async fn get_or_create_ssh_project( + &self, + host: String, 
+ port: Option, + path: String, + user: Option, + ) -> Result { + if let Some(project) = self + .get_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await? + { + Ok(project) + } else { + self.insert_ssh_project(host, port, path, user) + .await? + .ok_or_else(|| anyhow!("failed to insert ssh project")) + } + } + + query! { + async fn get_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { + SELECT id, host, port, path, user + FROM ssh_projects + WHERE host IS ? AND port IS ? AND path IS ? AND user IS ? + LIMIT 1 + } + } + + query! { + async fn insert_ssh_project(host: String, port: Option, path: String, user: Option) -> Result> { + INSERT INTO ssh_projects( + host, + port, + path, + user + ) VALUES (?1, ?2, ?3, ?4) + RETURNING id, host, port, path, user + } + } + query! { pub async fn next_id() -> Result { INSERT INTO workspaces DEFAULT VALUES RETURNING workspace_id @@ -695,10 +812,12 @@ impl WorkspaceDb { } query! { - fn recent_workspaces() -> Result)>> { - SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id + fn recent_workspaces() -> Result, Option)>> { + SELECT workspace_id, local_paths, local_paths_order, dev_server_project_id, ssh_project_id FROM workspaces - WHERE local_paths IS NOT NULL OR dev_server_project_id IS NOT NULL + WHERE local_paths IS NOT NULL + OR dev_server_project_id IS NOT NULL + OR ssh_project_id IS NOT NULL ORDER BY timestamp DESC } } @@ -719,6 +838,13 @@ impl WorkspaceDb { } } + query! { + fn ssh_projects() -> Result> { + SELECT id, host, port, path, user + FROM ssh_projects + } + } + pub(crate) fn last_window( &self, ) -> anyhow::Result<(Option, Option)> { @@ -768,8 +894,11 @@ impl WorkspaceDb { let mut result = Vec::new(); let mut delete_tasks = Vec::new(); let dev_server_projects = self.dev_server_projects()?; + let ssh_projects = self.ssh_projects()?; - for (id, location, order, dev_server_project_id) in self.recent_workspaces()? 
{ + for (id, location, order, dev_server_project_id, ssh_project_id) in + self.recent_workspaces()? + { if let Some(dev_server_project_id) = dev_server_project_id.map(DevServerProjectId) { if let Some(dev_server_project) = dev_server_projects .iter() @@ -782,6 +911,15 @@ impl WorkspaceDb { continue; } + if let Some(ssh_project_id) = ssh_project_id.map(SshProjectId) { + if let Some(ssh_project) = ssh_projects.iter().find(|rp| rp.id == ssh_project_id) { + result.push((id, SerializedWorkspaceLocation::Ssh(ssh_project.clone()))); + } else { + delete_tasks.push(self.delete_workspace_by_id(id)); + } + continue; + } + if location.paths().iter().all(|path| path.exists()) && location.paths().iter().any(|path| path.is_dir()) { @@ -802,7 +940,9 @@ impl WorkspaceDb { .into_iter() .filter_map(|(_, location)| match location { SerializedWorkspaceLocation::Local(local_paths, _) => Some(local_paths), + // Do not automatically reopen Dev Server and SSH workspaces SerializedWorkspaceLocation::DevServer(_) => None, + SerializedWorkspaceLocation::Ssh(_) => None, }) .next()) } @@ -1512,6 +1652,122 @@ mod tests { assert_eq!(have[3], LocalPaths::new([dir1.path().to_str().unwrap()])); } + #[gpui::test] + async fn test_get_or_create_ssh_project() { + let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project").await); + + let (host, port, path, user) = ( + "example.com".to_string(), + Some(22_u16), + "/home/user".to_string(), + Some("user".to_string()), + ); + + let project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(project.host, host); + assert_eq!(project.path, path); + assert_eq!(project.user, user); + + // Test that calling the function again with the same parameters returns the same project + let same_project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(project.id, same_project.id); + + // Test with different parameters + let 
(host2, path2, user2) = ( + "otherexample.com".to_string(), + "/home/otheruser".to_string(), + Some("otheruser".to_string()), + ); + + let different_project = db + .get_or_create_ssh_project(host2.clone(), None, path2.clone(), user2.clone()) + .await + .unwrap(); + + assert_ne!(project.id, different_project.id); + assert_eq!(different_project.host, host2); + assert_eq!(different_project.path, path2); + assert_eq!(different_project.user, user2); + } + + #[gpui::test] + async fn test_get_or_create_ssh_project_with_null_user() { + let db = WorkspaceDb(open_test_db("test_get_or_create_ssh_project_with_null_user").await); + + let (host, port, path, user) = ( + "example.com".to_string(), + None, + "/home/user".to_string(), + None, + ); + + let project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), None) + .await + .unwrap(); + + assert_eq!(project.host, host); + assert_eq!(project.path, path); + assert_eq!(project.user, None); + + // Test that calling the function again with the same parameters returns the same project + let same_project = db + .get_or_create_ssh_project(host.clone(), port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(project.id, same_project.id); + } + + #[gpui::test] + async fn test_get_ssh_projects() { + let db = WorkspaceDb(open_test_db("test_get_ssh_projects").await); + + let projects = vec![ + ( + "example.com".to_string(), + None, + "/home/user".to_string(), + None, + ), + ( + "anotherexample.com".to_string(), + Some(123_u16), + "/home/user2".to_string(), + Some("user2".to_string()), + ), + ( + "yetanother.com".to_string(), + Some(345_u16), + "/home/user3".to_string(), + None, + ), + ]; + + for (host, port, path, user) in projects.iter() { + let project = db + .get_or_create_ssh_project(host.clone(), *port, path.clone(), user.clone()) + .await + .unwrap(); + + assert_eq!(&project.host, host); + assert_eq!(&project.port, port); + assert_eq!(&project.path, path); + assert_eq!(&project.user, user); + } + + let 
stored_projects = db.ssh_projects().unwrap(); + assert_eq!(stored_projects.len(), projects.len()); + } + #[gpui::test] async fn test_simple_split() { env_logger::try_init().ok(); diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index d6f8001f25..0ad3fa5e60 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -11,6 +11,7 @@ use db::sqlez::{ }; use gpui::{AsyncWindowContext, Model, View, WeakView}; use project::Project; +use remote::ssh_session::SshProjectId; use serde::{Deserialize, Serialize}; use std::{ path::{Path, PathBuf}, @@ -20,6 +21,69 @@ use ui::SharedString; use util::ResultExt; use uuid::Uuid; +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +pub struct SerializedSshProject { + pub id: SshProjectId, + pub host: String, + pub port: Option, + pub path: String, + pub user: Option, +} + +impl SerializedSshProject { + pub fn ssh_url(&self) -> String { + let mut result = String::from("ssh://"); + if let Some(user) = &self.user { + result.push_str(user); + result.push('@'); + } + result.push_str(&self.host); + if let Some(port) = &self.port { + result.push(':'); + result.push_str(&port.to_string()); + } + result.push_str(&self.path); + result + } +} + +impl StaticColumnCount for SerializedSshProject { + fn column_count() -> usize { + 5 + } +} + +impl Bind for &SerializedSshProject { + fn bind(&self, statement: &Statement, start_index: i32) -> Result { + let next_index = statement.bind(&self.id.0, start_index)?; + let next_index = statement.bind(&self.host, next_index)?; + let next_index = statement.bind(&self.port, next_index)?; + let next_index = statement.bind(&self.path, next_index)?; + statement.bind(&self.user, next_index) + } +} + +impl Column for SerializedSshProject { + fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { + let id = statement.column_int64(start_index)?; + let host = statement.column_text(start_index 
+ 1)?.to_string(); + let (port, _) = Option::::column(statement, start_index + 2)?; + let path = statement.column_text(start_index + 3)?.to_string(); + let (user, _) = Option::::column(statement, start_index + 4)?; + + Ok(( + Self { + id: SshProjectId(id as u64), + host, + port, + path, + user, + }, + start_index + 5, + )) + } +} + #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] pub struct SerializedDevServerProject { pub id: DevServerProjectId, @@ -58,7 +122,6 @@ impl Column for LocalPaths { fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { let path_blob = statement.column_blob(start_index)?; let paths: Arc> = if path_blob.is_empty() { - println!("path blog is empty"); Default::default() } else { bincode::deserialize(path_blob).context("Bincode deserialization of paths failed")? @@ -146,6 +209,7 @@ impl Column for SerializedDevServerProject { #[derive(Debug, PartialEq, Clone)] pub enum SerializedWorkspaceLocation { Local(LocalPaths, LocalPathsOrder), + Ssh(SerializedSshProject), DevServer(SerializedDevServerProject), } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 98ac49992d..5855dcce1e 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -49,15 +49,19 @@ use node_runtime::NodeRuntime; use notifications::{simple_message_notification::MessageNotification, NotificationHandle}; pub use pane::*; pub use pane_group::*; -use persistence::{model::SerializedWorkspace, SerializedWindowBounds, DB}; pub use persistence::{ model::{ItemId, LocalPaths, SerializedDevServerProject, SerializedWorkspaceLocation}, WorkspaceDb, DB as WORKSPACE_DB, }; +use persistence::{ + model::{SerializedSshProject, SerializedWorkspace}, + SerializedWindowBounds, DB, +}; use postage::stream::Stream; use project::{ DirectoryLister, Project, ProjectEntryId, ProjectPath, ResolvedPath, Worktree, WorktreeId, }; +use remote::{SshConnectionOptions, SshSession}; use serde::Deserialize; 
use session::AppSession; use settings::Settings; @@ -756,6 +760,7 @@ pub struct Workspace { render_disconnected_overlay: Option) -> AnyElement>>, serializable_items_tx: UnboundedSender>, + serialized_ssh_project: Option, _items_serializer: Task>, session_id: Option, } @@ -1054,6 +1059,7 @@ impl Workspace { serializable_items_tx, _items_serializer, session_id: Some(session_id), + serialized_ssh_project: None, } } @@ -1440,6 +1446,10 @@ impl Workspace { self.on_prompt_for_open_path = Some(prompt) } + pub fn set_serialized_ssh_project(&mut self, serialized_ssh_project: SerializedSshProject) { + self.serialized_ssh_project = Some(serialized_ssh_project); + } + pub fn set_render_disconnected_overlay( &mut self, render: impl Fn(&mut Self, &mut ViewContext) -> AnyElement + 'static, @@ -4097,7 +4107,9 @@ impl Workspace { } } - let location = if let Some(local_paths) = self.local_paths(cx) { + let location = if let Some(ssh_project) = &self.serialized_ssh_project { + Some(SerializedWorkspaceLocation::Ssh(ssh_project.clone())) + } else if let Some(local_paths) = self.local_paths(cx) { if !local_paths.is_empty() { Some(SerializedWorkspaceLocation::from_local_paths(local_paths)) } else { @@ -5476,6 +5488,70 @@ pub fn join_hosted_project( }) } +pub fn open_ssh_project( + window: WindowHandle, + connection_options: SshConnectionOptions, + session: Arc, + app_state: Arc, + paths: Vec, + cx: &mut AppContext, +) -> Task> { + cx.spawn(|mut cx| async move { + // TODO: Handle multiple paths + let path = paths.iter().next().cloned().unwrap_or_default(); + + let serialized_ssh_project = persistence::DB + .get_or_create_ssh_project( + connection_options.host.clone(), + connection_options.port, + path.to_string_lossy().to_string(), + connection_options.username.clone(), + ) + .await?; + + let project = cx.update(|cx| { + project::Project::ssh( + session, + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + 
app_state.fs.clone(), + cx, + ) + })?; + + for path in paths { + project + .update(&mut cx, |project, cx| { + project.find_or_create_worktree(&path, true, cx) + })? + .await?; + } + + let serialized_workspace = + persistence::DB.workspace_for_ssh_project(&serialized_ssh_project); + + let workspace_id = + if let Some(workspace_id) = serialized_workspace.map(|workspace| workspace.id) { + workspace_id + } else { + persistence::DB.next_id().await? + }; + + cx.update_window(window.into(), |_, cx| { + cx.replace_root_view(|cx| { + let mut workspace = + Workspace::new(Some(workspace_id), project, app_state.clone(), cx); + workspace.set_serialized_ssh_project(serialized_ssh_project); + workspace + }); + })?; + + window.update(&mut cx, |_, cx| cx.activate_window()) + }) +} + pub fn join_dev_server_project( dev_server_project_id: DevServerProjectId, project_id: ProjectId, diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index c127a975a9..3104001f99 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -667,7 +667,11 @@ fn handle_open_request( cx.spawn(|mut cx| async move { open_ssh_project( connection_info, - request.open_paths, + request + .open_paths + .into_iter() + .map(|path| path.path) + .collect::>(), app_state, workspace::OpenOptions::default(), &mut cx, From 3fd690ade401f7d665448977c674db4780e23165 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 19 Sep 2024 12:00:13 -0400 Subject: [PATCH 32/96] docs: Update lsp.settings examples for yaml-language-server (#18081) --- docs/src/configuring-zed.md | 21 ++++++++- docs/src/languages/typescript.md | 26 +++++------ docs/src/languages/yaml.md | 80 ++++++++++++++++++++++++++++++-- 3 files changed, 108 insertions(+), 19 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 382c33c216..1befa7d93a 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -575,8 +575,13 @@ Each option controls displaying of a particular toolbar element. 
If all elements The following settings can be overridden for specific language servers: - `initialization_options` +- `settings` -To override settings for a language, add an entry for that language server's name to the `lsp` value. Example: +To override configuration for a language server, add an entry for that language server's name to the `lsp` value. + +Some options are passed via `initialization_options` to the language server. These are for options which must be specified at language server startup and when changed will require restarting the language server. + +For example, to pass the `check` option to `rust-analyzer`, use the following configuration: ```json "lsp": { @@ -590,6 +595,20 @@ To override settings for a language, add an entry for that language server's nam } ``` +While other options may be changed at runtime and should be placed under `settings`: + +```json +"lsp": { + "yaml-language-server": { + "settings": { + "yaml": { + "keyOrdering": true // Enforces alphabetical ordering of keys in maps + } + } + } +} +``` + ## Format On Save - Description: Whether or not to perform a buffer format before saving.
diff --git a/docs/src/languages/typescript.md b/docs/src/languages/typescript.md index feb7d76622..080d41efb3 100644 --- a/docs/src/languages/typescript.md +++ b/docs/src/languages/typescript.md @@ -72,20 +72,20 @@ You can override these settings in your configuration file: ```json "lsp": { - "$LANGUAGE_SERVER_NAME": { - "initialization_options": { - "preferences": { - "includeInlayParameterNameHints": "all", - "includeInlayParameterNameHintsWhenArgumentMatchesName": true, - "includeInlayFunctionParameterTypeHints": true, - "includeInlayVariableTypeHints": true, - "includeInlayVariableTypeHintsWhenTypeMatchesName": true, - "includeInlayPropertyDeclarationTypeHints": true, - "includeInlayFunctionLikeReturnTypeHints": true, - "includeInlayEnumMemberValueHints": true, - } - } + "$LANGUAGE_SERVER_NAME": { + "initialization_options": { + "preferences": { + "includeInlayParameterNameHints": "all", + "includeInlayParameterNameHintsWhenArgumentMatchesName": true, + "includeInlayFunctionParameterTypeHints": true, + "includeInlayVariableTypeHints": true, + "includeInlayVariableTypeHintsWhenTypeMatchesName": true, + "includeInlayPropertyDeclarationTypeHints": true, + "includeInlayFunctionLikeReturnTypeHints": true, + "includeInlayEnumMemberValueHints": true, + } } + } } ``` diff --git a/docs/src/languages/yaml.md b/docs/src/languages/yaml.md index 5ef614394c..7b840d0825 100644 --- a/docs/src/languages/yaml.md +++ b/docs/src/languages/yaml.md @@ -12,7 +12,7 @@ You can configure various [yaml-language-server settings](https://github.com/red ```json "lsp": { "yaml-language-server": { - "initialization_options": { + "settings": { "yaml": { "keyOrdering": true, "format": { @@ -32,9 +32,9 @@ Note, settings keys must be nested, so `yaml.keyOrdering` becomes `{"yaml": { "k ## Schemas -By default yaml-language-server will attempt to determine the correct schema for a given yaml file and retrieve the appropriate JSON Schema from [Json Schema Store]. 
+By default yaml-language-server will attempt to determine the correct schema for a given yaml file and retrieve the appropriate JSON Schema from [Json Schema Store](https://schemastore.org/). -You can override this by [using an inlined schema] reference via a modeline comment at the top of your yaml file: +You can override any auto-detected schema via the `schemas` settings key (demonstrated above) or by providing an [inlined schema](https://github.com/redhat-developer/yaml-language-server#using-inlined-schema) reference via a modeline comment at the top of your yaml file: ```yaml # yaml-language-server: $schema=https://json.schemastore.org/github-action.json @@ -44,12 +44,12 @@ on: types: [oppened] ``` -You can disable this functionality entirely if desired: +You can disable the automatic detection and retrieval of schemas from the JSON Schema Store if desired: ```json "lsp": { "yaml-language-server": { - "initialization_options": { + "settings": { "yaml": { "schemaStore": { "enable": false @@ -59,3 +59,73 @@ You can disable this functionality entirely if desired: } } ``` + +## Custom Tags + +Yaml-language-server supports [custom tags](https://github.com/redhat-developer/yaml-language-server#adding-custom-tags) which can be used to inject custom application functionality at runtime into your yaml files.
+ +For example Amazon CloudFormation YAML uses a number of custom tags, to support these you can add the following to your settings.json: + +```json + "lsp": { + "yaml-language-server": { + "settings": { + "yaml": { + "customTags": [ + "!And scalar", + "!And mapping", + "!And sequence", + "!If scalar", + "!If mapping", + "!If sequence", + "!Not scalar", + "!Not mapping", + "!Not sequence", + "!Equals scalar", + "!Equals mapping", + "!Equals sequence", + "!Or scalar", + "!Or mapping", + "!Or sequence", + "!FindInMap scalar", + "!FindInMap mapping", + "!FindInMap sequence", + "!Base64 scalar", + "!Base64 mapping", + "!Base64 sequence", + "!Cidr scalar", + "!Cidr mapping", + "!Cidr sequence", + "!Ref scalar", + "!Ref mapping", + "!Ref sequence", + "!Sub scalar", + "!Sub mapping", + "!Sub sequence", + "!GetAtt scalar", + "!GetAtt mapping", + "!GetAtt sequence", + "!GetAZs scalar", + "!GetAZs mapping", + "!GetAZs sequence", + "!ImportValue scalar", + "!ImportValue mapping", + "!ImportValue sequence", + "!Select scalar", + "!Select mapping", + "!Select sequence", + "!Split scalar", + "!Split mapping", + "!Split sequence", + "!Join scalar", + "!Join mapping", + "!Join sequence", + "!Condition scalar", + "!Condition mapping", + "!Condition sequence" + ] + } + } + } + } +``` From 713b39bac0702b008c461c87764a603010562b65 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 19 Sep 2024 10:13:55 -0600 Subject: [PATCH 33/96] Auto deploy collab staging daily (#18085) This should avoid us breaking the collab build and not noticing for a month Release Notes: - N/A --- .github/workflows/bump_collab_staging.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml index 89cc7c4848..224325d53f 100644 --- a/.github/workflows/bump_collab_staging.yml +++ b/.github/workflows/bump_collab_staging.yml @@ -1,9 +1,9 @@ name: Bump collab-staging Tag on: - push: - branches: - - main + 
schedule: + # Fire every day at 16:00 UTC (At the start of the US workday) + - cron: "0 16 * * *" jobs: update-collab-staging-tag: From 3986bcf9dc23cd32b5155310136f53d9d8a5cc73 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:14:37 -0600 Subject: [PATCH 34/96] Update Rust crate async-trait to v0.1.82 (#18038) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [async-trait](https://redirect.github.com/dtolnay/async-trait) | workspace.dependencies | patch | `0.1.81` -> `0.1.82` | --- ### Release Notes
dtolnay/async-trait (async-trait) ### [`v0.1.82`](https://redirect.github.com/dtolnay/async-trait/releases/tag/0.1.82) [Compare Source](https://redirect.github.com/dtolnay/async-trait/compare/0.1.81...0.1.82) - Prevent elided_named_lifetimes lint being produced in generated code ([#​276](https://redirect.github.com/dtolnay/async-trait/issues/276))
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 16ee627d2c..38c3e74ce1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -894,9 +894,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.81" +version = "0.1.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" +checksum = "a27b8a3a6e1a44fa4c8baf1f653e4172e81486d4941f2237e20dc2d0cf4ddff1" dependencies = [ "proc-macro2", "quote", From 6670c9eb3b5033caf4b78d59aecab9bee5cb09d1 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:15:31 -0600 Subject: [PATCH 35/96] Update Rust crate backtrace to v0.3.74 (#18039) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [backtrace](https://redirect.github.com/rust-lang/backtrace-rs) | dependencies | patch | `0.3.73` -> `0.3.74` | | [backtrace](https://redirect.github.com/rust-lang/backtrace-rs) | dev-dependencies | patch | `0.3.73` -> `0.3.74` | --- ### Release Notes
rust-lang/backtrace-rs (backtrace) ### [`v0.3.74`](https://redirect.github.com/rust-lang/backtrace-rs/releases/tag/0.3.74) [Compare Source](https://redirect.github.com/rust-lang/backtrace-rs/compare/0.3.73...0.3.74) #### What's Changed - QNX Neutrino 7.0 support, thanks to [@​nyurik](https://redirect.github.com/nyurik) in [https://github.com/rust-lang/backtrace-rs/pull/648](https://redirect.github.com/rust-lang/backtrace-rs/pull/648) - Cleaned up our Android support. This should massively improve backtraces for ones with the API level sufficient to ship with libunwind, etc. Unfortunately, it comes at the cost of dropping support for older ones! Thanks to [@​fengys](https://redirect.github.com/fengys) in [https://github.com/rust-lang/backtrace-rs/pull/656](https://redirect.github.com/rust-lang/backtrace-rs/pull/656) - Made PrintFmt, which was using the `Enum::__NonExhaustiveVariant` pattern, use `#[non_exhaustive]` for real. Don't @​ me if you were matching on that! Thanks to [@​nyurik](https://redirect.github.com/nyurik) in [https://github.com/rust-lang/backtrace-rs/pull/651](https://redirect.github.com/rust-lang/backtrace-rs/pull/651) - Massively cleaned up the windows code! 
We moved from winapi to windows-sys with windows-targets thanks to [@​CraftSpider](https://redirect.github.com/CraftSpider) and [@​ChrisDenton](https://redirect.github.com/ChrisDenton) in - Don't cast HANDLE to usize and back by [@​CraftSpider](https://redirect.github.com/CraftSpider) in [https://github.com/rust-lang/backtrace-rs/pull/635](https://redirect.github.com/rust-lang/backtrace-rs/pull/635) - Switch from `winapi` to `windows-sys` by [@​CraftSpider](https://redirect.github.com/CraftSpider) in [https://github.com/rust-lang/backtrace-rs/pull/641](https://redirect.github.com/rust-lang/backtrace-rs/pull/641) - Update windows bindings and use windows-targets by [@​ChrisDenton](https://redirect.github.com/ChrisDenton) in [https://github.com/rust-lang/backtrace-rs/pull/653](https://redirect.github.com/rust-lang/backtrace-rs/pull/653) - A bunch of updated dependencies. Thanks [@​djc](https://redirect.github.com/djc) and [@​khuey](https://redirect.github.com/khuey)! - Sorry if you were testing this code in miri! It started yelling about sussy casts. A lot. We did a bunch of internal cleanups that should make it quiet down, thanks to [@​workingjubilee](https://redirect.github.com/workingjubilee) in [https://github.com/rust-lang/backtrace-rs/pull/641](https://redirect.github.com/rust-lang/backtrace-rs/pull/641) - Uhhh we had to tweak `dl_iterate_phdr` in [https://github.com/rust-lang/backtrace-rs/pull/660](https://redirect.github.com/rust-lang/backtrace-rs/pull/660) after Android revealed it was... kind of unsound actually and not doing things like checking for null pointers before making slices! WHOOPS! Thanks to [@​saethlin](https://redirect.github.com/saethlin) for implementing detection for precisely that in rustc! It's really hard to find soundness issues in inherited codebases like this one... 
#### New Contributors - [@​CraftSpider](https://redirect.github.com/CraftSpider) made their first contribution in [https://github.com/rust-lang/backtrace-rs/pull/635](https://redirect.github.com/rust-lang/backtrace-rs/pull/635) - [@​fengys1996](https://redirect.github.com/fengys1996) made their first contribution in [https://github.com/rust-lang/backtrace-rs/pull/656](https://redirect.github.com/rust-lang/backtrace-rs/pull/656) - [@​djc](https://redirect.github.com/djc) made their first contribution in [https://github.com/rust-lang/backtrace-rs/pull/657](https://redirect.github.com/rust-lang/backtrace-rs/pull/657) **Full Changelog**: https://github.com/rust-lang/backtrace-rs/compare/0.3.73...0.3.74
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about these updates again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 30 ++++++++++++++++++------------ 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 38c3e74ce1..4a1a584469 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21,11 +21,11 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" dependencies = [ - "gimli", + "gimli 0.31.0", ] [[package]] @@ -1493,17 +1493,17 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", - "miniz_oxide 0.7.4", + "miniz_oxide 0.8.0", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -3083,7 +3083,7 @@ dependencies = [ "cranelift-control", "cranelift-entity", "cranelift-isle", - "gimli", + "gimli 0.29.0", "hashbrown 0.14.5", "log", "regalloc2", @@ -4873,6 +4873,12 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "gimli" +version = "0.31.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" + [[package]] name = "git" version = "0.1.0" @@ -13108,7 +13114,7 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "cranelift-wasm", - "gimli", + "gimli 0.29.0", "log", "object", "target-lexicon", @@ -13128,7 +13134,7 @@ dependencies = [ "cpp_demangle", "cranelift-bitset", "cranelift-entity", - "gimli", + "gimli 0.29.0", "indexmap 2.4.0", "log", "object", @@ -13242,7 +13248,7 @@ checksum = "2a25199625effa4c13dd790d64bd56884b014c69829431bfe43991c740bd5bc1" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", + "gimli 0.29.0", "object", "target-lexicon", "wasmparser 0.215.0", @@ -13539,7 +13545,7 @@ checksum = "073efe897d9ead7fc609874f94580afc831114af5149b6a90ee0a3a39b497fe0" dependencies = [ "anyhow", "cranelift-codegen", - "gimli", + "gimli 0.29.0", "regalloc2", "smallvec", "target-lexicon", From 157c57aa8d3e35a5d7d750ae552740b412b5911b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:15:46 -0600 Subject: [PATCH 36/96] Update Rust crate clap to v4.5.17 (#18041) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [clap](https://redirect.github.com/clap-rs/clap) | workspace.dependencies | patch | `4.5.16` -> `4.5.17` | --- ### Release Notes
clap-rs/clap (clap) ### [`v4.5.17`](https://redirect.github.com/clap-rs/clap/blob/HEAD/CHANGELOG.md#4517---2024-09-04) [Compare Source](https://redirect.github.com/clap-rs/clap/compare/v4.5.16...v4.5.17) ##### Fixes - *(help)* Style required argument groups - *(derive)* Improve error messages when unsupported fields are used
--- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4a1a584469..68625d5520 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2282,9 +2282,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.16" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed6719fffa43d0d87e5fd8caeab59be1554fb028cd30edc88fc4369b17971019" +checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac" dependencies = [ "clap_builder", "clap_derive", @@ -2292,9 +2292,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.15" +version = "4.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73" dependencies = [ "anstream", "anstyle", From ce4f07bd3cbfa20a95e14af112e83002bfd486d4 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:16:31 -0600 Subject: [PATCH 37/96] Update Rust crate globset to v0.4.15 (#18042) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | 
[globset](https://redirect.github.com/BurntSushi/ripgrep/tree/master/crates/globset) ([source](https://redirect.github.com/BurntSushi/ripgrep/tree/HEAD/crates/globset)) | workspace.dependencies | patch | `0.4.14` -> `0.4.15` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68625d5520..7c298c2a9a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4946,9 +4946,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" dependencies = [ "aho-corasick", "bstr", From c3bdc1c178190dd223d6b4718905f86822329da3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 10:18:14 -0600 Subject: [PATCH 38/96] Update Rust crate ignore to v0.4.23 (#18044) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [ignore](https://redirect.github.com/BurntSushi/ripgrep/tree/master/crates/ignore) 
([source](https://redirect.github.com/BurntSushi/ripgrep/tree/HEAD/crates/ignore)) | workspace.dependencies | patch | `0.4.22` -> `0.4.23` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 3pm on Wednesday" in timezone America/New_York, Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- Release Notes: - N/A Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7c298c2a9a..26b8847041 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5688,9 +5688,9 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b46810df39e66e925525d6e38ce1e7f6e1d208f72dc39757880fcb66e2c58af1" +checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" dependencies = [ "crossbeam-deque", "globset", @@ -6474,7 +6474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -13528,7 +13528,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] From ac0d5d3152fe09201b907210c917e82fee62d450 Mon Sep 17 00:00:00 2001 From: Junkui Zhang <364772080@qq.com> Date: Fri, 20 Sep 2024 00:19:13 +0800 Subject: [PATCH 39/96] windows: Fix regional 
indicator symbols broken (#18053) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #18027 Unlike macOS, not all glyphs in color fonts are color glyphs, such as `🇩🇪` in `Segoe UI Emoji`. As a result, attempting to retrieve color information for these glyphs can cause an error, preventing the glyph from being rendered. This PR addresses the issue by setting the `is_emoji` variable to `false` for non-color glyphs within color fonts. Release Notes: - N/A --- .../gpui/src/platform/windows/direct_write.rs | 42 ++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/crates/gpui/src/platform/windows/direct_write.rs b/crates/gpui/src/platform/windows/direct_write.rs index 6253881f5a..fb53a833d6 100644 --- a/crates/gpui/src/platform/windows/direct_write.rs +++ b/crates/gpui/src/platform/windows/direct_write.rs @@ -1063,7 +1063,7 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl { // This `cast()` action here should never fail since we are running on Win10+, and // `IDWriteFontFace3` requires Win10 let font_face = &font_face.cast::().unwrap(); - let Some((font_identifier, font_struct, is_emoji)) = + let Some((font_identifier, font_struct, color_font)) = get_font_identifier_and_font_struct(font_face, &self.locale) else { return Ok(()); @@ -1084,6 +1084,8 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl { context .index_converter .advance_to_utf16_ix(context.utf16_index); + let is_emoji = color_font + && is_color_glyph(font_face, id, &context.text_system.components.factory); glyphs.push(ShapedGlyph { id, position: point(px(context.width), px(0.0)), @@ -1446,6 +1448,44 @@ fn get_render_target_property( } } +// One would think that with newer DirectWrite method: IDWriteFontFace4::GetGlyphImageFormats +// but that doesn't seem to work for some glyphs, say ❤ +fn is_color_glyph( + font_face: &IDWriteFontFace3, + glyph_id: GlyphId, + factory: &IDWriteFactory5, +) -> bool { + let glyph_run = 
DWRITE_GLYPH_RUN { + fontFace: unsafe { std::mem::transmute_copy(font_face) }, + fontEmSize: 14.0, + glyphCount: 1, + glyphIndices: &(glyph_id.0 as u16), + glyphAdvances: &0.0, + glyphOffsets: &DWRITE_GLYPH_OFFSET { + advanceOffset: 0.0, + ascenderOffset: 0.0, + }, + isSideways: BOOL(0), + bidiLevel: 0, + }; + unsafe { + factory.TranslateColorGlyphRun( + D2D_POINT_2F::default(), + &glyph_run as _, + None, + DWRITE_GLYPH_IMAGE_FORMATS_COLR + | DWRITE_GLYPH_IMAGE_FORMATS_SVG + | DWRITE_GLYPH_IMAGE_FORMATS_PNG + | DWRITE_GLYPH_IMAGE_FORMATS_JPEG + | DWRITE_GLYPH_IMAGE_FORMATS_PREMULTIPLIED_B8G8R8A8, + DWRITE_MEASURING_MODE_NATURAL, + None, + 0, + ) + } + .is_ok() +} + const DEFAULT_LOCALE_NAME: PCWSTR = windows::core::w!("en-US"); const BRUSH_COLOR: D2D1_COLOR_F = D2D1_COLOR_F { r: 1.0, From 8074fba76b4352077fed94364fcfb9d095f177a9 Mon Sep 17 00:00:00 2001 From: Nate Butler Date: Thu, 19 Sep 2024 12:31:40 -0400 Subject: [PATCH 40/96] Update List to support UI Density (#18079) Tracking issue: #18078 Improve UI Density support for List. UI density is an unstable feature. You can read more about it in the above issue! 
| Before Normal - Before Dense - After Normal - After Dense | |--------------------------------------------------------| | ![Group 8](https://github.com/user-attachments/assets/bb896fcf-e4a6-4776-9308-1405906d2dbe) | | | | | Before Normal - Before Dense - After Normal - After Dense | |--------------------------------------------------------| | ![Group 9](https://github.com/user-attachments/assets/00815a1b-071b-4d02-96bc-36bf37b5ae8b) | Release Notes: - N/A --- crates/ui/src/components/list/list.rs | 12 ++-- crates/ui/src/components/list/list_header.rs | 15 +++-- crates/ui/src/components/list/list_item.rs | 8 +-- .../ui/src/components/list/list_separator.rs | 2 +- .../ui/src/components/list/list_sub_header.rs | 57 +++++++++++-------- 5 files changed, 55 insertions(+), 39 deletions(-) diff --git a/crates/ui/src/components/list/list.rs b/crates/ui/src/components/list/list.rs index a09abf92e4..4bf157ef40 100644 --- a/crates/ui/src/components/list/list.rs +++ b/crates/ui/src/components/list/list.rs @@ -52,13 +52,15 @@ impl ParentElement for List { } impl RenderOnce for List { - fn render(self, _cx: &mut WindowContext) -> impl IntoElement { - v_flex().w_full().py_1().children(self.header).map(|this| { - match (self.children.is_empty(), self.toggle) { + fn render(self, cx: &mut WindowContext) -> impl IntoElement { + v_flex() + .w_full() + .py(Spacing::Small.rems(cx)) + .children(self.header) + .map(|this| match (self.children.is_empty(), self.toggle) { (false, _) => this.children(self.children), (true, Some(false)) => this, (true, _) => this.child(Label::new(self.empty_message.clone()).color(Color::Muted)), - } - }) + }) } } diff --git a/crates/ui/src/components/list/list_header.rs b/crates/ui/src/components/list/list_header.rs index 4377efbc46..3b15f8cd3d 100644 --- a/crates/ui/src/components/list/list_header.rs +++ b/crates/ui/src/components/list/list_header.rs @@ -2,6 +2,8 @@ use std::sync::Arc; use crate::{h_flex, prelude::*, Disclosure, Label}; use gpui::{AnyElement, 
ClickEvent}; +use settings::Settings; +use theme::ThemeSettings; #[derive(IntoElement)] pub struct ListHeader { @@ -78,6 +80,8 @@ impl Selectable for ListHeader { impl RenderOnce for ListHeader { fn render(self, cx: &mut WindowContext) -> impl IntoElement { + let ui_density = ThemeSettings::get_global(cx).ui_density; + h_flex() .id(self.label.clone()) .w_full() @@ -85,7 +89,10 @@ impl RenderOnce for ListHeader { .group("list_header") .child( div() - .h_7() + .map(|this| match ui_density { + theme::UiDensity::Comfortable => this.h_5(), + _ => this.h_7(), + }) .when(self.inset, |this| this.px_2()) .when(self.selected, |this| { this.bg(cx.theme().colors().ghost_element_selected) @@ -95,10 +102,10 @@ impl RenderOnce for ListHeader { .items_center() .justify_between() .w_full() - .gap_1() + .gap(Spacing::Small.rems(cx)) .child( h_flex() - .gap_1() + .gap(Spacing::Small.rems(cx)) .children(self.toggle.map(|is_open| { Disclosure::new("toggle", is_open).on_toggle(self.on_toggle.clone()) })) @@ -106,7 +113,7 @@ impl RenderOnce for ListHeader { div() .id("label_container") .flex() - .gap_1() + .gap(Spacing::Small.rems(cx)) .items_center() .children(self.start_slot) .child(Label::new(self.label.clone()).color(Color::Muted)) diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index 6b38b7f963..e1c90894fd 100644 --- a/crates/ui/src/components/list/list_item.rs +++ b/crates/ui/src/components/list/list_item.rs @@ -162,7 +162,7 @@ impl RenderOnce for ListItem { // When an item is inset draw the indent spacing outside of the item .when(self.inset, |this| { this.ml(self.indent_level as f32 * self.indent_step_size) - .px_1() + .px(Spacing::Small.rems(cx)) }) .when(!self.inset && !self.disabled, |this| { this @@ -185,7 +185,7 @@ impl RenderOnce for ListItem { .w_full() .relative() .gap_1() - .px_1p5() + .px(Spacing::Medium.rems(cx)) .map(|this| match self.spacing { ListItemSpacing::Dense => this, ListItemSpacing::Sparse => this.py_1(), 
@@ -238,7 +238,7 @@ impl RenderOnce for ListItem { .flex_grow() .flex_shrink_0() .flex_basis(relative(0.25)) - .gap_1() + .gap(Spacing::Small.rems(cx)) .overflow_hidden() .children(self.start_slot) .children(self.children), @@ -260,7 +260,7 @@ impl RenderOnce for ListItem { h_flex() .h_full() .absolute() - .right_1p5() + .right(Spacing::Medium.rems(cx)) .top_0() .visible_on_hover("list_item") .child(end_hover_slot), diff --git a/crates/ui/src/components/list/list_separator.rs b/crates/ui/src/components/list/list_separator.rs index b53dc7a043..0d5fdf8d49 100644 --- a/crates/ui/src/components/list/list_separator.rs +++ b/crates/ui/src/components/list/list_separator.rs @@ -8,7 +8,7 @@ impl RenderOnce for ListSeparator { div() .h_px() .w_full() - .my_1p5() + .my(Spacing::Medium.rems(cx)) .bg(cx.theme().colors().border_variant) } } diff --git a/crates/ui/src/components/list/list_sub_header.rs b/crates/ui/src/components/list/list_sub_header.rs index 2aa9387129..0ed072ebbf 100644 --- a/crates/ui/src/components/list/list_sub_header.rs +++ b/crates/ui/src/components/list/list_sub_header.rs @@ -39,30 +39,37 @@ impl Selectable for ListSubHeader { impl RenderOnce for ListSubHeader { fn render(self, cx: &mut WindowContext) -> impl IntoElement { - h_flex().flex_1().w_full().relative().pb_1().px_0p5().child( - div() - .h_6() - .when(self.inset, |this| this.px_2()) - .when(self.selected, |this| { - this.bg(cx.theme().colors().ghost_element_selected) - }) - .flex() - .flex_1() - .w_full() - .gap_1() - .items_center() - .justify_between() - .child( - div() - .flex() - .gap_1() - .items_center() - .children( - self.start_slot - .map(|i| Icon::new(i).color(Color::Muted).size(IconSize::Small)), - ) - .child(Label::new(self.label.clone()).color(Color::Muted)), - ), - ) + h_flex() + .flex_1() + .w_full() + .relative() + .pb(Spacing::Small.rems(cx)) + .px(Spacing::XSmall.rems(cx)) + .child( + div() + .h_6() + .when(self.inset, |this| this.px_2()) + .when(self.selected, |this| { + 
this.bg(cx.theme().colors().ghost_element_selected) + }) + .flex() + .flex_1() + .w_full() + .gap_1() + .items_center() + .justify_between() + .child( + div() + .flex() + .gap_1() + .items_center() + .children( + self.start_slot.map(|i| { + Icon::new(i).color(Color::Muted).size(IconSize::Small) + }), + ) + .child(Label::new(self.label.clone()).color(Color::Muted)), + ), + ) } } From 1fc391f696a828780b6a651df0b797be91aee91e Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 19 Sep 2024 13:14:15 -0400 Subject: [PATCH 41/96] Make `Buffer::apply_ops` infallible (#18089) This PR makes the `Buffer::apply_ops` method infallible for `text::Buffer` and `language::Buffer`. We discovered that `text::Buffer::apply_ops` was only fallible due to `apply_undo`, which didn't actually need to be fallible. Release Notes: - N/A --- crates/assistant/src/context.rs | 8 ++-- crates/assistant/src/context/context_tests.rs | 8 +--- crates/assistant/src/context_store.rs | 6 +-- crates/channel/src/channel_buffer.rs | 4 +- crates/channel/src/channel_store.rs | 2 +- crates/collab/src/db/queries/buffers.rs | 4 +- crates/collab/src/db/tests/buffer_tests.rs | 18 ++++---- crates/language/src/buffer.rs | 5 +- crates/language/src/buffer_tests.rs | 46 ++++++++----------- crates/multi_buffer/src/multi_buffer.rs | 12 ++--- crates/project/src/buffer_store.rs | 9 ++-- crates/text/src/tests.rs | 32 ++++++------- crates/text/src/text.rs | 39 +++++++--------- 13 files changed, 85 insertions(+), 108 deletions(-) diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index d72b04e3cd..830c098049 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -683,7 +683,7 @@ impl Context { buffer.set_text(saved_context.text.as_str(), cx) }); let operations = saved_context.into_ops(&this.buffer, cx); - this.apply_ops(operations, cx).unwrap(); + this.apply_ops(operations, cx); this } @@ -756,7 +756,7 @@ impl Context { &mut self, ops: impl IntoIterator, cx: 
&mut ModelContext, - ) -> Result<()> { + ) { let mut buffer_ops = Vec::new(); for op in ops { match op { @@ -765,10 +765,8 @@ impl Context { } } self.buffer - .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx))?; + .update(cx, |buffer, cx| buffer.apply_ops(buffer_ops, cx)); self.flush_ops(cx); - - Ok(()) } fn flush_ops(&mut self, cx: &mut ModelContext) { diff --git a/crates/assistant/src/context/context_tests.rs b/crates/assistant/src/context/context_tests.rs index 842ac05078..2d6a2894c9 100644 --- a/crates/assistant/src/context/context_tests.rs +++ b/crates/assistant/src/context/context_tests.rs @@ -1166,9 +1166,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std ); network.lock().broadcast(replica_id, ops_to_send); - context - .update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)) - .unwrap(); + context.update(cx, |context, cx| context.apply_ops(ops_to_receive, cx)); } else if rng.gen_bool(0.1) && replica_id != 0 { log::info!("Context {}: disconnecting", context_index); network.lock().disconnect_peer(replica_id); @@ -1180,9 +1178,7 @@ async fn test_random_context_collaboration(cx: &mut TestAppContext, mut rng: Std .map(ContextOperation::from_proto) .collect::>>() .unwrap(); - context - .update(cx, |context, cx| context.apply_ops(ops, cx)) - .unwrap(); + context.update(cx, |context, cx| context.apply_ops(ops, cx)); } } } diff --git a/crates/assistant/src/context_store.rs b/crates/assistant/src/context_store.rs index 867d906791..f57a2fbca6 100644 --- a/crates/assistant/src/context_store.rs +++ b/crates/assistant/src/context_store.rs @@ -223,7 +223,7 @@ impl ContextStore { if let Some(context) = this.loaded_context_for_id(&context_id, cx) { let operation_proto = envelope.payload.operation.context("invalid operation")?; let operation = ContextOperation::from_proto(operation_proto)?; - context.update(cx, |context, cx| context.apply_ops([operation], cx))?; + context.update(cx, |context, cx| 
context.apply_ops([operation], cx)); } Ok(()) })? @@ -394,7 +394,7 @@ impl ContextStore { .collect::>>() }) .await?; - context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??; + context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; this.update(&mut cx, |this, cx| { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { existing_context @@ -531,7 +531,7 @@ impl ContextStore { .collect::>>() }) .await?; - context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??; + context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?; this.update(&mut cx, |this, cx| { if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) { existing_context diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index df3e66483f..755e7400e1 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -66,7 +66,7 @@ impl ChannelBuffer { let capability = channel_store.read(cx).channel_capability(channel.id); language::Buffer::remote(buffer_id, response.replica_id as u16, capability, base_text) })?; - buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))??; + buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?; let subscription = client.subscribe_to_entity(channel.id.0)?; @@ -151,7 +151,7 @@ impl ChannelBuffer { cx.notify(); this.buffer .update(cx, |buffer, cx| buffer.apply_ops(ops, cx)) - })??; + })?; Ok(()) } diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index 9bd5fd564f..fc5b12cfae 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -1007,7 +1007,7 @@ impl ChannelStore { .into_iter() .map(language::proto::deserialize_operation) .collect::>>()?; - buffer.apply_ops(incoming_operations, cx)?; + buffer.apply_ops(incoming_operations, cx); anyhow::Ok(outgoing_operations) }) .log_err(); diff --git 
a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 7b19dee315..06ad2b4594 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -689,9 +689,7 @@ impl Database { } let mut text_buffer = text::Buffer::new(0, text::BufferId::new(1).unwrap(), base_text); - text_buffer - .apply_ops(operations.into_iter().filter_map(operation_from_wire)) - .unwrap(); + text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire)); let base_text = text_buffer.text(); let epoch = buffer.epoch + 1; diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index 55a8f216c4..adc571580a 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -96,16 +96,14 @@ async fn test_channel_buffers(db: &Arc) { text::BufferId::new(1).unwrap(), buffer_response_b.base_text, ); - buffer_b - .apply_ops(buffer_response_b.operations.into_iter().map(|operation| { - let operation = proto::deserialize_operation(operation).unwrap(); - if let language::Operation::Buffer(operation) = operation { - operation - } else { - unreachable!() - } - })) - .unwrap(); + buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| { + let operation = proto::deserialize_operation(operation).unwrap(); + if let language::Operation::Buffer(operation) = operation { + operation + } else { + unreachable!() + } + })); assert_eq!(buffer_b.text(), "hello, cruel world"); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 43fe1565ac..08fc1ccdb4 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1972,7 +1972,7 @@ impl Buffer { &mut self, ops: I, cx: &mut ModelContext, - ) -> Result<()> { + ) { self.pending_autoindent.take(); let was_dirty = self.is_dirty(); let old_version = self.version.clone(); @@ -1991,14 +1991,13 @@ impl Buffer { } }) .collect::>(); - 
self.text.apply_ops(buffer_ops)?; + self.text.apply_ops(buffer_ops); self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops(cx); self.did_edit(&old_version, was_dirty, cx); // Notify independently of whether the buffer was edited as the operations could include a // selection update. cx.notify(); - Ok(()) } fn flush_deferred_ops(&mut self, cx: &mut ModelContext) { diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 50dea8d256..23faa33316 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -308,7 +308,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) { // Incorporating a set of remote ops emits a single edited event, // followed by a dirty changed event. buffer2.update(cx, |buffer, cx| { - buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap(); + buffer.apply_ops(buffer1_ops.lock().drain(..), cx); }); assert_eq!( mem::take(&mut *buffer_1_events.lock()), @@ -332,7 +332,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) { // Incorporating the remote ops again emits a single edited event, // followed by a dirty changed event. 
buffer2.update(cx, |buffer, cx| { - buffer.apply_ops(buffer1_ops.lock().drain(..), cx).unwrap(); + buffer.apply_ops(buffer1_ops.lock().drain(..), cx); }); assert_eq!( mem::take(&mut *buffer_1_events.lock()), @@ -2274,13 +2274,11 @@ fn test_serialization(cx: &mut gpui::AppContext) { .block(buffer1.read(cx).serialize_ops(None, cx)); let buffer2 = cx.new_model(|cx| { let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); - buffer - .apply_ops( - ops.into_iter() - .map(|op| proto::deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| proto::deserialize_operation(op).unwrap()), + cx, + ); buffer }); assert_eq!(buffer2.read(cx).text(), "abcDF"); @@ -2401,13 +2399,11 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { .block(base_buffer.read(cx).serialize_ops(None, cx)); let mut buffer = Buffer::from_proto(i as ReplicaId, Capability::ReadWrite, state, None).unwrap(); - buffer - .apply_ops( - ops.into_iter() - .map(|op| proto::deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| proto::deserialize_operation(op).unwrap()), + cx, + ); buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); let network = network.clone(); cx.subscribe(&cx.handle(), move |buffer, _, event, _| { @@ -2523,14 +2519,12 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { None, ) .unwrap(); - new_buffer - .apply_ops( - old_buffer_ops - .into_iter() - .map(|op| deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + new_buffer.apply_ops( + old_buffer_ops + .into_iter() + .map(|op| deserialize_operation(op).unwrap()), + cx, + ); log::info!( "New replica {} text: {:?}", new_buffer.replica_id(), @@ -2570,7 +2564,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { ops ); new_buffer.update(cx, |new_buffer, cx| { - new_buffer.apply_ops(ops, cx).unwrap(); + new_buffer.apply_ops(ops, 
cx); }); } } @@ -2598,7 +2592,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { ops.len(), ops ); - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx).unwrap()); + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } } _ => {} diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 9dee092dea..29bd9a8068 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5019,13 +5019,11 @@ mod tests { .background_executor() .block(host_buffer.read(cx).serialize_ops(None, cx)); let mut buffer = Buffer::from_proto(1, Capability::ReadWrite, state, None).unwrap(); - buffer - .apply_ops( - ops.into_iter() - .map(|op| language::proto::deserialize_operation(op).unwrap()), - cx, - ) - .unwrap(); + buffer.apply_ops( + ops.into_iter() + .map(|op| language::proto::deserialize_operation(op).unwrap()), + cx, + ); buffer }); let multibuffer = cx.new_model(|cx| MultiBuffer::singleton(guest_buffer.clone(), cx)); diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index ead3235997..0045aba2e8 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -644,7 +644,7 @@ impl BufferStore { } hash_map::Entry::Occupied(mut entry) => { if let OpenBuffer::Operations(operations) = entry.get_mut() { - buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?; + buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx)); } else if entry.get().upgrade().is_some() { if is_remote { return Ok(()); @@ -1051,12 +1051,12 @@ impl BufferStore { match this.opened_buffers.entry(buffer_id) { hash_map::Entry::Occupied(mut e) => match e.get_mut() { OpenBuffer::Strong(buffer) => { - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?; + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops), OpenBuffer::Weak(buffer) => { if let 
Some(buffer) = buffer.upgrade() { - buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?; + buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx)); } } }, @@ -1217,7 +1217,8 @@ impl BufferStore { .into_iter() .map(language::proto::deserialize_operation) .collect::>>()?; - buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)) + buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx)); + anyhow::Ok(()) }); if let Err(error) = result { diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index 6f748fb588..8c5d7014ee 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -515,25 +515,25 @@ fn test_undo_redo() { let entries = buffer.history.undo_stack.clone(); assert_eq!(entries.len(), 3); - buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()); assert_eq!(buffer.text(), "1cdef234"); - buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()); assert_eq!(buffer.text(), "1abcdx234"); - buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()); assert_eq!(buffer.text(), "1abx234"); - buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()); assert_eq!(buffer.text(), "1abcdef234"); - buffer.undo_or_redo(entries[2].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[2].transaction.clone()); assert_eq!(buffer.text(), "1abyzef234"); - buffer.undo_or_redo(entries[0].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[0].transaction.clone()); 
assert_eq!(buffer.text(), "1yzef234"); - buffer.undo_or_redo(entries[1].transaction.clone()).unwrap(); + buffer.undo_or_redo(entries[1].transaction.clone()); assert_eq!(buffer.text(), "1234"); } @@ -692,12 +692,12 @@ fn test_concurrent_edits() { let buf3_op = buffer3.edit([(5..6, "56")]); assert_eq!(buffer3.text(), "abcde56"); - buffer1.apply_op(buf2_op.clone()).unwrap(); - buffer1.apply_op(buf3_op.clone()).unwrap(); - buffer2.apply_op(buf1_op.clone()).unwrap(); - buffer2.apply_op(buf3_op).unwrap(); - buffer3.apply_op(buf1_op).unwrap(); - buffer3.apply_op(buf2_op).unwrap(); + buffer1.apply_op(buf2_op.clone()); + buffer1.apply_op(buf3_op.clone()); + buffer2.apply_op(buf1_op.clone()); + buffer2.apply_op(buf3_op); + buffer3.apply_op(buf1_op); + buffer3.apply_op(buf2_op); assert_eq!(buffer1.text(), "a12c34e56"); assert_eq!(buffer2.text(), "a12c34e56"); @@ -756,7 +756,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { replica_id, ops.len() ); - buffer.apply_ops(ops).unwrap(); + buffer.apply_ops(ops); } } _ => {} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 9630ec5b80..8d2cd97aac 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -38,7 +38,6 @@ pub use subscription::*; pub use sum_tree::Bias; use sum_tree::{FilterCursor, SumTree, TreeMap}; use undo_map::UndoMap; -use util::ResultExt; #[cfg(any(test, feature = "test-support"))] use util::RandomCharIter; @@ -927,23 +926,22 @@ impl Buffer { self.snapshot.line_ending = line_ending; } - pub fn apply_ops>(&mut self, ops: I) -> Result<()> { + pub fn apply_ops>(&mut self, ops: I) { let mut deferred_ops = Vec::new(); for op in ops { self.history.push(op.clone()); if self.can_apply_op(&op) { - self.apply_op(op)?; + self.apply_op(op); } else { self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); } } self.deferred_ops.insert(deferred_ops); - self.flush_deferred_ops()?; - Ok(()) + self.flush_deferred_ops(); } - fn apply_op(&mut self, op: Operation) -> Result<()> { + fn 
apply_op(&mut self, op: Operation) { match op { Operation::Edit(edit) => { if !self.version.observed(edit.timestamp) { @@ -960,7 +958,7 @@ impl Buffer { } Operation::Undo(undo) => { if !self.version.observed(undo.timestamp) { - self.apply_undo(&undo)?; + self.apply_undo(&undo); self.snapshot.version.observe(undo.timestamp); self.lamport_clock.observe(undo.timestamp); } @@ -974,7 +972,6 @@ impl Buffer { true } }); - Ok(()) } fn apply_remote_edit( @@ -1217,7 +1214,7 @@ impl Buffer { fragment_ids } - fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> { + fn apply_undo(&mut self, undo: &UndoOperation) { self.snapshot.undo_map.insert(undo); let mut edits = Patch::default(); @@ -1268,22 +1265,20 @@ impl Buffer { self.snapshot.visible_text = visible_text; self.snapshot.deleted_text = deleted_text; self.subscriptions.publish_mut(&edits); - Ok(()) } - fn flush_deferred_ops(&mut self) -> Result<()> { + fn flush_deferred_ops(&mut self) { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); for op in self.deferred_ops.drain().iter().cloned() { if self.can_apply_op(&op) { - self.apply_op(op)?; + self.apply_op(op); } else { self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); } } self.deferred_ops.insert(deferred_ops); - Ok(()) } fn can_apply_op(&self, op: &Operation) -> bool { @@ -1352,7 +1347,7 @@ impl Buffer { if let Some(entry) = self.history.pop_undo() { let transaction = entry.transaction.clone(); let transaction_id = transaction.id; - let op = self.undo_or_redo(transaction).unwrap(); + let op = self.undo_or_redo(transaction); Some((transaction_id, op)) } else { None @@ -1365,7 +1360,7 @@ impl Buffer { .remove_from_undo(transaction_id)? 
.transaction .clone(); - self.undo_or_redo(transaction).log_err() + Some(self.undo_or_redo(transaction)) } pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec { @@ -1378,7 +1373,7 @@ impl Buffer { transactions .into_iter() - .map(|transaction| self.undo_or_redo(transaction).unwrap()) + .map(|transaction| self.undo_or_redo(transaction)) .collect() } @@ -1394,7 +1389,7 @@ impl Buffer { if let Some(entry) = self.history.pop_redo() { let transaction = entry.transaction.clone(); let transaction_id = transaction.id; - let op = self.undo_or_redo(transaction).unwrap(); + let op = self.undo_or_redo(transaction); Some((transaction_id, op)) } else { None @@ -1411,11 +1406,11 @@ impl Buffer { transactions .into_iter() - .map(|transaction| self.undo_or_redo(transaction).unwrap()) + .map(|transaction| self.undo_or_redo(transaction)) .collect() } - fn undo_or_redo(&mut self, transaction: Transaction) -> Result { + fn undo_or_redo(&mut self, transaction: Transaction) -> Operation { let mut counts = HashMap::default(); for edit_id in transaction.edit_ids { counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1); @@ -1426,11 +1421,11 @@ impl Buffer { version: self.version(), counts, }; - self.apply_undo(&undo)?; + self.apply_undo(&undo); self.snapshot.version.observe(undo.timestamp); let operation = Operation::Undo(undo); self.history.push(operation.clone()); - Ok(operation) + operation } pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) { @@ -1762,7 +1757,7 @@ impl Buffer { self.replica_id, transaction ); - ops.push(self.undo_or_redo(transaction).unwrap()); + ops.push(self.undo_or_redo(transaction)); } } ops From 27c1106fadef97dc56d17d9359fd7514b71c8643 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Thu, 19 Sep 2024 13:26:14 -0400 Subject: [PATCH 42/96] Fix bug where copying from assistant panel appends extra newline to clipboard (#18090) Closes https://github.com/zed-industries/zed/issues/17661 Release Notes: - Fixed a bug where copying from the assistant panel appended an additional newline to the end of the clipboard contents. --- crates/assistant/src/assistant_panel.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index da176ebeee..364c6f9663 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -3533,7 +3533,9 @@ impl ContextEditor { for chunk in context.buffer().read(cx).text_for_range(range) { text.push_str(chunk); } - text.push('\n'); + if message.offset_range.end < selection.range().end { + text.push('\n'); + } } } } From 00b1c81c9f8f209667140036da4c9ac578031546 Mon Sep 17 00:00:00 2001 From: David Soria Parra <167242713+dsp-ant@users.noreply.github.com> Date: Thu, 19 Sep 2024 20:51:48 +0100 Subject: [PATCH 43/96] context_servers: Remove context_type from ResourceContent (#18097) This is removed in the protocol Release Notes: - N/A --- crates/context_servers/src/types.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/context_servers/src/types.rs b/crates/context_servers/src/types.rs index c0e9a79f15..cd95ecd7ad 100644 --- a/crates/context_servers/src/types.rs +++ b/crates/context_servers/src/types.rs @@ -239,7 +239,6 @@ pub struct Resource { pub struct ResourceContent { pub uri: Url, pub mime_type: Option, - pub content_type: String, pub text: Option, pub data: Option, } From fbbf0393cbe9b2094bbdd496a5d5d15419eeaeb3 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 19 Sep 2024 14:04:46 -0600 Subject: [PATCH 44/96] ssh-remoting: Fix go to definition out of worktree (#18094) Release Notes: - ssh-remoting: Fixed go to definition outside of worktree --------- Co-authored-by: 
Mikayla --- crates/client/src/client.rs | 4 + crates/project/src/lsp_store.rs | 21 ++- crates/project/src/project.rs | 181 ++++++++----------- crates/project/src/worktree_store.rs | 173 ++++++++++++++---- crates/remote/src/ssh_session.rs | 7 +- crates/remote_server/src/headless_project.rs | 17 +- crates/rpc/src/proto_client.rs | 6 + 7 files changed, 251 insertions(+), 158 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 09286300d9..a8387f7c5a 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1621,6 +1621,10 @@ impl ProtoClient for Client { fn message_handler_set(&self) -> &parking_lot::Mutex { &self.handler_set } + + fn goes_via_collab(&self) -> bool { + true + } } #[derive(Serialize, Deserialize)] diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 58d9ba8926..5c32c9030d 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -534,6 +534,9 @@ impl LspStore { } WorktreeStoreEvent::WorktreeRemoved(_, id) => self.remove_worktree(*id, cx), WorktreeStoreEvent::WorktreeOrderChanged => {} + WorktreeStoreEvent::WorktreeUpdateSent(worktree) => { + worktree.update(cx, |worktree, _cx| self.send_diagnostic_summaries(worktree)); + } } } @@ -764,24 +767,22 @@ impl LspStore { self.active_entry = active_entry; } - pub(crate) fn send_diagnostic_summaries( - &self, - worktree: &mut Worktree, - ) -> Result<(), anyhow::Error> { + pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) { if let Some(client) = self.downstream_client.clone() { if let Some(summaries) = self.diagnostic_summaries.get(&worktree.id()) { for (path, summaries) in summaries { for (&server_id, summary) in summaries { - client.send(proto::UpdateDiagnosticSummary { - project_id: self.project_id, - worktree_id: worktree.id().to_proto(), - summary: Some(summary.to_proto(server_id, path)), - })?; + client + .send(proto::UpdateDiagnosticSummary { + project_id: 
self.project_id, + worktree_id: worktree.id().to_proto(), + summary: Some(summary.to_proto(server_id, path)), + }) + .log_err(); } } } } - Ok(()) } pub fn request_lsp( diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index f4816cf0cd..fcf10d11c2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -31,7 +31,7 @@ pub use environment::ProjectEnvironment; use futures::{ channel::mpsc::{self, UnboundedReceiver}, future::try_join_all, - AsyncWriteExt, FutureExt, StreamExt, + AsyncWriteExt, StreamExt, }; use git::{blame::Blame, repository::GitRepository}; @@ -152,7 +152,7 @@ pub struct Project { _subscriptions: Vec, buffers_needing_diff: HashSet>, git_diff_debouncer: DebouncedDelay, - remotely_created_buffers: Arc>, + remotely_created_models: Arc>, terminals: Terminals, node: Option>, tasks: Model, @@ -169,26 +169,28 @@ pub struct Project { } #[derive(Default)] -struct RemotelyCreatedBuffers { +struct RemotelyCreatedModels { + worktrees: Vec>, buffers: Vec>, retain_count: usize, } -struct RemotelyCreatedBufferGuard { - remote_buffers: std::sync::Weak>, +struct RemotelyCreatedModelGuard { + remote_models: std::sync::Weak>, } -impl Drop for RemotelyCreatedBufferGuard { +impl Drop for RemotelyCreatedModelGuard { fn drop(&mut self) { - if let Some(remote_buffers) = self.remote_buffers.upgrade() { - let mut remote_buffers = remote_buffers.lock(); + if let Some(remote_models) = self.remote_models.upgrade() { + let mut remote_models = remote_models.lock(); assert!( - remote_buffers.retain_count > 0, - "RemotelyCreatedBufferGuard dropped too many times" + remote_models.retain_count > 0, + "RemotelyCreatedModelGuard dropped too many times" ); - remote_buffers.retain_count -= 1; - if remote_buffers.retain_count == 0 { - remote_buffers.buffers.clear(); + remote_models.retain_count -= 1; + if remote_models.retain_count == 0 { + remote_models.buffers.clear(); + remote_models.worktrees.clear(); } } } @@ -620,7 +622,7 @@ impl 
Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let worktree_store = cx.new_model(|_| WorktreeStore::new(false, fs.clone())); + let worktree_store = cx.new_model(|_| WorktreeStore::new(None, false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -687,7 +689,7 @@ impl Project { dev_server_project_id: None, search_history: Self::new_search_history(), environment, - remotely_created_buffers: Default::default(), + remotely_created_models: Default::default(), last_formatting_failure: None, buffers_being_formatted: Default::default(), search_included_history: Self::new_search_history(), @@ -714,11 +716,8 @@ impl Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let worktree_store = cx.new_model(|_| { - let mut worktree_store = WorktreeStore::new(false, fs.clone()); - worktree_store.set_upstream_client(ssh.clone().into()); - worktree_store - }); + let worktree_store = + cx.new_model(|_| WorktreeStore::new(Some(ssh.clone().into()), false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -773,7 +772,7 @@ impl Project { dev_server_project_id: None, search_history: Self::new_search_history(), environment, - remotely_created_buffers: Default::default(), + remotely_created_models: Default::default(), last_formatting_failure: None, buffers_being_formatted: Default::default(), search_included_history: Self::new_search_history(), @@ -787,8 +786,9 @@ impl Project { ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.worktree_store); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.lsp_store); ssh.subscribe_to_entity(SSH_PROJECT_ID, &this.settings_observer); - client.add_model_message_handler(Self::handle_update_worktree); client.add_model_message_handler(Self::handle_create_buffer_for_peer); + client.add_model_message_handler(Self::handle_update_worktree); + 
client.add_model_message_handler(Self::handle_update_project); client.add_model_request_handler(BufferStore::handle_update_buffer); BufferStore::init(&client); LspStore::init(&client); @@ -867,8 +867,7 @@ impl Project { let role = response.payload.role(); let worktree_store = cx.new_model(|_| { - let mut store = WorktreeStore::new(true, fs.clone()); - store.set_upstream_client(client.clone().into()); + let mut store = WorktreeStore::new(Some(client.clone().into()), true, fs.clone()); if let Some(dev_server_project_id) = response.payload.dev_server_project_id { store.set_dev_server_project_id(DevServerProjectId(dev_server_project_id)); } @@ -955,7 +954,7 @@ impl Project { search_included_history: Self::new_search_history(), search_excluded_history: Self::new_search_history(), environment: ProjectEnvironment::new(&worktree_store, None, cx), - remotely_created_buffers: Arc::new(Mutex::new(RemotelyCreatedBuffers::default())), + remotely_created_models: Arc::new(Mutex::new(RemotelyCreatedModels::default())), last_formatting_failure: None, buffers_being_formatted: Default::default(), }; @@ -1259,43 +1258,6 @@ impl Project { } } - fn metadata_changed(&mut self, cx: &mut ModelContext) { - cx.notify(); - - let ProjectClientState::Shared { remote_id } = self.client_state else { - return; - }; - let project_id = remote_id; - - let update_project = self.client.request(proto::UpdateProject { - project_id, - worktrees: self.worktree_metadata_protos(cx), - }); - cx.spawn(|this, mut cx| async move { - update_project.await?; - this.update(&mut cx, |this, cx| { - let client = this.client.clone(); - let worktrees = this.worktree_store.read(cx).worktrees().collect::>(); - - for worktree in worktrees { - worktree.update(cx, |worktree, cx| { - let client = client.clone(); - worktree.observe_updates(project_id, cx, { - move |update| client.request(update).map(|result| result.is_ok()) - }); - - this.lsp_store.update(cx, |lsp_store, _| { - lsp_store.send_diagnostic_summaries(worktree) - }) 
- })?; - } - - anyhow::Ok(()) - }) - }) - .detach_and_log_err(cx); - } - pub fn task_inventory(&self) -> &Model { &self.tasks } @@ -1513,7 +1475,7 @@ impl Project { buffer_store.shared(project_id, self.client.clone().into(), cx) }); self.worktree_store.update(cx, |worktree_store, cx| { - worktree_store.set_shared(true, cx); + worktree_store.shared(project_id, self.client.clone().into(), cx); }); self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.shared(project_id, self.client.clone().into(), cx) @@ -1526,7 +1488,6 @@ impl Project { remote_id: project_id, }; - self.metadata_changed(cx); cx.emit(Event::RemoteIdChanged(Some(project_id))); cx.notify(); Ok(()) @@ -1540,7 +1501,11 @@ impl Project { self.buffer_store .update(cx, |buffer_store, _| buffer_store.forget_shared_buffers()); self.set_collaborators_from_proto(message.collaborators, cx)?; - self.metadata_changed(cx); + + self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.send_project_updates(cx); + }); + cx.notify(); cx.emit(Event::Reshared); Ok(()) } @@ -1576,7 +1541,6 @@ impl Project { pub fn unshare(&mut self, cx: &mut ModelContext) -> Result<()> { self.unshare_internal(cx)?; - self.metadata_changed(cx); cx.notify(); Ok(()) } @@ -1598,7 +1562,7 @@ impl Project { self.collaborators.clear(); self.client_subscriptions.clear(); self.worktree_store.update(cx, |store, cx| { - store.set_shared(false, cx); + store.unshared(cx); }); self.buffer_store.update(cx, |buffer_store, cx| { buffer_store.forget_shared_buffers(); @@ -1867,9 +1831,9 @@ impl Project { cx: &mut ModelContext, ) -> Result<()> { { - let mut remotely_created_buffers = self.remotely_created_buffers.lock(); - if remotely_created_buffers.retain_count > 0 { - remotely_created_buffers.buffers.push(buffer.clone()) + let mut remotely_created_models = self.remotely_created_models.lock(); + if remotely_created_models.retain_count > 0 { + remotely_created_models.buffers.push(buffer.clone()) } } @@ -2110,10 +2074,17 @@ impl Project { 
cx.emit(Event::WorktreeRemoved(*id)); } WorktreeStoreEvent::WorktreeOrderChanged => cx.emit(Event::WorktreeOrderChanged), + WorktreeStoreEvent::WorktreeUpdateSent(_) => {} } } fn on_worktree_added(&mut self, worktree: &Model, cx: &mut ModelContext) { + { + let mut remotely_created_models = self.remotely_created_models.lock(); + if remotely_created_models.retain_count > 0 { + remotely_created_models.worktrees.push(worktree.clone()) + } + } cx.observe(worktree, |_, _, cx| cx.notify()).detach(); cx.subscribe(worktree, |this, worktree, event, cx| { let is_local = worktree.read(cx).is_local(); @@ -2140,7 +2111,7 @@ impl Project { } }) .detach(); - self.metadata_changed(cx); + cx.notify(); } fn on_worktree_removed(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext) { @@ -2171,7 +2142,7 @@ impl Project { inventory.remove_worktree_sources(id_to_remove); }); - self.metadata_changed(cx); + cx.notify(); } fn on_buffer_event( @@ -3012,7 +2983,7 @@ impl Project { #[inline(never)] fn definition_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3025,7 +2996,7 @@ impl Project { ) } pub fn definition( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3035,7 +3006,7 @@ impl Project { } fn declaration_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3049,7 +3020,7 @@ impl Project { } pub fn declaration( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3059,7 +3030,7 @@ impl Project { } fn type_definition_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3073,7 +3044,7 @@ impl Project { } pub fn type_definition( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3083,7 +3054,7 @@ impl Project { } pub fn implementation( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3098,7 +3069,7 @@ impl Project { } pub fn references( - &self, + &mut 
self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3113,7 +3084,7 @@ impl Project { } fn document_highlights_impl( - &self, + &mut self, buffer: &Model, position: PointUtf16, cx: &mut ModelContext, @@ -3127,7 +3098,7 @@ impl Project { } pub fn document_highlights( - &self, + &mut self, buffer: &Model, position: T, cx: &mut ModelContext, @@ -3514,7 +3485,7 @@ impl Project { query: Some(query.to_proto()), limit: limit as _, }); - let guard = self.retain_remotely_created_buffers(cx); + let guard = self.retain_remotely_created_models(cx); cx.spawn(move |this, mut cx| async move { let response = request.await?; @@ -3536,7 +3507,7 @@ impl Project { } pub fn request_lsp( - &self, + &mut self, buffer_handle: Model, server: LanguageServerToQuery, request: R, @@ -3546,8 +3517,14 @@ impl Project { ::Result: Send, ::Params: Send, { - self.lsp_store.update(cx, |lsp_store, cx| { + let guard = self.retain_remotely_created_models(cx); + let task = self.lsp_store.update(cx, |lsp_store, cx| { lsp_store.request_lsp(buffer_handle, server, request, cx) + }); + cx.spawn(|_, _| async move { + let result = task.await; + drop(guard); + result }) } @@ -4095,6 +4072,7 @@ impl Project { })? 
} + // Collab sends UpdateWorktree protos as messages async fn handle_update_worktree( this: Model, envelope: TypedEnvelope, @@ -4130,19 +4108,21 @@ impl Project { BufferStore::handle_update_buffer(buffer_store, envelope, cx).await } - fn retain_remotely_created_buffers( + fn retain_remotely_created_models( &mut self, cx: &mut ModelContext, - ) -> RemotelyCreatedBufferGuard { + ) -> RemotelyCreatedModelGuard { { - let mut remotely_created_buffers = self.remotely_created_buffers.lock(); - if remotely_created_buffers.retain_count == 0 { - remotely_created_buffers.buffers = self.buffer_store.read(cx).buffers().collect(); + let mut remotely_create_models = self.remotely_created_models.lock(); + if remotely_create_models.retain_count == 0 { + remotely_create_models.buffers = self.buffer_store.read(cx).buffers().collect(); + remotely_create_models.worktrees = + self.worktree_store.read(cx).worktrees().collect(); } - remotely_created_buffers.retain_count += 1; + remotely_create_models.retain_count += 1; } - RemotelyCreatedBufferGuard { - remote_buffers: Arc::downgrade(&self.remotely_created_buffers), + RemotelyCreatedModelGuard { + remote_models: Arc::downgrade(&self.remotely_created_models), } } @@ -4637,16 +4617,11 @@ impl Project { worktrees: Vec, cx: &mut ModelContext, ) -> Result<()> { - self.metadata_changed(cx); - self.worktree_store.update(cx, |worktree_store, cx| { - worktree_store.set_worktrees_from_proto( - worktrees, - self.replica_id(), - self.remote_id().ok_or_else(|| anyhow!("invalid project"))?, - self.client.clone().into(), - cx, - ) - }) + cx.notify(); + let result = self.worktree_store.update(cx, |worktree_store, cx| { + worktree_store.set_worktrees_from_proto(worktrees, self.replica_id(), cx) + }); + result } fn set_collaborators_from_proto( diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 07764d4a05..7fae8b9e1d 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ 
-39,8 +39,10 @@ struct MatchingEntry { pub struct WorktreeStore { next_entry_id: Arc, upstream_client: Option, + downstream_client: Option, + remote_id: u64, dev_server_project_id: Option, - is_shared: bool, + retain_worktrees: bool, worktrees: Vec, worktrees_reordered: bool, #[allow(clippy::type_complexity)] @@ -53,6 +55,7 @@ pub enum WorktreeStoreEvent { WorktreeAdded(Model), WorktreeRemoved(EntityId, WorktreeId), WorktreeOrderChanged, + WorktreeUpdateSent(Model), } impl EventEmitter for WorktreeStore {} @@ -66,23 +69,25 @@ impl WorktreeStore { client.add_model_request_handler(Self::handle_expand_project_entry); } - pub fn new(retain_worktrees: bool, fs: Arc) -> Self { + pub fn new( + upstream_client: Option, + retain_worktrees: bool, + fs: Arc, + ) -> Self { Self { next_entry_id: Default::default(), loading_worktrees: Default::default(), - upstream_client: None, dev_server_project_id: None, - is_shared: retain_worktrees, + downstream_client: None, worktrees: Vec::new(), worktrees_reordered: false, + retain_worktrees, + remote_id: 0, + upstream_client, fs, } } - pub fn set_upstream_client(&mut self, client: AnyProtoClient) { - self.upstream_client = Some(client); - } - pub fn set_dev_server_project_id(&mut self, id: DevServerProjectId) { self.dev_server_project_id = Some(id); } @@ -201,6 +206,13 @@ impl WorktreeStore { path: abs_path.clone(), }) .await?; + + if let Some(existing_worktree) = this.read_with(&cx, |this, cx| { + this.worktree_for_id(WorktreeId::from_proto(response.worktree_id), cx) + })? 
{ + return Ok(existing_worktree); + } + let worktree = cx.update(|cx| { Worktree::remote( 0, @@ -302,7 +314,10 @@ impl WorktreeStore { } pub fn add(&mut self, worktree: &Model, cx: &mut ModelContext) { - let push_strong_handle = self.is_shared || worktree.read(cx).is_visible(); + let worktree_id = worktree.read(cx).id(); + debug_assert!(!self.worktrees().any(|w| w.read(cx).id() == worktree_id)); + + let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible(); let handle = if push_strong_handle { WorktreeHandle::Strong(worktree.clone()) } else { @@ -322,13 +337,15 @@ impl WorktreeStore { } cx.emit(WorktreeStoreEvent::WorktreeAdded(worktree.clone())); + self.send_project_updates(cx); let handle_id = worktree.entity_id(); - cx.observe_release(worktree, move |_, worktree, cx| { + cx.observe_release(worktree, move |this, worktree, cx| { cx.emit(WorktreeStoreEvent::WorktreeRemoved( handle_id, worktree.id(), )); + this.send_project_updates(cx); }) .detach(); } @@ -349,6 +366,7 @@ impl WorktreeStore { false } }); + self.send_project_updates(cx); } pub fn set_worktrees_reordered(&mut self, worktrees_reordered: bool) { @@ -359,8 +377,6 @@ impl WorktreeStore { &mut self, worktrees: Vec, replica_id: ReplicaId, - remote_id: u64, - client: AnyProtoClient, cx: &mut ModelContext, ) -> Result<()> { let mut old_worktrees_by_id = self @@ -372,18 +388,31 @@ impl WorktreeStore { }) .collect::>(); + let client = self + .upstream_client + .clone() + .ok_or_else(|| anyhow!("invalid project"))?; + for worktree in worktrees { if let Some(old_worktree) = old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id)) { - self.worktrees.push(WorktreeHandle::Strong(old_worktree)); + let push_strong_handle = + self.retain_worktrees || old_worktree.read(cx).is_visible(); + let handle = if push_strong_handle { + WorktreeHandle::Strong(old_worktree.clone()) + } else { + WorktreeHandle::Weak(old_worktree.downgrade()) + }; + self.worktrees.push(handle); } else { self.add( - 
&Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx), + &Worktree::remote(self.remote_id, replica_id, worktree, client.clone(), cx), cx, ); } } + self.send_project_updates(cx); Ok(()) } @@ -446,33 +475,109 @@ impl WorktreeStore { } } - pub fn set_shared(&mut self, is_shared: bool, cx: &mut ModelContext) { - self.is_shared = is_shared; + pub fn send_project_updates(&mut self, cx: &mut ModelContext) { + let Some(downstream_client) = self.downstream_client.clone() else { + return; + }; + let project_id = self.remote_id; + + let update = proto::UpdateProject { + project_id, + worktrees: self.worktree_metadata_protos(cx), + }; + + // collab has bad concurrency guarantees, so we send requests in serial. + let update_project = if downstream_client.goes_via_collab() { + Some(downstream_client.request(update)) + } else { + downstream_client.send(update).log_err(); + None + }; + cx.spawn(|this, mut cx| async move { + if let Some(update_project) = update_project { + update_project.await?; + } + + this.update(&mut cx, |this, cx| { + let worktrees = this.worktrees().collect::>(); + + for worktree in worktrees { + worktree.update(cx, |worktree, cx| { + let client = downstream_client.clone(); + worktree.observe_updates(project_id, cx, { + move |update| { + let client = client.clone(); + async move { + if client.goes_via_collab() { + client.request(update).map(|result| result.is_ok()).await + } else { + client.send(update).is_ok() + } + } + } + }); + }); + + cx.emit(WorktreeStoreEvent::WorktreeUpdateSent(worktree.clone())) + } + + anyhow::Ok(()) + }) + }) + .detach_and_log_err(cx); + } + + pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec { + self.worktrees() + .map(|worktree| { + let worktree = worktree.read(cx); + proto::WorktreeMetadata { + id: worktree.id().to_proto(), + root_name: worktree.root_name().into(), + visible: worktree.is_visible(), + abs_path: worktree.abs_path().to_string_lossy().into(), + } + }) + .collect() + } + + pub fn shared( + 
&mut self, + remote_id: u64, + downsteam_client: AnyProtoClient, + cx: &mut ModelContext, + ) { + self.retain_worktrees = true; + self.remote_id = remote_id; + self.downstream_client = Some(downsteam_client); // When shared, retain all worktrees - if is_shared { - for worktree_handle in self.worktrees.iter_mut() { - match worktree_handle { - WorktreeHandle::Strong(_) => {} - WorktreeHandle::Weak(worktree) => { - if let Some(worktree) = worktree.upgrade() { - *worktree_handle = WorktreeHandle::Strong(worktree); - } + for worktree_handle in self.worktrees.iter_mut() { + match worktree_handle { + WorktreeHandle::Strong(_) => {} + WorktreeHandle::Weak(worktree) => { + if let Some(worktree) = worktree.upgrade() { + *worktree_handle = WorktreeHandle::Strong(worktree); } } } } + self.send_project_updates(cx); + } + + pub fn unshared(&mut self, cx: &mut ModelContext) { + self.retain_worktrees = false; + self.downstream_client.take(); + // When not shared, only retain the visible worktrees - else { - for worktree_handle in self.worktrees.iter_mut() { - if let WorktreeHandle::Strong(worktree) = worktree_handle { - let is_visible = worktree.update(cx, |worktree, _| { - worktree.stop_observing_updates(); - worktree.is_visible() - }); - if !is_visible { - *worktree_handle = WorktreeHandle::Weak(worktree.downgrade()); - } + for worktree_handle in self.worktrees.iter_mut() { + if let WorktreeHandle::Strong(worktree) = worktree_handle { + let is_visible = worktree.update(cx, |worktree, _| { + worktree.stop_observing_updates(); + worktree.is_visible() + }); + if !is_visible { + *worktree_handle = WorktreeHandle::Weak(worktree.downgrade()); } } } diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 4aab731e64..10608b74f3 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -247,7 +247,8 @@ impl SshSession { let line_ix = start_ix + ix; let content = &stderr_buffer[start_ix..line_ix]; start_ix = line_ix + 1; - if 
let Ok(record) = serde_json::from_slice::(content) { + if let Ok(mut record) = serde_json::from_slice::(content) { + record.message = format!("(remote) {}", record.message); record.log(log::logger()) } else { eprintln!("(remote) {}", String::from_utf8_lossy(content)); @@ -469,6 +470,10 @@ impl ProtoClient for SshSession { fn message_handler_set(&self) -> &Mutex { &self.state } + + fn goes_via_collab(&self) -> bool { + false + } } impl SshClientState { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index bbd82281d8..54f48e3626 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -44,7 +44,11 @@ impl HeadlessProject { pub fn new(session: Arc, fs: Arc, cx: &mut ModelContext) -> Self { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); - let worktree_store = cx.new_model(|_| WorktreeStore::new(true, fs.clone())); + let worktree_store = cx.new_model(|cx| { + let mut store = WorktreeStore::new(None, true, fs.clone()); + store.shared(SSH_PROJECT_ID, session.clone().into(), cx); + store + }); let buffer_store = cx.new_model(|cx| { let mut buffer_store = BufferStore::new(worktree_store.clone(), Some(SSH_PROJECT_ID), cx); @@ -196,18 +200,11 @@ impl HeadlessProject { .await?; this.update(&mut cx, |this, cx| { - let session = this.session.clone(); this.worktree_store.update(cx, |worktree_store, cx| { worktree_store.add(&worktree, cx); }); - worktree.update(cx, |worktree, cx| { - worktree.observe_updates(0, cx, move |update| { - session.send(update).ok(); - futures::future::ready(true) - }); - proto::AddWorktreeResponse { - worktree_id: worktree.id().to_proto(), - } + worktree.update(cx, |worktree, _| proto::AddWorktreeResponse { + worktree_id: worktree.id().to_proto(), }) }) } diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 4a990a8433..89ef580cdf 100644 --- a/crates/rpc/src/proto_client.rs +++ 
b/crates/rpc/src/proto_client.rs @@ -27,6 +27,8 @@ pub trait ProtoClient: Send + Sync { fn send_response(&self, envelope: Envelope, message_type: &'static str) -> anyhow::Result<()>; fn message_handler_set(&self) -> &parking_lot::Mutex; + + fn goes_via_collab(&self) -> bool; } #[derive(Default)] @@ -139,6 +141,10 @@ impl AnyProtoClient { Self(client) } + pub fn goes_via_collab(&self) -> bool { + self.0.goes_via_collab() + } + pub fn request( &self, request: T, From 28a54ce122fdd5efb2e23cc77a5efab78c07061c Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Thu, 19 Sep 2024 14:16:01 -0600 Subject: [PATCH 45/96] Add diagnostic information to context of inline assistant (#18096) Release Notes: - Added Diagnostic information to inline assistant. This enables users to just say "Fix this" and have the model know what the errors are. --- assets/prompts/content_prompt.hbs | 11 +++++++++++ crates/assistant/src/prompts.rs | 23 +++++++++++++++++++++-- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/assets/prompts/content_prompt.hbs b/assets/prompts/content_prompt.hbs index cf4141349b..e944e230f5 100644 --- a/assets/prompts/content_prompt.hbs +++ b/assets/prompts/content_prompt.hbs @@ -47,6 +47,17 @@ And here's the section to rewrite based on that prompt again for reference: {{{rewrite_section}}} + +{{#if diagnostic_errors}} +{{#each diagnostic_errors}} + + {{line_number}} + {{error_message}} + {{code_content}} + +{{/each}} +{{/if}} + {{/if}} Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved. 
diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index ae2ab4787e..7d99a70d14 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -4,13 +4,20 @@ use fs::Fs; use futures::StreamExt; use gpui::AssetSource; use handlebars::{Handlebars, RenderError}; -use language::{BufferSnapshot, LanguageName}; +use language::{BufferSnapshot, LanguageName, Point}; use parking_lot::Mutex; use serde::Serialize; use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration}; use text::LineEnding; use util::ResultExt; +#[derive(Serialize)] +pub struct ContentPromptDiagnosticContext { + pub line_number: usize, + pub error_message: String, + pub code_content: String, +} + #[derive(Serialize)] pub struct ContentPromptContext { pub content_type: String, @@ -20,6 +27,7 @@ pub struct ContentPromptContext { pub document_content: String, pub user_prompt: String, pub rewrite_section: Option, + pub diagnostic_errors: Vec, } #[derive(Serialize)] @@ -261,6 +269,17 @@ impl PromptBuilder { } else { None }; + let diagnostics = buffer.diagnostics_in_range::<_, Point>(range, false); + let diagnostic_errors: Vec = diagnostics + .map(|entry| { + let start = entry.range.start; + ContentPromptDiagnosticContext { + line_number: (start.row + 1) as usize, + error_message: entry.diagnostic.message.clone(), + code_content: buffer.text_for_range(entry.range.clone()).collect(), + } + }) + .collect(); let context = ContentPromptContext { content_type: content_type.to_string(), @@ -270,8 +289,8 @@ impl PromptBuilder { document_content, user_prompt, rewrite_section, + diagnostic_errors, }; - self.handlebars.lock().render("content_prompt", &context) } From 82e6b1e0e5fe4f2a04fba2fb6f3e7d1aae0974a1 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Thu, 19 Sep 2024 17:22:11 -0400 Subject: [PATCH 46/96] docs: Update glibc requirements for current binaries (#18101) --- docs/src/linux.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/docs/src/linux.md b/docs/src/linux.md index 812a3707d0..17334c325c 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -16,7 +16,7 @@ The Zed installed by the script works best on systems that: - have a Vulkan compatible GPU available (for example Linux on an M-series macBook) - have a system-wide glibc (NixOS and Alpine do not by default) - - x86_64 (Intel/AMD): glibc version >= 2.29 (Ubuntu 20 and newer; Amazon Linux >2023) + - x86_64 (Intel/AMD): glibc version >= 2.35 (Ubuntu 22 and newer) - aarch64 (ARM): glibc version >= 2.35 (Ubuntu 22 and newer) Both Nix and Alpine have third-party Zed packages available (though they are currently a few weeks out of date). If you'd like to use our builds they do work if you install a glibc compatibility layer. On NixOS you can try [nix-ld](https://github.com/Mic92/nix-ld), and on Alpine [gcompat](https://wiki.alpinelinux.org/wiki/Running_glibc_programs). @@ -24,7 +24,7 @@ Both Nix and Alpine have third-party Zed packages available (though they are cur You will need to build from source for: - architectures other than 64-bit Intel or 64-bit ARM (for example a 32-bit or RISC-V machine) -- Amazon Linux 2 on x86_64 +- Amazon Linux - Rocky Linux 9.3 ## Other ways to install Zed on Linux From edf2c192500194192320ff21e86a2846e5089d48 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 19 Sep 2024 15:28:30 -0600 Subject: [PATCH 47/96] Hide GPU problems from Slack (#18087) Release Notes: - N/A --------- Co-authored-by: Marshall Co-authored-by: Marshall Bowers --- crates/collab/src/api/events.rs | 31 +++++++++++++++++++++++++++---- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 1be8f9c37b..008c76e048 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -18,8 +18,8 @@ use sha2::{Digest, Sha256}; use std::sync::{Arc, OnceLock}; use telemetry_events::{ ActionEvent, AppEvent, AssistantEvent, CallEvent, 
CpuEvent, EditEvent, EditorEvent, Event, - EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent, - SettingEvent, + EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, Panic, + ReplEvent, SettingEvent, }; use uuid::Uuid; @@ -296,10 +296,11 @@ pub async fn post_panic( version = %panic.app_version, os_name = %panic.os_name, os_version = %panic.os_version.clone().unwrap_or_default(), - installation_id = %panic.installation_id.unwrap_or_default(), + installation_id = %panic.installation_id.clone().unwrap_or_default(), description = %panic.payload, backtrace = %panic.backtrace.join("\n"), - "panic report"); + "panic report" + ); let backtrace = if panic.backtrace.len() > 25 { let total = panic.backtrace.len(); @@ -317,6 +318,11 @@ pub async fn post_panic( } else { panic.backtrace.join("\n") }; + + if !report_to_slack(&panic) { + return Ok(()); + } + let backtrace_with_summary = panic.payload + "\n" + &backtrace; if let Some(slack_panics_webhook) = app.config.slack_panics_webhook.clone() { @@ -357,6 +363,23 @@ pub async fn post_panic( Ok(()) } +fn report_to_slack(panic: &Panic) -> bool { + if panic.os_name == "Linux" { + if panic.payload.contains("ERROR_SURFACE_LOST_KHR") { + return false; + } + + if panic + .payload + .contains("GPU has crashed, and no debug information is available") + { + return false; + } + } + + true +} + pub async fn post_events( Extension(app): Extension>, TypedHeader(ZedChecksumHeader(checksum)): TypedHeader, From 740803d745e1fe3b711c3c1a05ce3a2616f123cb Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Thu, 19 Sep 2024 19:43:32 -0400 Subject: [PATCH 48/96] Bump release_notes to v2 endpoint (#18108) Partially addresses https://github.com/zed-industries/zed/issues/17527 SCR-20240919-rcik Release Notes: - Enhanced the `auto update: view release notes locally` feature to display release notes for each patch version associated with the installed minor version. 
--- crates/auto_update/src/auto_update.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index cfda6d6e58..1fe89cce0f 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -268,7 +268,7 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext Date: Thu, 19 Sep 2024 17:49:22 -0600 Subject: [PATCH 49/96] Fix prompt reloading in dev mode (#18095) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I think I nulled out the repo path to test the non dev mode case and then forgot to reenable it 🤦‍♂️ . Release Notes: - N/A --- crates/assistant/src/prompts.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 7d99a70d14..3b9f75bac9 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -90,10 +90,9 @@ impl PromptBuilder { /// and application context. /// * `handlebars` - An `Arc>` for registering and updating templates. fn watch_fs_for_template_overrides( - mut params: PromptLoadingParams, + params: PromptLoadingParams, handlebars: Arc>>, ) { - params.repo_path = None; let templates_dir = paths::prompt_overrides_dir(params.repo_path.as_deref()); params.cx.background_executor() .spawn(async move { From 15b4130fa551e38841b69f135218ef75cfb010db Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Thu, 19 Sep 2024 17:50:00 -0600 Subject: [PATCH 50/96] Introduce the ability to cycle between alternative inline assists (#18098) Release Notes: - Added a new `assistant.inline_alternatives` setting to configure additional models that will be used to perform inline assists in parallel. 
--------- Co-authored-by: Nathan Co-authored-by: Roy Co-authored-by: Adam --- assets/keymaps/default-linux.json | 7 + assets/keymaps/default-macos.json | 7 + crates/assistant/src/assistant.rs | 13 + crates/assistant/src/assistant_settings.rs | 13 +- crates/assistant/src/inline_assistant.rs | 702 ++++++++++++++++----- crates/language_model/src/registry.rs | 32 + crates/multi_buffer/src/multi_buffer.rs | 20 + docs/src/assistant/configuration.md | 26 + 8 files changed, 642 insertions(+), 178 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 542f6c2df4..f15c4dfe22 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -520,6 +520,13 @@ "alt-enter": "editor::Newline" } }, + { + "context": "PromptEditor", + "bindings": { + "ctrl-[": "assistant::CyclePreviousInlineAssist", + "ctrl-]": "assistant::CycleNextInlineAssist" + } + }, { "context": "ProjectSearchBar && !in_replace", "bindings": { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 77fac3254b..a58112b3c0 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -527,6 +527,13 @@ "ctrl-enter": "assistant::InlineAssist" } }, + { + "context": "PromptEditor", + "bindings": { + "ctrl-[": "assistant::CyclePreviousInlineAssist", + "ctrl-]": "assistant::CycleNextInlineAssist" + } + }, { "context": "ProjectSearchBar && !in_replace", "bindings": { diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index d7466878c9..8b9c66ee55 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -69,6 +69,8 @@ actions!( ConfirmCommand, NewContext, ToggleModelSelector, + CycleNextInlineAssist, + CyclePreviousInlineAssist ] ); @@ -359,8 +361,19 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) { let settings = AssistantSettings::get_global(cx); let provider_name = 
LanguageModelProviderId::from(settings.default_model.provider.clone()); let model_id = LanguageModelId::from(settings.default_model.model.clone()); + let inline_alternatives = settings + .inline_alternatives + .iter() + .map(|alternative| { + ( + LanguageModelProviderId::from(alternative.provider.clone()), + LanguageModelId::from(alternative.model.clone()), + ) + }) + .collect::>(); LanguageModelRegistry::global(cx).update(cx, |registry, cx| { registry.select_active_model(&provider_name, &model_id, cx); + registry.select_inline_alternative_models(inline_alternatives, cx); }); } diff --git a/crates/assistant/src/assistant_settings.rs b/crates/assistant/src/assistant_settings.rs index e2c6a8eb24..5aa379bae3 100644 --- a/crates/assistant/src/assistant_settings.rs +++ b/crates/assistant/src/assistant_settings.rs @@ -59,6 +59,7 @@ pub struct AssistantSettings { pub default_width: Pixels, pub default_height: Pixels, pub default_model: LanguageModelSelection, + pub inline_alternatives: Vec, pub using_outdated_settings_version: bool, } @@ -236,6 +237,7 @@ impl AssistantSettingsContent { }) } }), + inline_alternatives: None, }, VersionedAssistantSettingsContent::V2(settings) => settings.clone(), }, @@ -254,6 +256,7 @@ impl AssistantSettingsContent { .id() .to_string(), }), + inline_alternatives: None, }, } } @@ -369,6 +372,7 @@ impl Default for VersionedAssistantSettingsContent { default_width: None, default_height: None, default_model: None, + inline_alternatives: None, }) } } @@ -397,6 +401,8 @@ pub struct AssistantSettingsContentV2 { default_height: Option, /// The default model to use when creating new contexts. default_model: Option, + /// Additional models with which to generate alternatives when performing inline assists. 
+ inline_alternatives: Option>, } #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] @@ -517,10 +523,8 @@ impl Settings for AssistantSettings { &mut settings.default_height, value.default_height.map(Into::into), ); - merge( - &mut settings.default_model, - value.default_model.map(Into::into), - ); + merge(&mut settings.default_model, value.default_model); + merge(&mut settings.inline_alternatives, value.inline_alternatives); // merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference } @@ -574,6 +578,7 @@ mod tests { provider: "test-provider".into(), model: "gpt-99".into(), }), + inline_alternatives: None, enabled: None, button: None, dock: None, diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index c9360213ae..428b33f3bb 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1,7 +1,7 @@ use crate::{ assistant_settings::AssistantSettings, humanize_token_count, prompts::PromptBuilder, - AssistantPanel, AssistantPanelEvent, CharOperation, LineDiff, LineOperation, ModelSelector, - StreamingDiff, + AssistantPanel, AssistantPanelEvent, CharOperation, CycleNextInlineAssist, + CyclePreviousInlineAssist, LineDiff, LineOperation, ModelSelector, StreamingDiff, }; use anyhow::{anyhow, Context as _, Result}; use client::{telemetry::Telemetry, ErrorExt}; @@ -25,13 +25,13 @@ use futures::{ SinkExt, Stream, StreamExt, }; use gpui::{ - anchored, deferred, point, AppContext, ClickEvent, EventEmitter, FocusHandle, FocusableView, - FontWeight, Global, HighlightStyle, Model, ModelContext, Subscription, Task, TextStyle, - UpdateGlobal, View, ViewContext, WeakView, WindowContext, + anchored, deferred, point, AnyElement, AppContext, ClickEvent, EventEmitter, FocusHandle, + FocusableView, FontWeight, Global, HighlightStyle, Model, ModelContext, Subscription, Task, + TextStyle, UpdateGlobal, View, ViewContext, WeakView, 
WindowContext, }; use language::{Buffer, IndentKind, Point, Selection, TransactionId}; use language_model::{ - LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, + LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, }; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; @@ -41,7 +41,7 @@ use smol::future::FutureExt; use std::{ cmp, future::{self, Future}, - mem, + iter, mem, ops::{Range, RangeInclusive}, pin::Pin, sync::Arc, @@ -85,7 +85,7 @@ pub struct InlineAssistant { async_watch::Receiver, ), >, - confirmed_assists: HashMap>, + confirmed_assists: HashMap>, prompt_history: VecDeque, prompt_builder: Arc, telemetry: Option>, @@ -157,7 +157,7 @@ impl InlineAssistant { if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { for assist_id in editor_assists.assist_ids.clone() { let assist = &self.assists[&assist_id]; - if let CodegenStatus::Done = &assist.codegen.read(cx).status { + if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { self.finish_assist(assist_id, false, cx) } } @@ -553,7 +553,7 @@ impl InlineAssistant { let assist_range = assist.range.to_offset(&buffer); if assist_range.contains(&selection.start) && assist_range.contains(&selection.end) { - if matches!(assist.codegen.read(cx).status, CodegenStatus::Pending) { + if matches!(assist.codegen.read(cx).status(cx), CodegenStatus::Pending) { self.dismiss_assist(*assist_id, cx); } else { self.finish_assist(*assist_id, false, cx); @@ -671,7 +671,7 @@ impl InlineAssistant { for assist_id in editor_assists.assist_ids.clone() { let assist = &self.assists[&assist_id]; if matches!( - assist.codegen.read(cx).status, + assist.codegen.read(cx).status(cx), CodegenStatus::Error(_) | CodegenStatus::Done ) { let assist_range = assist.range.to_offset(&snapshot); @@ -774,7 +774,9 @@ impl InlineAssistant { if undo { assist.codegen.update(cx, |codegen, cx| codegen.undo(cx)); } else { - 
self.confirmed_assists.insert(assist_id, assist.codegen); + let confirmed_alternative = assist.codegen.read(cx).active_alternative().clone(); + self.confirmed_assists + .insert(assist_id, confirmed_alternative); } } @@ -978,12 +980,7 @@ impl InlineAssistant { assist .codegen .update(cx, |codegen, cx| { - codegen.start( - assist.range.clone(), - user_prompt, - assistant_panel_context, - cx, - ) + codegen.start(user_prompt, assistant_panel_context, cx) }) .log_err(); @@ -1008,7 +1005,7 @@ impl InlineAssistant { pub fn assist_status(&self, assist_id: InlineAssistId, cx: &AppContext) -> InlineAssistStatus { if let Some(assist) = self.assists.get(&assist_id) { - match &assist.codegen.read(cx).status { + match assist.codegen.read(cx).status(cx) { CodegenStatus::Idle => InlineAssistStatus::Idle, CodegenStatus::Pending => InlineAssistStatus::Pending, CodegenStatus::Done => InlineAssistStatus::Done, @@ -1037,16 +1034,16 @@ impl InlineAssistant { for assist_id in assist_ids { if let Some(assist) = self.assists.get(assist_id) { let codegen = assist.codegen.read(cx); - let buffer = codegen.buffer.read(cx).read(cx); - foreground_ranges.extend(codegen.last_equal_ranges().iter().cloned()); + let buffer = codegen.buffer(cx).read(cx).read(cx); + foreground_ranges.extend(codegen.last_equal_ranges(cx).iter().cloned()); let pending_range = - codegen.edit_position.unwrap_or(assist.range.start)..assist.range.end; + codegen.edit_position(cx).unwrap_or(assist.range.start)..assist.range.end; if pending_range.end.to_offset(&buffer) > pending_range.start.to_offset(&buffer) { gutter_pending_ranges.push(pending_range); } - if let Some(edit_position) = codegen.edit_position { + if let Some(edit_position) = codegen.edit_position(cx) { let edited_range = assist.range.start..edit_position; if edited_range.end.to_offset(&buffer) > edited_range.start.to_offset(&buffer) { gutter_transformed_ranges.push(edited_range); @@ -1054,7 +1051,8 @@ impl InlineAssistant { } if assist.decorations.is_some() { - 
inserted_row_ranges.extend(codegen.diff.inserted_row_ranges.iter().cloned()); + inserted_row_ranges + .extend(codegen.diff(cx).inserted_row_ranges.iter().cloned()); } } } @@ -1125,9 +1123,9 @@ impl InlineAssistant { }; let codegen = assist.codegen.read(cx); - let old_snapshot = codegen.snapshot.clone(); - let old_buffer = codegen.old_buffer.clone(); - let deleted_row_ranges = codegen.diff.deleted_row_ranges.clone(); + let old_snapshot = codegen.snapshot(cx); + let old_buffer = codegen.old_buffer(cx); + let deleted_row_ranges = codegen.diff(cx).deleted_row_ranges.clone(); editor.update(cx, |editor, cx| { let old_blocks = mem::take(&mut decorations.removed_line_block_ids); @@ -1406,8 +1404,15 @@ impl EventEmitter for PromptEditor {} impl Render for PromptEditor { fn render(&mut self, cx: &mut ViewContext) -> impl IntoElement { let gutter_dimensions = *self.gutter_dimensions.lock(); - let status = &self.codegen.read(cx).status; - let buttons = match status { + let codegen = self.codegen.read(cx); + + let mut buttons = Vec::new(); + if codegen.alternative_count(cx) > 1 { + buttons.push(self.render_cycle_controls(cx)); + } + + let status = codegen.status(cx); + buttons.extend(match status { CodegenStatus::Idle => { vec![ IconButton::new("cancel", IconName::Close) @@ -1416,14 +1421,16 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), IconButton::new("start", IconName::SparkleAlt) .icon_color(Color::Muted) .shape(IconButtonShape::Square) .tooltip(|cx| Tooltip::for_action("Transform", &menu::Confirm, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StartRequested)), - ), + ) + .into_any_element(), ] } CodegenStatus::Pending => { @@ -1434,7 +1441,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::text("Cancel Assist", cx)) .on_click( cx.listener(|_, _, cx| 
cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), IconButton::new("stop", IconName::Stop) .icon_color(Color::Error) .shape(IconButtonShape::Square) @@ -1446,9 +1454,8 @@ impl Render for PromptEditor { cx, ) }) - .on_click( - cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StopRequested)), - ), + .on_click(cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::StopRequested))) + .into_any_element(), ] } CodegenStatus::Error(_) | CodegenStatus::Done => { @@ -1459,7 +1466,8 @@ impl Render for PromptEditor { .tooltip(|cx| Tooltip::for_action("Cancel Assist", &menu::Cancel, cx)) .on_click( cx.listener(|_, _, cx| cx.emit(PromptEditorEvent::CancelRequested)), - ), + ) + .into_any_element(), if self.edited_since_done || matches!(status, CodegenStatus::Error(_)) { IconButton::new("restart", IconName::RotateCw) .icon_color(Color::Info) @@ -1475,6 +1483,7 @@ impl Render for PromptEditor { .on_click(cx.listener(|_, _, cx| { cx.emit(PromptEditorEvent::StartRequested); })) + .into_any_element() } else { IconButton::new("confirm", IconName::Check) .icon_color(Color::Info) @@ -1483,12 +1492,14 @@ impl Render for PromptEditor { .on_click(cx.listener(|_, _, cx| { cx.emit(PromptEditorEvent::ConfirmRequested); })) + .into_any_element() }, ] } - }; + }); h_flex() + .key_context("PromptEditor") .bg(cx.theme().colors().editor_background) .border_y_1() .border_color(cx.theme().status().info_border) @@ -1498,6 +1509,8 @@ impl Render for PromptEditor { .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::move_up)) .on_action(cx.listener(Self::move_down)) + .capture_action(cx.listener(Self::cycle_prev)) + .capture_action(cx.listener(Self::cycle_next)) .child( h_flex() .w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0)) @@ -1532,7 +1545,7 @@ impl Render for PromptEditor { ), ) .map(|el| { - let CodegenStatus::Error(error) = &self.codegen.read(cx).status else { + let CodegenStatus::Error(error) = self.codegen.read(cx).status(cx) else { 
return el; }; @@ -1776,7 +1789,7 @@ impl PromptEditor { } fn handle_codegen_changed(&mut self, _: Model, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle => { self.editor .update(cx, |editor, _| editor.set_read_only(false)); @@ -1807,7 +1820,7 @@ impl PromptEditor { } fn cancel(&mut self, _: &editor::actions::Cancel, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle | CodegenStatus::Done | CodegenStatus::Error(_) => { cx.emit(PromptEditorEvent::CancelRequested); } @@ -1818,7 +1831,7 @@ impl PromptEditor { } fn confirm(&mut self, _: &menu::Confirm, cx: &mut ViewContext) { - match &self.codegen.read(cx).status { + match self.codegen.read(cx).status(cx) { CodegenStatus::Idle => { cx.emit(PromptEditorEvent::StartRequested); } @@ -1878,6 +1891,79 @@ impl PromptEditor { } } + fn cycle_prev(&mut self, _: &CyclePreviousInlineAssist, cx: &mut ViewContext) { + self.codegen + .update(cx, |codegen, cx| codegen.cycle_prev(cx)); + } + + fn cycle_next(&mut self, _: &CycleNextInlineAssist, cx: &mut ViewContext) { + self.codegen + .update(cx, |codegen, cx| codegen.cycle_next(cx)); + } + + fn render_cycle_controls(&self, cx: &ViewContext) -> AnyElement { + let codegen = self.codegen.read(cx); + let disabled = matches!(codegen.status(cx), CodegenStatus::Idle); + + h_flex() + .child( + IconButton::new("previous", IconName::ChevronLeft) + .icon_color(Color::Muted) + .disabled(disabled) + .shape(IconButtonShape::Square) + .tooltip({ + let focus_handle = self.editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Previous Alternative", + &CyclePreviousInlineAssist, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(|this, _, cx| { + this.codegen + .update(cx, |codegen, cx| codegen.cycle_prev(cx)) + })), + ) + .child( + Label::new(format!( + "{}/{}", + codegen.active_alternative + 1, + codegen.alternative_count(cx) 
+ )) + .size(LabelSize::Small) + .color(if disabled { + Color::Disabled + } else { + Color::Muted + }), + ) + .child( + IconButton::new("next", IconName::ChevronRight) + .icon_color(Color::Muted) + .disabled(disabled) + .shape(IconButtonShape::Square) + .tooltip({ + let focus_handle = self.editor.focus_handle(cx); + move |cx| { + Tooltip::for_action_in( + "Next Alternative", + &CycleNextInlineAssist, + &focus_handle, + cx, + ) + } + }) + .on_click(cx.listener(|this, _, cx| { + this.codegen + .update(cx, |codegen, cx| codegen.cycle_next(cx)) + })), + ) + .into_any_element() + } + fn render_token_count(&self, cx: &mut ViewContext) -> Option { let model = LanguageModelRegistry::read_global(cx).active_model()?; let token_counts = self.token_counts?; @@ -2124,7 +2210,7 @@ impl InlineAssist { return; }; - if let CodegenStatus::Error(error) = &codegen.read(cx).status { + if let CodegenStatus::Error(error) = codegen.read(cx).status(cx) { if assist.decorations.is_none() { if let Some(workspace) = assist .workspace @@ -2185,12 +2271,9 @@ impl InlineAssist { return future::ready(Err(anyhow!("no user prompt"))).boxed(); }; let assistant_panel_context = self.assistant_panel_context(cx); - self.codegen.read(cx).count_tokens( - self.range.clone(), - user_prompt, - assistant_panel_context, - cx, - ) + self.codegen + .read(cx) + .count_tokens(user_prompt, assistant_panel_context, cx) } } @@ -2201,19 +2284,216 @@ struct InlineAssistDecorations { end_block_id: CustomBlockId, } -#[derive(Debug)] +#[derive(Copy, Clone, Debug)] pub enum CodegenEvent { Finished, Undone, } pub struct Codegen { + alternatives: Vec>, + active_alternative: usize, + subscriptions: Vec, + buffer: Model, + range: Range, + initial_transaction_id: Option, + telemetry: Option>, + builder: Arc, +} + +impl Codegen { + pub fn new( + buffer: Model, + range: Range, + initial_transaction_id: Option, + telemetry: Option>, + builder: Arc, + cx: &mut ModelContext, + ) -> Self { + let codegen = cx.new_model(|cx| { + 
CodegenAlternative::new( + buffer.clone(), + range.clone(), + false, + telemetry.clone(), + builder.clone(), + cx, + ) + }); + let mut this = Self { + alternatives: vec![codegen], + active_alternative: 0, + subscriptions: Vec::new(), + buffer, + range, + initial_transaction_id, + telemetry, + builder, + }; + this.activate(0, cx); + this + } + + fn subscribe_to_alternative(&mut self, cx: &mut ModelContext) { + let codegen = self.active_alternative().clone(); + self.subscriptions.clear(); + self.subscriptions + .push(cx.observe(&codegen, |_, _, cx| cx.notify())); + self.subscriptions + .push(cx.subscribe(&codegen, |_, _, event, cx| cx.emit(*event))); + } + + fn active_alternative(&self) -> &Model { + &self.alternatives[self.active_alternative] + } + + fn status<'a>(&self, cx: &'a AppContext) -> &'a CodegenStatus { + &self.active_alternative().read(cx).status + } + + fn alternative_count(&self, cx: &AppContext) -> usize { + LanguageModelRegistry::read_global(cx) + .inline_alternative_models() + .len() + + 1 + } + + pub fn cycle_prev(&mut self, cx: &mut ModelContext) { + let next_active_ix = if self.active_alternative == 0 { + self.alternatives.len() - 1 + } else { + self.active_alternative - 1 + }; + self.activate(next_active_ix, cx); + } + + pub fn cycle_next(&mut self, cx: &mut ModelContext) { + let next_active_ix = (self.active_alternative + 1) % self.alternatives.len(); + self.activate(next_active_ix, cx); + } + + fn activate(&mut self, index: usize, cx: &mut ModelContext) { + self.active_alternative() + .update(cx, |codegen, cx| codegen.set_active(false, cx)); + self.active_alternative = index; + self.active_alternative() + .update(cx, |codegen, cx| codegen.set_active(true, cx)); + self.subscribe_to_alternative(cx); + cx.notify(); + } + + pub fn start( + &mut self, + user_prompt: String, + assistant_panel_context: Option, + cx: &mut ModelContext, + ) -> Result<()> { + let alternative_models = LanguageModelRegistry::read_global(cx) + .inline_alternative_models() + 
.to_vec(); + + self.active_alternative() + .update(cx, |alternative, cx| alternative.undo(cx)); + self.activate(0, cx); + self.alternatives.truncate(1); + + for _ in 0..alternative_models.len() { + self.alternatives.push(cx.new_model(|cx| { + CodegenAlternative::new( + self.buffer.clone(), + self.range.clone(), + false, + self.telemetry.clone(), + self.builder.clone(), + cx, + ) + })); + } + + let primary_model = LanguageModelRegistry::read_global(cx) + .active_model() + .context("no active model")?; + + for (model, alternative) in iter::once(primary_model) + .chain(alternative_models) + .zip(&self.alternatives) + { + alternative.update(cx, |alternative, cx| { + alternative.start( + user_prompt.clone(), + assistant_panel_context.clone(), + model.clone(), + cx, + ) + })?; + } + + Ok(()) + } + + pub fn stop(&mut self, cx: &mut ModelContext) { + for codegen in &self.alternatives { + codegen.update(cx, |codegen, cx| codegen.stop(cx)); + } + } + + pub fn undo(&mut self, cx: &mut ModelContext) { + self.active_alternative() + .update(cx, |codegen, cx| codegen.undo(cx)); + + self.buffer.update(cx, |buffer, cx| { + if let Some(transaction_id) = self.initial_transaction_id.take() { + buffer.undo_transaction(transaction_id, cx); + buffer.refresh_preview(cx); + } + }); + } + + pub fn count_tokens( + &self, + user_prompt: String, + assistant_panel_context: Option, + cx: &AppContext, + ) -> BoxFuture<'static, Result> { + self.active_alternative() + .read(cx) + .count_tokens(user_prompt, assistant_panel_context, cx) + } + + pub fn buffer(&self, cx: &AppContext) -> Model { + self.active_alternative().read(cx).buffer.clone() + } + + pub fn old_buffer(&self, cx: &AppContext) -> Model { + self.active_alternative().read(cx).old_buffer.clone() + } + + pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { + self.active_alternative().read(cx).snapshot.clone() + } + + pub fn edit_position(&self, cx: &AppContext) -> Option { + self.active_alternative().read(cx).edit_position + } 
+ + fn diff<'a>(&self, cx: &'a AppContext) -> &'a Diff { + &self.active_alternative().read(cx).diff + } + + pub fn last_equal_ranges<'a>(&self, cx: &'a AppContext) -> &'a [Range] { + self.active_alternative().read(cx).last_equal_ranges() + } +} + +impl EventEmitter for Codegen {} + +pub struct CodegenAlternative { buffer: Model, old_buffer: Model, snapshot: MultiBufferSnapshot, edit_position: Option, + range: Range, last_equal_ranges: Vec>, - initial_transaction_id: Option, transformation_transaction_id: Option, status: CodegenStatus, generation: Task<()>, @@ -2221,6 +2501,9 @@ pub struct Codegen { telemetry: Option>, _subscription: gpui::Subscription, builder: Arc, + active: bool, + edits: Vec<(Range, String)>, + line_operations: Vec, } enum CodegenStatus { @@ -2242,13 +2525,13 @@ impl Diff { } } -impl EventEmitter for Codegen {} +impl EventEmitter for CodegenAlternative {} -impl Codegen { +impl CodegenAlternative { pub fn new( buffer: Model, range: Range, - initial_transaction_id: Option, + active: bool, telemetry: Option>, builder: Arc, cx: &mut ModelContext, @@ -2287,8 +2570,33 @@ impl Codegen { diff: Diff::default(), telemetry, _subscription: cx.subscribe(&buffer, Self::handle_buffer_event), - initial_transaction_id, builder, + active, + edits: Vec::new(), + line_operations: Vec::new(), + range, + } + } + + fn set_active(&mut self, active: bool, cx: &mut ModelContext) { + if active != self.active { + self.active = active; + + if self.active { + let edits = self.edits.clone(); + self.apply_edits(edits, cx); + if matches!(self.status, CodegenStatus::Pending) { + let line_operations = self.line_operations.clone(); + self.reapply_line_based_diff(line_operations, cx); + } else { + self.reapply_batch_diff(cx).detach(); + } + } else if let Some(transaction_id) = self.transformation_transaction_id.take() { + self.buffer.update(cx, |buffer, cx| { + buffer.undo_transaction(transaction_id, cx); + buffer.forget_transaction(transaction_id, cx); + }); + } } } @@ -2313,14 
+2621,12 @@ impl Codegen { pub fn count_tokens( &self, - edit_range: Range, user_prompt: String, assistant_panel_context: Option, cx: &AppContext, ) -> BoxFuture<'static, Result> { if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() { - let request = - self.build_request(user_prompt, assistant_panel_context.clone(), edit_range, cx); + let request = self.build_request(user_prompt, assistant_panel_context.clone(), cx); match request { Ok(request) => { let total_count = model.count_tokens(request.clone(), cx); @@ -2345,39 +2651,31 @@ impl Codegen { pub fn start( &mut self, - edit_range: Range, user_prompt: String, assistant_panel_context: Option, + model: Arc, cx: &mut ModelContext, ) -> Result<()> { - let model = LanguageModelRegistry::read_global(cx) - .active_model() - .context("no active model")?; - if let Some(transformation_transaction_id) = self.transformation_transaction_id.take() { self.buffer.update(cx, |buffer, cx| { buffer.undo_transaction(transformation_transaction_id, cx); }); } - self.edit_position = Some(edit_range.start.bias_right(&self.snapshot)); + self.edit_position = Some(self.range.start.bias_right(&self.snapshot)); let telemetry_id = model.telemetry_id(); - let chunks: LocalBoxFuture>>> = if user_prompt - .trim() - .to_lowercase() - == "delete" - { - async { Ok(stream::empty().boxed()) }.boxed_local() - } else { - let request = - self.build_request(user_prompt, assistant_panel_context, edit_range.clone(), cx)?; + let chunks: LocalBoxFuture>>> = + if user_prompt.trim().to_lowercase() == "delete" { + async { Ok(stream::empty().boxed()) }.boxed_local() + } else { + let request = self.build_request(user_prompt, assistant_panel_context, cx)?; - let chunks = - cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await }); - async move { Ok(chunks.await?.boxed()) }.boxed_local() - }; - self.handle_stream(telemetry_id, edit_range, chunks, cx); + let chunks = cx + .spawn(|_, cx| async move { 
model.stream_completion_text(request, &cx).await }); + async move { Ok(chunks.await?.boxed()) }.boxed_local() + }; + self.handle_stream(telemetry_id, chunks, cx); Ok(()) } @@ -2385,11 +2683,10 @@ impl Codegen { &self, user_prompt: String, assistant_panel_context: Option, - edit_range: Range, cx: &AppContext, ) -> Result { let buffer = self.buffer.read(cx).snapshot(cx); - let language = buffer.language_at(edit_range.start); + let language = buffer.language_at(self.range.start); let language_name = if let Some(language) = language.as_ref() { if Arc::ptr_eq(language, &language::PLAIN_TEXT) { None @@ -2401,8 +2698,8 @@ impl Codegen { }; let language_name = language_name.as_ref(); - let start = buffer.point_to_buffer_offset(edit_range.start); - let end = buffer.point_to_buffer_offset(edit_range.end); + let start = buffer.point_to_buffer_offset(self.range.start); + let end = buffer.point_to_buffer_offset(self.range.end); let (buffer, range) = if let Some((start, end)) = start.zip(end) { let (start_buffer, start_buffer_offset) = start; let (end_buffer, end_buffer_offset) = end; @@ -2442,16 +2739,15 @@ impl Codegen { pub fn handle_stream( &mut self, model_telemetry_id: String, - edit_range: Range, stream: impl 'static + Future>>>, cx: &mut ModelContext, ) { let snapshot = self.snapshot.clone(); let selected_text = snapshot - .text_for_range(edit_range.start..edit_range.end) + .text_for_range(self.range.start..self.range.end) .collect::(); - let selection_start = edit_range.start.to_point(&snapshot); + let selection_start = self.range.start.to_point(&snapshot); // Start with the indentation of the first line in the selection let mut suggested_line_indent = snapshot @@ -2462,7 +2758,7 @@ impl Codegen { // If the first line in the selection does not have indentation, check the following lines if suggested_line_indent.len == 0 && suggested_line_indent.kind == IndentKind::Space { - for row in selection_start.row..=edit_range.end.to_point(&snapshot).row { + for row in 
selection_start.row..=self.range.end.to_point(&snapshot).row { let line_indent = snapshot.indent_size_for_line(MultiBufferRow(row)); // Prefer tabs if a line in the selection uses tabs as indentation if line_indent.kind == IndentKind::Tab { @@ -2475,7 +2771,7 @@ impl Codegen { let telemetry = self.telemetry.clone(); self.diff = Diff::default(); self.status = CodegenStatus::Pending; - let mut edit_start = edit_range.start.to_offset(&snapshot); + let mut edit_start = self.range.start.to_offset(&snapshot); self.generation = cx.spawn(|codegen, mut cx| { async move { let chunks = stream.await; @@ -2597,68 +2893,42 @@ impl Codegen { Ok(()) }); - while let Some((char_ops, line_diff)) = diff_rx.next().await { + while let Some((char_ops, line_ops)) = diff_rx.next().await { codegen.update(&mut cx, |codegen, cx| { codegen.last_equal_ranges.clear(); - let transaction = codegen.buffer.update(cx, |buffer, cx| { - // Avoid grouping assistant edits with user edits. - buffer.finalize_last_transaction(cx); + let edits = char_ops + .into_iter() + .filter_map(|operation| match operation { + CharOperation::Insert { text } => { + let edit_start = snapshot.anchor_after(edit_start); + Some((edit_start..edit_start, text)) + } + CharOperation::Delete { bytes } => { + let edit_end = edit_start + bytes; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + Some((edit_range, String::new())) + } + CharOperation::Keep { bytes } => { + let edit_end = edit_start + bytes; + let edit_range = snapshot.anchor_after(edit_start) + ..snapshot.anchor_before(edit_end); + edit_start = edit_end; + codegen.last_equal_ranges.push(edit_range); + None + } + }) + .collect::>(); - buffer.start_transaction(cx); - buffer.edit( - char_ops - .into_iter() - .filter_map(|operation| match operation { - CharOperation::Insert { text } => { - let edit_start = snapshot.anchor_after(edit_start); - Some((edit_start..edit_start, text)) - } - CharOperation::Delete { 
bytes } => { - let edit_end = edit_start + bytes; - let edit_range = snapshot.anchor_after(edit_start) - ..snapshot.anchor_before(edit_end); - edit_start = edit_end; - Some((edit_range, String::new())) - } - CharOperation::Keep { bytes } => { - let edit_end = edit_start + bytes; - let edit_range = snapshot.anchor_after(edit_start) - ..snapshot.anchor_before(edit_end); - edit_start = edit_end; - codegen.last_equal_ranges.push(edit_range); - None - } - }), - None, - cx, - ); - codegen.edit_position = Some(snapshot.anchor_after(edit_start)); - - buffer.end_transaction(cx) - }); - - if let Some(transaction) = transaction { - if let Some(first_transaction) = - codegen.transformation_transaction_id - { - // Group all assistant edits into the first transaction. - codegen.buffer.update(cx, |buffer, cx| { - buffer.merge_transactions( - transaction, - first_transaction, - cx, - ) - }); - } else { - codegen.transformation_transaction_id = Some(transaction); - codegen.buffer.update(cx, |buffer, cx| { - buffer.finalize_last_transaction(cx) - }); - } + if codegen.active { + codegen.apply_edits(edits.iter().cloned(), cx); + codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx); } - - codegen.reapply_line_based_diff(edit_range.clone(), line_diff, cx); + codegen.edits.extend(edits); + codegen.line_operations = line_ops; + codegen.edit_position = Some(snapshot.anchor_after(edit_start)); cx.notify(); })?; @@ -2667,9 +2937,8 @@ impl Codegen { // Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer. // That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff. // It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`. 
- let batch_diff_task = codegen.update(&mut cx, |codegen, cx| { - codegen.reapply_batch_diff(edit_range.clone(), cx) - })?; + let batch_diff_task = + codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?; let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task); line_based_stream_diff?; @@ -2713,24 +2982,45 @@ impl Codegen { buffer.undo_transaction(transaction_id, cx); buffer.refresh_preview(cx); } - - if let Some(transaction_id) = self.initial_transaction_id.take() { - buffer.undo_transaction(transaction_id, cx); - buffer.refresh_preview(cx); - } }); } + fn apply_edits( + &mut self, + edits: impl IntoIterator, String)>, + cx: &mut ModelContext, + ) { + let transaction = self.buffer.update(cx, |buffer, cx| { + // Avoid grouping assistant edits with user edits. + buffer.finalize_last_transaction(cx); + buffer.start_transaction(cx); + buffer.edit(edits, None, cx); + buffer.end_transaction(cx) + }); + + if let Some(transaction) = transaction { + if let Some(first_transaction) = self.transformation_transaction_id { + // Group all assistant edits into the first transaction. 
+ self.buffer.update(cx, |buffer, cx| { + buffer.merge_transactions(transaction, first_transaction, cx) + }); + } else { + self.transformation_transaction_id = Some(transaction); + self.buffer + .update(cx, |buffer, cx| buffer.finalize_last_transaction(cx)); + } + } + } + fn reapply_line_based_diff( &mut self, - edit_range: Range, - line_operations: Vec, + line_operations: impl IntoIterator, cx: &mut ModelContext, ) { let old_snapshot = self.snapshot.clone(); - let old_range = edit_range.to_point(&old_snapshot); + let old_range = self.range.to_point(&old_snapshot); let new_snapshot = self.buffer.read(cx).snapshot(cx); - let new_range = edit_range.to_point(&new_snapshot); + let new_range = self.range.to_point(&new_snapshot); let mut old_row = old_range.start.row; let mut new_row = new_range.start.row; @@ -2781,15 +3071,11 @@ impl Codegen { } } - fn reapply_batch_diff( - &mut self, - edit_range: Range, - cx: &mut ModelContext, - ) -> Task<()> { + fn reapply_batch_diff(&mut self, cx: &mut ModelContext) -> Task<()> { let old_snapshot = self.snapshot.clone(); - let old_range = edit_range.to_point(&old_snapshot); + let old_range = self.range.to_point(&old_snapshot); let new_snapshot = self.buffer.read(cx).snapshot(cx); - let new_range = edit_range.to_point(&new_snapshot); + let new_range = self.range.to_point(&new_snapshot); cx.spawn(|codegen, mut cx| async move { let (deleted_row_ranges, inserted_row_ranges) = cx @@ -3073,10 +3359,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3087,7 +3373,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range, future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3145,10 +3430,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - 
Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3159,7 +3444,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3220,10 +3504,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3234,7 +3518,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3294,10 +3577,10 @@ mod tests { }); let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); let codegen = cx.new_model(|cx| { - Codegen::new( + CodegenAlternative::new( buffer.clone(), range.clone(), - None, + true, None, prompt_builder, cx, @@ -3308,7 +3591,6 @@ mod tests { codegen.update(cx, |codegen, cx| { codegen.handle_stream( String::new(), - range.clone(), future::ready(Ok(chunks_rx.map(Ok).boxed())), cx, ) @@ -3338,6 +3620,78 @@ mod tests { ); } + #[gpui::test] + async fn test_inactive_codegen_alternative(cx: &mut TestAppContext) { + cx.update(LanguageModelRegistry::test); + cx.set_global(cx.update(SettingsStore::test)); + cx.update(language_settings::init); + + let text = indoc! 
{" + fn main() { + let x = 0; + } + "}; + let buffer = + cx.new_model(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx)); + let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx)); + let range = buffer.read_with(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 14)) + }); + let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap()); + let codegen = cx.new_model(|cx| { + CodegenAlternative::new( + buffer.clone(), + range.clone(), + false, + None, + prompt_builder, + cx, + ) + }); + + let (chunks_tx, chunks_rx) = mpsc::unbounded(); + codegen.update(cx, |codegen, cx| { + codegen.handle_stream( + String::new(), + future::ready(Ok(chunks_rx.map(Ok).boxed())), + cx, + ) + }); + + chunks_tx + .unbounded_send("let mut x = 0;\nx += 1;".to_string()) + .unwrap(); + drop(chunks_tx); + cx.run_until_parked(); + + // The codegen is inactive, so the buffer doesn't get modified. + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + text + ); + + // Activating the codegen applies the changes. + codegen.update(cx, |codegen, cx| codegen.set_active(true, cx)); + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + indoc! {" + fn main() { + let mut x = 0; + x += 1; + } + "} + ); + + // Deactivating the codegen undoes the changes. 
+ codegen.update(cx, |codegen, cx| codegen.set_active(false, cx)); + cx.run_until_parked(); + assert_eq!( + buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()), + text + ); + } + #[gpui::test] async fn test_strip_invalid_spans_from_codeblock() { assert_chunks("Lorem ipsum dolor", "Lorem ipsum dolor").await; diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index b3c8ef5f57..e1ba1c5886 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -76,6 +76,7 @@ impl Global for GlobalLanguageModelRegistry {} pub struct LanguageModelRegistry { active_model: Option, providers: BTreeMap>, + inline_alternatives: Vec>, } pub struct ActiveModel { @@ -229,6 +230,37 @@ impl LanguageModelRegistry { pub fn active_model(&self) -> Option> { self.active_model.as_ref()?.model.clone() } + + /// Selects and sets the inline alternatives for language models based on + /// provider name and id. + pub fn select_inline_alternative_models( + &mut self, + alternatives: impl IntoIterator, + cx: &mut ModelContext, + ) { + let mut selected_alternatives = Vec::new(); + + for (provider_id, model_id) in alternatives { + if let Some(provider) = self.providers.get(&provider_id) { + if let Some(model) = provider + .provided_models(cx) + .iter() + .find(|m| m.id() == model_id) + { + selected_alternatives.push(model.clone()); + } + } + } + + self.inline_alternatives = selected_alternatives; + } + + /// The models to use for inline assists. Returns the union of the active + /// model and all inline alternatives. When there are multiple models, the + /// user will be able to cycle through results. 
+ pub fn inline_alternative_models(&self) -> &[Arc] { + &self.inline_alternatives + } } #[cfg(test)] diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 29bd9a8068..c163dbc07a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1106,6 +1106,26 @@ impl MultiBuffer { } } + pub fn forget_transaction( + &mut self, + transaction_id: TransactionId, + cx: &mut ModelContext, + ) { + if let Some(buffer) = self.as_singleton() { + buffer.update(cx, |buffer, _| { + buffer.forget_transaction(transaction_id); + }); + } else if let Some(transaction) = self.history.forget(transaction_id) { + for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions { + if let Some(state) = self.buffers.borrow_mut().get_mut(&buffer_id) { + state.buffer.update(cx, |buffer, _| { + buffer.forget_transaction(buffer_transaction_id); + }); + } + } + } + } + pub fn stream_excerpts_with_context_lines( &mut self, buffer: Model, diff --git a/docs/src/assistant/configuration.md b/docs/src/assistant/configuration.md index bcdf461e2c..17b52a27d8 100644 --- a/docs/src/assistant/configuration.md +++ b/docs/src/assistant/configuration.md @@ -20,6 +20,7 @@ To further customize providers, you can use `settings.json` to do that as follow - [Configuring endpoints](#custom-endpoint) - [Configuring timeouts](#provider-timeout) - [Configuring default model](#default-model) +- [Configuring alternative models for inline assists](#alternative-assists) ### Zed AI {#zed-ai} @@ -264,6 +265,31 @@ You can also manually edit the `default_model` object in your settings: } ``` +#### Configuring alternative models for inline assists {#alternative-assists} + +You can configure additional models that will be used to perform inline assists in parallel. When you do this, +the inline assist UI will surface controls to cycle between the alternatives generated by each model. 
The models +you specify here are always used in _addition_ to your default model. For example, the following configuration +will generate two outputs for every assist. One with Claude 3.5 Sonnet, and one with GPT-4o. + +```json +{ + "assistant": { + "default_model": { + "provider": "zed.dev", + "model": "claude-3-5-sonnet" + }, + "inline_alternatives": [ + { + "provider": "zed.dev", + "model": "gpt-4o" + } + ], + "version": "2" + } +} +``` + #### Common Panel Settings | key | type | default | description | From 8103ac12bfc596f0f32f041239e0e26f9c2ee4cc Mon Sep 17 00:00:00 2001 From: Stanislav Alekseev <43210583+WeetHet@users.noreply.github.com> Date: Fri, 20 Sep 2024 06:36:50 +0300 Subject: [PATCH 51/96] ssh-remoting: Tidy up the code a bit after #18094 (#18102) Release Notes: - N/A --- crates/client/src/client.rs | 2 +- crates/project/src/worktree_store.rs | 7 ++++--- crates/remote/src/ssh_session.rs | 2 +- crates/rpc/src/proto_client.rs | 6 +++--- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index a8387f7c5a..48bd646d8a 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1622,7 +1622,7 @@ impl ProtoClient for Client { &self.handler_set } - fn goes_via_collab(&self) -> bool { + fn is_via_collab(&self) -> bool { true } } diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 7fae8b9e1d..5c3b2a00a9 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -313,9 +313,10 @@ impl WorktreeStore { }) } + #[track_caller] pub fn add(&mut self, worktree: &Model, cx: &mut ModelContext) { let worktree_id = worktree.read(cx).id(); - debug_assert!(!self.worktrees().any(|w| w.read(cx).id() == worktree_id)); + debug_assert!(self.worktrees().all(|w| w.read(cx).id() != worktree_id)); let push_strong_handle = self.retain_worktrees || worktree.read(cx).is_visible(); let handle = if push_strong_handle { @@ 
-487,7 +488,7 @@ impl WorktreeStore { }; // collab has bad concurrency guarantees, so we send requests in serial. - let update_project = if downstream_client.goes_via_collab() { + let update_project = if downstream_client.is_via_collab() { Some(downstream_client.request(update)) } else { downstream_client.send(update).log_err(); @@ -508,7 +509,7 @@ impl WorktreeStore { move |update| { let client = client.clone(); async move { - if client.goes_via_collab() { + if client.is_via_collab() { client.request(update).map(|result| result.is_ok()).await } else { client.send(update).is_ok() diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 10608b74f3..2bd18aa37e 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -471,7 +471,7 @@ impl ProtoClient for SshSession { &self.state } - fn goes_via_collab(&self) -> bool { + fn is_via_collab(&self) -> bool { false } } diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 89ef580cdf..8809910276 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -28,7 +28,7 @@ pub trait ProtoClient: Send + Sync { fn message_handler_set(&self) -> &parking_lot::Mutex; - fn goes_via_collab(&self) -> bool; + fn is_via_collab(&self) -> bool; } #[derive(Default)] @@ -141,8 +141,8 @@ impl AnyProtoClient { Self(client) } - pub fn goes_via_collab(&self) -> bool { - self.0.goes_via_collab() + pub fn is_via_collab(&self) -> bool { + self.0.is_via_collab() } pub fn request( From 579267f399816ae9e54b79c92949384a0ac8455a Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 08:04:26 +0200 Subject: [PATCH 52/96] docs: Update JavaScript docs and remove TBDs (#17989) Release Notes: - N/A --- docs/src/languages/javascript.md | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/docs/src/languages/javascript.md b/docs/src/languages/javascript.md index 8fb84881ad..7e74cbbfae 100644 --- 
a/docs/src/languages/javascript.md +++ b/docs/src/languages/javascript.md @@ -26,17 +26,15 @@ For example, if you have Prettier installed and on your `PATH`, you can use it t } ``` - +Zed supports JSDoc syntax in JavaScript and TypeScript comments that match the JSDoc syntax. Zed uses [tree-sitter/tree-sitter-jsdoc](https://github.com/tree-sitter/tree-sitter-jsdoc) for parsing and highlighting JSDoc. ## ESLint From 93730983dd31bad1855edd3d5943a617f83f2b40 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 08:04:49 +0200 Subject: [PATCH 53/96] ssh remoting: Restore items/buffers when opening SSH project (#18083) Demo: https://github.com/user-attachments/assets/ab79ed0d-13a6-4ae7-8e76-6365fc322ec4 Release Notes: - N/A Co-authored-by: Bennet --- crates/editor/src/items.rs | 8 ++++++-- crates/workspace/src/workspace.rs | 27 +++++++++++++++++---------- 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index e3e8ca604b..3d04eb82d3 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1087,10 +1087,14 @@ impl SerializableItem for Editor { let workspace_id = workspace.database_id()?; let buffer = self.buffer().read(cx).as_singleton()?; + let path = buffer + .read(cx) + .file() + .map(|file| file.full_path(cx)) + .and_then(|full_path| project.read(cx).find_project_path(&full_path, cx)) + .and_then(|project_path| project.read(cx).absolute_path(&project_path, cx)); let is_dirty = buffer.read(cx).is_dirty(); - let local_file = buffer.read(cx).file().and_then(|file| file.as_local()); - let path = local_file.map(|file| file.abs_path(cx)); let mtime = buffer.read(cx).saved_mtime(); let snapshot = buffer.read(cx).snapshot(); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 5855dcce1e..92a85299f4 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1114,18 +1114,16 @@ impl Workspace { } // Get 
project paths for all of the abs_paths - let mut worktree_roots: HashSet> = Default::default(); let mut project_paths: Vec<(PathBuf, Option)> = Vec::with_capacity(paths_to_open.len()); for path in paths_to_open.into_iter() { - if let Some((worktree, project_entry)) = cx + if let Some((_, project_entry)) = cx .update(|cx| { Workspace::project_path_for_path(project_handle.clone(), &path, true, cx) })? .await .log_err() { - worktree_roots.extend(worktree.update(&mut cx, |tree, _| tree.abs_path()).ok()); project_paths.push((path, Some(project_entry))); } else { project_paths.push((path, None)); @@ -5532,12 +5530,13 @@ pub fn open_ssh_project( let serialized_workspace = persistence::DB.workspace_for_ssh_project(&serialized_ssh_project); - let workspace_id = - if let Some(workspace_id) = serialized_workspace.map(|workspace| workspace.id) { - workspace_id - } else { - persistence::DB.next_id().await? - }; + let workspace_id = if let Some(workspace_id) = + serialized_workspace.as_ref().map(|workspace| workspace.id) + { + workspace_id + } else { + persistence::DB.next_id().await? + }; cx.update_window(window.into(), |_, cx| { cx.replace_root_view(|cx| { @@ -5548,7 +5547,15 @@ pub fn open_ssh_project( }); })?; - window.update(&mut cx, |_, cx| cx.activate_window()) + window + .update(&mut cx, |_, cx| { + cx.activate_window(); + + open_items(serialized_workspace, vec![], app_state, cx) + })? + .await?; + + Ok(()) }) } From ace4d5185dbd53023f8b583df781bc96f891b80a Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 10:53:06 +0200 Subject: [PATCH 54/96] settings: Show notification when user/project settings fail to parse (#18122) Closes #16876 We only ever showed parsing errors, but not if something failed to deserialize. Basically, if you had a stray `,` somewhere, we'd show a notification for user errors, but only squiggly lines if you had a `[]` instead of a `{}`. The squiggly lines would only show up when there were schema errors. 
In the case of `formatter` settings, for example, if someone put in a `{}` instead of `[]`, we'd never show anything. With this change we always show a notification if parsing user or project settings fails. (Right now, the error message might still be bad, but that's a separate change) Release Notes: - Added a notification to warn users if their user settings or project-local settings failed to deserialize. Demo: https://github.com/user-attachments/assets/e5c48165-f2f7-4b5c-9c6d-6ea74f678683 --- crates/language/src/language_settings.rs | 7 ++ crates/project/src/project.rs | 26 ++++++- crates/project/src/project_settings.rs | 45 ++++++++++-- crates/settings/src/settings.rs | 4 +- crates/settings/src/settings_store.rs | 93 ++++++++++++++++-------- crates/workspace/src/workspace.rs | 19 ++++- crates/zed/src/main.rs | 28 ++++--- 7 files changed, 172 insertions(+), 50 deletions(-) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 77c9a1d18c..6121cb6a39 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -1152,6 +1152,13 @@ mod tests { ); } + #[test] + fn test_formatter_deserialization_invalid() { + let raw_auto = "{\"formatter\": {}}"; + let result: Result = serde_json::from_str(raw_auto); + assert!(result.is_err()); + } + #[test] pub fn test_resolve_language_servers() { fn language_server_names(names: &[&str]) -> Vec { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index fcf10d11c2..435c143024 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -59,12 +59,14 @@ use node_runtime::NodeRuntime; use parking_lot::{Mutex, RwLock}; use paths::{local_tasks_file_relative_path, local_vscode_tasks_file_relative_path}; pub use prettier_store::PrettierStore; -use project_settings::{ProjectSettings, SettingsObserver}; +use project_settings::{ProjectSettings, SettingsObserver, SettingsObserverEvent}; use 
remote::SshSession; use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient, ErrorCode}; use search::{SearchInputKind, SearchQuery, SearchResult}; use search_history::SearchHistory; -use settings::{watch_config_file, Settings, SettingsLocation, SettingsStore}; +use settings::{ + watch_config_file, InvalidSettingsError, Settings, SettingsLocation, SettingsStore, +}; use smol::channel::Receiver; use snippet::Snippet; use snippet_provider::SnippetProvider; @@ -230,6 +232,7 @@ pub enum Event { LanguageServerRemoved(LanguageServerId), LanguageServerLog(LanguageServerId, LanguageServerLogType, String), Notification(String), + LocalSettingsUpdated(Result<(), InvalidSettingsError>), LanguageServerPrompt(LanguageServerPromptRequest), LanguageNotFound(Model), ActiveEntryChanged(Option), @@ -644,6 +647,8 @@ impl Project { let settings_observer = cx.new_model(|cx| { SettingsObserver::new_local(fs.clone(), worktree_store.clone(), cx) }); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); let environment = ProjectEnvironment::new(&worktree_store, env, cx); let lsp_store = cx.new_model(|cx| { @@ -729,6 +734,8 @@ impl Project { let settings_observer = cx.new_model(|cx| { SettingsObserver::new_ssh(ssh.clone().into(), worktree_store.clone(), cx) }); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); let environment = ProjectEnvironment::new(&worktree_store, None, cx); let lsp_store = cx.new_model(|cx| { @@ -913,6 +920,8 @@ impl Project { cx.subscribe(&buffer_store, Self::on_buffer_store_event) .detach(); cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach(); + cx.subscribe(&settings_observer, Self::on_settings_observer_event) + .detach(); let mut this = Self { buffer_ordered_messages_tx: tx, @@ -2058,6 +2067,19 @@ impl Project { } } + fn on_settings_observer_event( + &mut self, + _: Model, + event: &SettingsObserverEvent, + cx: &mut ModelContext, + ) { + match event { + SettingsObserverEvent::LocalSettingsUpdated(error) => 
{ + cx.emit(Event::LocalSettingsUpdated(error.clone())) + } + } + } + fn on_worktree_store_event( &mut self, _: Model, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 2eeb840896..9a7c80703c 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1,11 +1,11 @@ use collections::HashMap; use fs::Fs; -use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Model, ModelContext}; +use gpui::{AppContext, AsyncAppContext, BorrowAppContext, EventEmitter, Model, ModelContext}; use paths::local_settings_file_relative_path; use rpc::{proto, AnyProtoClient, TypedEnvelope}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, SettingsStore}; +use settings::{InvalidSettingsError, Settings, SettingsSources, SettingsStore}; use std::{ path::{Path, PathBuf}, sync::Arc, @@ -176,6 +176,13 @@ pub enum SettingsObserverMode { Remote, } +#[derive(Clone, Debug, PartialEq)] +pub enum SettingsObserverEvent { + LocalSettingsUpdated(Result<(), InvalidSettingsError>), +} + +impl EventEmitter for SettingsObserver {} + pub struct SettingsObserver { mode: SettingsObserverMode, downstream_client: Option, @@ -415,11 +422,16 @@ impl SettingsObserver { ) { let worktree_id = worktree.read(cx).id(); let remote_worktree_id = worktree.read(cx).id(); - cx.update_global::(|store, cx| { + + let result = cx.update_global::>(|store, cx| { for (directory, file_content) in settings_contents { - store - .set_local_settings(worktree_id, directory.clone(), file_content.as_deref(), cx) - .log_err(); + store.set_local_settings( + worktree_id, + directory.clone(), + file_content.as_deref(), + cx, + )?; + if let Some(downstream_client) = &self.downstream_client { downstream_client .send(proto::UpdateWorktreeSettings { @@ -431,6 +443,25 @@ impl SettingsObserver { .log_err(); } } - }) + anyhow::Ok(()) + }); + + match result { + Err(error) => { + if let Ok(error) = 
error.downcast::() { + if let InvalidSettingsError::LocalSettings { + ref path, + ref message, + } = error + { + log::error!("Failed to set local settings in {:?}: {:?}", path, message); + cx.emit(SettingsObserverEvent::LocalSettingsUpdated(Err(error))); + } + } + } + Ok(()) => { + cx.emit(SettingsObserverEvent::LocalSettingsUpdated(Ok(()))); + } + } } } diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 5ece3f867e..f1f8591bba 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -13,7 +13,9 @@ pub use editable_setting_control::*; pub use json_schema::*; pub use keymap_file::KeymapFile; pub use settings_file::*; -pub use settings_store::{Settings, SettingsLocation, SettingsSources, SettingsStore}; +pub use settings_store::{ + InvalidSettingsError, Settings, SettingsLocation, SettingsSources, SettingsStore, +}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] pub struct WorktreeId(usize); diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 3ef8bffe2d..20bf52f2c5 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -3,6 +3,7 @@ use collections::{btree_map, hash_map, BTreeMap, HashMap}; use fs::Fs; use futures::{channel::mpsc, future::LocalBoxFuture, FutureExt, StreamExt}; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Global, Task, UpdateGlobal}; +use paths::local_settings_file_relative_path; use schemars::{gen::SchemaGenerator, schema::RootSchema, JsonSchema}; use serde::{de::DeserializeOwned, Deserialize as _, Serialize}; use smallvec::SmallVec; @@ -10,7 +11,7 @@ use std::{ any::{type_name, Any, TypeId}, fmt::Debug, ops::Range, - path::Path, + path::{Path, PathBuf}, str, sync::{Arc, LazyLock}, }; @@ -694,9 +695,14 @@ impl SettingsStore { .deserialize_setting(&self.raw_extension_settings) .log_err(); - let user_settings = setting_value - .deserialize_setting(&self.raw_user_settings) 
- .log_err(); + let user_settings = match setting_value.deserialize_setting(&self.raw_user_settings) { + Ok(settings) => Some(settings), + Err(error) => { + return Err(anyhow!(InvalidSettingsError::UserSettings { + message: error.to_string() + })); + } + }; let mut release_channel_settings = None; if let Some(release_settings) = &self @@ -746,34 +752,43 @@ impl SettingsStore { break; } - if let Some(local_settings) = - setting_value.deserialize_setting(local_settings).log_err() - { - paths_stack.push(Some((*root_id, path.as_ref()))); - project_settings_stack.push(local_settings); + match setting_value.deserialize_setting(local_settings) { + Ok(local_settings) => { + paths_stack.push(Some((*root_id, path.as_ref()))); + project_settings_stack.push(local_settings); - // If a local settings file changed, then avoid recomputing local - // settings for any path outside of that directory. - if changed_local_path.map_or(false, |(changed_root_id, changed_local_path)| { - *root_id != changed_root_id || !path.starts_with(changed_local_path) - }) { - continue; - } - - if let Some(value) = setting_value - .load_setting( - SettingsSources { - default: &default_settings, - extensions: extension_settings.as_ref(), - user: user_settings.as_ref(), - release_channel: release_channel_settings.as_ref(), - project: &project_settings_stack.iter().collect::>(), + // If a local settings file changed, then avoid recomputing local + // settings for any path outside of that directory. 
+ if changed_local_path.map_or( + false, + |(changed_root_id, changed_local_path)| { + *root_id != changed_root_id || !path.starts_with(changed_local_path) }, - cx, - ) - .log_err() - { - setting_value.set_local_value(*root_id, path.clone(), value); + ) { + continue; + } + + if let Some(value) = setting_value + .load_setting( + SettingsSources { + default: &default_settings, + extensions: extension_settings.as_ref(), + user: user_settings.as_ref(), + release_channel: release_channel_settings.as_ref(), + project: &project_settings_stack.iter().collect::>(), + }, + cx, + ) + .log_err() + { + setting_value.set_local_value(*root_id, path.clone(), value); + } + } + Err(error) => { + return Err(anyhow!(InvalidSettingsError::LocalSettings { + path: path.join(local_settings_file_relative_path()), + message: error.to_string() + })); } } } @@ -782,6 +797,24 @@ impl SettingsStore { } } +#[derive(Debug, Clone, PartialEq)] +pub enum InvalidSettingsError { + LocalSettings { path: PathBuf, message: String }, + UserSettings { message: String }, +} + +impl std::fmt::Display for InvalidSettingsError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + InvalidSettingsError::LocalSettings { message, .. 
} + | InvalidSettingsError::UserSettings { message } => { + write!(f, "{}", message) + } + } + } +} +impl std::error::Error for InvalidSettingsError {} + impl Debug for SettingsStore { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("SettingsStore") diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 92a85299f4..1fbeab38a2 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -64,7 +64,7 @@ use project::{ use remote::{SshConnectionOptions, SshSession}; use serde::Deserialize; use session::AppSession; -use settings::Settings; +use settings::{InvalidSettingsError, Settings}; use shared_screen::SharedScreen; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, @@ -832,6 +832,23 @@ impl Workspace { } } + project::Event::LocalSettingsUpdated(result) => { + struct LocalSettingsUpdated; + let id = NotificationId::unique::(); + + match result { + Err(InvalidSettingsError::LocalSettings { message, path }) => { + let full_message = + format!("Failed to set local settings in {:?}:\n{}", path, message); + this.show_notification(id, cx, |cx| { + cx.new_view(|_| MessageNotification::new(full_message.clone())) + }) + } + Err(_) => {} + Ok(_) => this.dismiss_notification(&id, cx), + } + } + project::Event::Notification(message) => { struct ProjectNotification; diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 3104001f99..6ecdbb224f 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -34,7 +34,9 @@ use parking_lot::Mutex; use recent_projects::open_ssh_project; use release_channel::{AppCommitSha, AppVersion}; use session::{AppSession, Session}; -use settings::{handle_settings_file_changes, watch_config_file, Settings, SettingsStore}; +use settings::{ + handle_settings_file_changes, watch_config_file, InvalidSettingsError, Settings, SettingsStore, +}; use simplelog::ConfigBuilder; use smol::process::Command; use std::{ @@ -626,20 +628,28 
@@ fn handle_settings_changed(error: Option, cx: &mut AppContext) { for workspace in workspace::local_workspace_windows(cx) { workspace - .update(cx, |workspace, cx| match &error { - Some(error) => { - workspace.show_notification(id.clone(), cx, |cx| { - cx.new_view(|_| { - MessageNotification::new(format!("Invalid settings file\n{error}")) + .update(cx, |workspace, cx| { + match error + .as_ref() + .and_then(|error| error.downcast_ref::()) + { + Some(InvalidSettingsError::UserSettings { message }) => { + workspace.show_notification(id.clone(), cx, |cx| { + cx.new_view(|_| { + MessageNotification::new(format!( + "Invalid user settings file\n{message}" + )) .with_click_message("Open settings file") .on_click(|cx| { cx.dispatch_action(zed_actions::OpenSettings.boxed_clone()); cx.emit(DismissEvent); }) - }) - }); + }) + }); + } + None => workspace.dismiss_notification(&id, cx), + _ => {} } - None => workspace.dismiss_notification(&id, cx), }) .log_err(); } From 97708fdf43bbd15e3b978412d6682502df2f0d70 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 11:10:19 +0200 Subject: [PATCH 55/96] settings: Follow-up fix to show more errors (#18123) The condition added in #18122 was too strict. 
Release Notes: - N/A --- crates/zed/src/main.rs | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 6ecdbb224f..d3eb97c9aa 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -629,26 +629,28 @@ fn handle_settings_changed(error: Option, cx: &mut AppContext) { for workspace in workspace::local_workspace_windows(cx) { workspace .update(cx, |workspace, cx| { - match error - .as_ref() - .and_then(|error| error.downcast_ref::()) - { - Some(InvalidSettingsError::UserSettings { message }) => { - workspace.show_notification(id.clone(), cx, |cx| { - cx.new_view(|_| { - MessageNotification::new(format!( - "Invalid user settings file\n{message}" - )) - .with_click_message("Open settings file") - .on_click(|cx| { - cx.dispatch_action(zed_actions::OpenSettings.boxed_clone()); - cx.emit(DismissEvent); + match error.as_ref() { + Some(error) => { + if let Some(InvalidSettingsError::LocalSettings { .. }) = + error.downcast_ref::() + { + // Local settings will be displayed by the projects + } else { + workspace.show_notification(id.clone(), cx, |cx| { + cx.new_view(|_| { + MessageNotification::new(format!( + "Invalid user settings file\n{error}" + )) + .with_click_message("Open settings file") + .on_click(|cx| { + cx.dispatch_action(zed_actions::OpenSettings.boxed_clone()); + cx.emit(DismissEvent); + }) }) - }) - }); + }); + } } None => workspace.dismiss_notification(&id, cx), - _ => {} } }) .log_err(); From ca033e647507ab8b31bc3a4a249f501ccecb0f9c Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 08:35:13 -0400 Subject: [PATCH 56/96] Revert "Update nightly tag every night (#17879)" (#18133) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR reverts #17879, as it wasn't working. 
When a GitHub Action pushes a tag, it does not trigger workflows for push events for that tag: > When you use the repository's `GITHUB_TOKEN` to perform tasks, events triggered by the `GITHUB_TOKEN`, with the exception of `workflow_dispatch` and `repository_dispatch`, will not create a new workflow run. This prevents you from accidentally creating recursive workflow runs. For example, if a workflow run pushes code using the repository's `GITHUB_TOKEN`, a new workflow will not run even when the repository contains a workflow configured to run when `push` events occur. > > — [source](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) This reverts commit 761129e3739efacb7b8763eaa0fa8a109e935447. Release Notes: - N/A --- .github/workflows/bump_nightly_tag.yml | 23 ----------------------- .github/workflows/release_nightly.yml | 3 +++ 2 files changed, 3 insertions(+), 23 deletions(-) delete mode 100644 .github/workflows/bump_nightly_tag.yml diff --git a/.github/workflows/bump_nightly_tag.yml b/.github/workflows/bump_nightly_tag.yml deleted file mode 100644 index 0959ae9677..0000000000 --- a/.github/workflows/bump_nightly_tag.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Update Nightly Tag - -on: - schedule: - # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) - - cron: "0 7 * * *" - -jobs: - update-nightly-tag: - if: github.repository_owner == 'zed-industries' - runs-on: ubuntu-latest - steps: - - name: Checkout repository - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 - with: - fetch-depth: 0 - - - name: Update nightly tag - run: | - git config user.name github-actions - git config user.email github-actions@github.com - git tag -f nightly - git push origin nightly --force diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 17db66a264..bcaa60b775 100644 --- 
a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -1,6 +1,9 @@ name: Release Nightly on: + schedule: + # Fire every day at 7:00am UTC (Roughly before EU workday and after US workday) + - cron: "0 7 * * *" push: tags: - "nightly" From 90a12f55642410e38df65d7f8381d6ecb3d0c1c2 Mon Sep 17 00:00:00 2001 From: Thorsten Ball Date: Fri, 20 Sep 2024 14:35:45 +0200 Subject: [PATCH 57/96] ssh remoting: Do not double-register LspAdapters (#18132) This fixes the bug with hover tooltips appearing multiple times. Turns out everytime we receive the `CreateLanguageServer` message we'd add a new adapter but only have a single server running for all of them. And we send a `CreateLanguageServer` message everytime you open a buffer. What this does is to only add a new adapter if it hasn't already been registered, which is also what we do locally. Release Notes: - N/A --- crates/language/src/language_registry.rs | 34 ++++++++++++++++-- crates/project/src/lsp_store.rs | 44 ++++++++++++------------ 2 files changed, 54 insertions(+), 24 deletions(-) diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 17ebef50e8..e264517d5b 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -326,13 +326,43 @@ impl LanguageRegistry { Some(load_lsp_adapter()) } - pub fn register_lsp_adapter(&self, language_name: LanguageName, adapter: Arc) { + pub fn register_lsp_adapter( + &self, + language_name: LanguageName, + adapter: Arc, + ) -> Arc { + let cached = CachedLspAdapter::new(adapter); self.state .write() .lsp_adapters .entry(language_name) .or_default() - .push(CachedLspAdapter::new(adapter)); + .push(cached.clone()); + cached + } + + pub fn get_or_register_lsp_adapter( + &self, + language_name: LanguageName, + server_name: LanguageServerName, + build_adapter: impl FnOnce() -> Arc + 'static, + ) -> Arc { + let registered = self + .state + .write() + .lsp_adapters + 
.entry(language_name.clone()) + .or_default() + .iter() + .find(|cached_adapter| cached_adapter.name == server_name) + .cloned(); + + if let Some(found) = registered { + found + } else { + let adapter = build_adapter(); + self.register_lsp_adapter(language_name, adapter) + } } /// Register a fake language server and adapter diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 5c32c9030d..92f37f87af 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -4475,7 +4475,7 @@ impl LspStore { mut cx: AsyncAppContext, ) -> Result { let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id); - let name = LanguageServerName::from_proto(envelope.payload.name); + let server_name = LanguageServerName::from_proto(envelope.payload.name); let binary = envelope .payload @@ -4494,6 +4494,14 @@ impl LspStore { let matcher: LanguageMatcher = serde_json::from_str(&language.matcher)?; this.update(&mut cx, |this, cx| { + let Some(worktree) = this + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + else { + return Err(anyhow!("worktree not found")); + }; + this.languages .register_language(language_name.clone(), None, matcher.clone(), { let language_name = language_name.clone(); @@ -4513,28 +4521,20 @@ impl LspStore { .spawn(this.languages.language_for_name(language_name.0.as_ref())) .detach(); - let adapter = Arc::new(SshLspAdapter::new( - name, - binary, - envelope.payload.initialization_options, - envelope.payload.code_action_kinds, - )); - - this.languages - .register_lsp_adapter(language_name.clone(), adapter.clone()); - let Some(worktree) = this - .worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - else { - return Err(anyhow!("worktree not found")); - }; - this.start_language_server( - &worktree, - CachedLspAdapter::new(adapter), - language_name, - cx, + let adapter = this.languages.get_or_register_lsp_adapter( + language_name.clone(), + server_name.clone(), + || { + 
Arc::new(SshLspAdapter::new( + server_name, + binary, + envelope.payload.initialization_options, + envelope.payload.code_action_kinds, + )) + }, ); + + this.start_language_server(&worktree, adapter, language_name, cx); Ok(()) })??; Ok(proto::Ack {}) From 16d2afc662ae43cd404279c068bb26557306b9c7 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 08:46:23 -0400 Subject: [PATCH 58/96] ci: Bump `nightly` tag on scheduled Nightly builds (#18134) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR makes it so after a scheduled Nightly build we also update the `nightly` tag to keep things in sync. It's safe to bump the tag within this Action, as it won't trigger another Nightly build due to GitHub's recursive Action protections: > When you use the repository's `GITHUB_TOKEN` to perform tasks, events triggered by the `GITHUB_TOKEN`, with the exception of `workflow_dispatch` and `repository_dispatch`, will not create a new workflow run. This prevents you from accidentally creating recursive workflow runs. For example, if a workflow run pushes code using the repository's `GITHUB_TOKEN`, a new workflow will not run even when the repository contains a workflow configured to run when `push` events occur. 
> > — [source](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#using-the-github_token-in-a-workflow) Release Notes: - N/A --- .github/workflows/release_nightly.yml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index bcaa60b775..2b973dcddc 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -171,3 +171,28 @@ jobs: - name: Upload Zed Nightly run: script/upload-nightly linux-targz + + update-nightly-tag: + name: Update nightly tag + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + needs: + - bundle-mac + - bundle-linux-x86 + - bundle-linux-arm + steps: + - name: Checkout repo + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + with: + fetch-depth: 0 + + - name: Update nightly tag + run: | + if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then + echo "Nightly tag already points to current commit. Skipping tagging." 
+ exit 0 + fi + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f nightly + git push origin nightly --force From d6c184b494a0c9a9a46d4ffdb5483ba65967ab0b Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 20 Sep 2024 09:23:11 -0400 Subject: [PATCH 59/96] Detect 'MD' extension as Markdown (#18135) --- crates/languages/src/markdown/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/markdown/config.toml b/crates/languages/src/markdown/config.toml index 6b518ec8b6..ce3b294b4e 100644 --- a/crates/languages/src/markdown/config.toml +++ b/crates/languages/src/markdown/config.toml @@ -1,6 +1,6 @@ name = "Markdown" grammar = "markdown" -path_suffixes = ["md", "mdx", "mdwn", "markdown"] +path_suffixes = ["md", "mdx", "mdwn", "markdown", "MD"] word_characters = ["-"] brackets = [ { start = "{", end = "}", close = true, newline = true }, From 5f1046b3cd5290112f6dd464e49bc58661fd2179 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Fri, 20 Sep 2024 10:28:22 -0400 Subject: [PATCH 60/96] Make evals handle failures more gracefully (#18082) Now when an individual project eval fails, instead of panicking we add it to a list of failures that we collect and report at the end (and make the exit code nonzero). 
Release Notes: - N/A --- crates/evals/src/eval.rs | 315 ++++++++++++++++++++++++--------------- 1 file changed, 195 insertions(+), 120 deletions(-) diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 708cfa7511..0580053373 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -12,13 +12,16 @@ use language::LanguageRegistry; use node_runtime::FakeNodeRuntime; use open_ai::OpenAiEmbeddingModel; use project::Project; -use semantic_index::{OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status}; +use semantic_index::{ + EmbeddingProvider, OpenAiEmbeddingProvider, ProjectIndex, SemanticDb, Status, +}; use serde::{Deserialize, Serialize}; use settings::SettingsStore; use smol::channel::bounded; use smol::io::AsyncReadExt; use smol::Timer; use std::ops::RangeInclusive; +use std::path::PathBuf; use std::time::Duration; use std::{ fs, @@ -237,6 +240,14 @@ async fn fetch_code_search_net_resources(http_client: &dyn HttpClient) -> Result Ok(()) } +#[derive(Default, Debug)] +struct Counts { + covered_results: usize, + overlapped_results: usize, + covered_files: usize, + total_results: usize, +} + async fn run_evaluation( only_repo: Option, executor: &BackgroundExecutor, @@ -297,12 +308,11 @@ async fn run_evaluation( cx.update(|cx| languages::init(language_registry.clone(), node_runtime.clone(), cx)) .unwrap(); - let mut covered_result_count = 0; - let mut overlapped_result_count = 0; - let mut covered_file_count = 0; - let mut total_result_count = 0; + let mut counts = Counts::default(); eprint!("Running evals."); + let mut failures = Vec::new(); + for evaluation_project in evaluations { if only_repo .as_ref() @@ -314,27 +324,24 @@ async fn run_evaluation( eprint!("\r\x1B[2K"); eprint!( "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. 
Project: {}...", - covered_result_count, - total_result_count, - overlapped_result_count, - total_result_count, - covered_file_count, - total_result_count, + counts.covered_results, + counts.total_results, + counts.overlapped_results, + counts.total_results, + counts.covered_files, + counts.total_results, evaluation_project.repo ); - let repo_db_path = - db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_"))); - let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider.clone(), cx) - .await - .unwrap(); - let repo_dir = repos_dir.join(&evaluation_project.repo); if !repo_dir.exists() || repo_dir.join(SKIP_EVAL_PATH).exists() { eprintln!("Skipping {}: directory not found", evaluation_project.repo); continue; } + let repo_db_path = + db_path.join(format!("{}.db", evaluation_project.repo.replace('/', "_"))); + let project = cx .update(|cx| { Project::local( @@ -349,125 +356,193 @@ async fn run_evaluation( }) .unwrap(); - let (worktree, _) = project - .update(cx, |project, cx| { - project.find_or_create_worktree(repo_dir, true, cx) - })? - .await?; + let repo = evaluation_project.repo.clone(); + if let Err(err) = run_eval_project( + evaluation_project, + &user_store, + repo_db_path, + &repo_dir, + &mut counts, + project, + embedding_provider.clone(), + fs.clone(), + cx, + ) + .await + { + eprintln!("{repo} eval failed with error: {:?}", err); - worktree - .update(cx, |worktree, _| { - worktree.as_local().unwrap().scan_complete() - }) - .unwrap() - .await; + failures.push((repo, err)); + } + } - let project_index = cx - .update(|cx| semantic_index.create_project_index(project.clone(), cx)) - .unwrap(); - wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await; + eprintln!( + "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured. 
{} failed.", + counts.covered_results, + counts.total_results, + counts.overlapped_results, + counts.total_results, + counts.covered_files, + counts.total_results, + failures.len(), + ); - for query in evaluation_project.queries { - let results = cx - .update(|cx| { + if failures.is_empty() { + Ok(()) + } else { + eprintln!("Failures:\n"); + + for (index, (repo, failure)) in failures.iter().enumerate() { + eprintln!("Failure #{} - {repo}\n{:?}", index + 1, failure); + } + + Err(anyhow::anyhow!("Some evals failed.")) + } +} + +#[allow(clippy::too_many_arguments)] +async fn run_eval_project( + evaluation_project: EvaluationProject, + user_store: &Model, + repo_db_path: PathBuf, + repo_dir: &Path, + counts: &mut Counts, + project: Model, + embedding_provider: Arc, + fs: Arc, + cx: &mut AsyncAppContext, +) -> Result<(), anyhow::Error> { + let mut semantic_index = SemanticDb::new(repo_db_path, embedding_provider, cx).await?; + + let (worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree(repo_dir, true, cx) + })? + .await?; + + worktree + .update(cx, |worktree, _| { + worktree.as_local().unwrap().scan_complete() + })? + .await; + + let project_index = cx.update(|cx| semantic_index.create_project_index(project.clone(), cx))?; + wait_for_indexing_complete(&project_index, cx, Some(Duration::from_secs(120))).await; + + for query in evaluation_project.queries { + let results = { + // Retry search up to 3 times in case of timeout, network failure, etc. 
+ let mut retries_remaining = 3; + let mut result; + + loop { + match cx.update(|cx| { let project_index = project_index.read(cx); project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx) - }) - .unwrap() - .await - .unwrap(); - - let results = SemanticDb::load_results(results, &fs.clone(), &cx) - .await - .unwrap(); - - let mut project_covered_result_count = 0; - let mut project_overlapped_result_count = 0; - let mut project_covered_file_count = 0; - let mut covered_result_indices = Vec::new(); - for expected_result in &query.expected_results { - let mut file_matched = false; - let mut range_overlapped = false; - let mut range_covered = false; - - for (ix, result) in results.iter().enumerate() { - if result.path.as_ref() == Path::new(&expected_result.file) { - file_matched = true; - let start_matched = - result.row_range.contains(&expected_result.lines.start()); - let end_matched = result.row_range.contains(&expected_result.lines.end()); - - if start_matched || end_matched { - range_overlapped = true; - } - - if start_matched && end_matched { - range_covered = true; - covered_result_indices.push(ix); + }) { + Ok(task) => match task.await { + Ok(answer) => { + result = Ok(answer); break; } + Err(err) => { + result = Err(err); + } + }, + Err(err) => { + result = Err(err); } } - if range_covered { - project_covered_result_count += 1 - }; - if range_overlapped { - project_overlapped_result_count += 1 - }; - if file_matched { - project_covered_file_count += 1 - }; + if retries_remaining > 0 { + eprintln!( + "Retrying search after it failed on query {:?} with {:?}", + query, result + ); + retries_remaining -= 1; + } else { + eprintln!( + "Ran out of retries; giving up on search which failed on query {:?} with {:?}", + query, result + ); + break; + } } - let outcome_repo = evaluation_project.repo.clone(); - let query_results = EvaluationQueryOutcome { - repo: outcome_repo, - query: query.query, - total_result_count: query.expected_results.len(), - 
covered_result_count: project_covered_result_count, - overlapped_result_count: project_overlapped_result_count, - covered_file_count: project_covered_file_count, - expected_results: query.expected_results, - actual_results: results - .iter() - .map(|result| EvaluationSearchResult { - file: result.path.to_string_lossy().to_string(), - lines: result.row_range.clone(), - }) - .collect(), - covered_result_indices, + SemanticDb::load_results(result?, &fs.clone(), &cx).await? + }; + + let mut project_covered_result_count = 0; + let mut project_overlapped_result_count = 0; + let mut project_covered_file_count = 0; + let mut covered_result_indices = Vec::new(); + for expected_result in &query.expected_results { + let mut file_matched = false; + let mut range_overlapped = false; + let mut range_covered = false; + + for (ix, result) in results.iter().enumerate() { + if result.path.as_ref() == Path::new(&expected_result.file) { + file_matched = true; + let start_matched = result.row_range.contains(&expected_result.lines.start()); + let end_matched = result.row_range.contains(&expected_result.lines.end()); + + if start_matched || end_matched { + range_overlapped = true; + } + + if start_matched && end_matched { + range_covered = true; + covered_result_indices.push(ix); + break; + } + } + } + + if range_covered { + project_covered_result_count += 1 + }; + if range_overlapped { + project_overlapped_result_count += 1 + }; + if file_matched { + project_covered_file_count += 1 }; - - overlapped_result_count += query_results.overlapped_result_count; - covered_result_count += query_results.covered_result_count; - covered_file_count += query_results.covered_file_count; - total_result_count += query_results.total_result_count; - - println!("{}", serde_json::to_string(&query_results).unwrap()); } + let outcome_repo = evaluation_project.repo.clone(); - user_store - .update(cx, |_, _| { - drop(semantic_index); - drop(project); - drop(worktree); - drop(project_index); - }) - .unwrap(); + 
let query_results = EvaluationQueryOutcome { + repo: outcome_repo, + query: query.query, + total_result_count: query.expected_results.len(), + covered_result_count: project_covered_result_count, + overlapped_result_count: project_overlapped_result_count, + covered_file_count: project_covered_file_count, + expected_results: query.expected_results, + actual_results: results + .iter() + .map(|result| EvaluationSearchResult { + file: result.path.to_string_lossy().to_string(), + lines: result.row_range.clone(), + }) + .collect(), + covered_result_indices, + }; + + counts.overlapped_results += query_results.overlapped_result_count; + counts.covered_results += query_results.covered_result_count; + counts.covered_files += query_results.covered_file_count; + counts.total_results += query_results.total_result_count; + + println!("{}", serde_json::to_string(&query_results)?); } - eprint!( - "Running evals. {}/{} covered. {}/{} overlapped. {}/{} files captured.", - covered_result_count, - total_result_count, - overlapped_result_count, - total_result_count, - covered_file_count, - total_result_count, - ); - - Ok(()) + user_store.update(cx, |_, _| { + drop(semantic_index); + drop(project); + drop(worktree); + drop(project_index); + }) } async fn wait_for_indexing_complete( @@ -524,7 +599,7 @@ async fn fetch_eval_repos( let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json"); let evaluations: Vec = serde_json::from_slice(&evaluations).unwrap(); - eprint!("Fetching evaluation repositories..."); + eprintln!("Fetching evaluation repositories..."); executor .scoped(move |scope| { From ab1d466c5f46fbaf84615dc39f142cfe0c0880e3 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 10:48:27 -0400 Subject: [PATCH 61/96] Remove `replica_id` from `MultiBuffer`s (#18141) This PR removes the `replica_id` field from the `MultiBuffer` struct. 
We were only ever referencing this field to pass when constructing a `MultiBuffer`, and never used it outside of that. Release Notes: - N/A --- crates/assistant/src/assistant_panel.rs | 5 +- crates/assistant/src/inline_assistant.rs | 2 +- crates/collab/src/tests/following_tests.rs | 2 +- .../src/copilot_completion_provider.rs | 4 +- crates/diagnostics/src/diagnostics.rs | 7 +-- crates/editor/src/display_map/block_map.rs | 2 +- crates/editor/src/editor.rs | 22 ++------ crates/editor/src/editor_tests.rs | 20 +++---- crates/editor/src/git.rs | 2 +- crates/editor/src/hunk_diff.rs | 2 +- crates/editor/src/inlay_hint_cache.rs | 4 +- crates/editor/src/items.rs | 3 +- crates/editor/src/movement.rs | 2 +- crates/editor/src/test/editor_test_context.rs | 2 +- crates/multi_buffer/src/multi_buffer.rs | 56 ++++++++----------- crates/search/src/project_search.rs | 3 +- 16 files changed, 54 insertions(+), 84 deletions(-) diff --git a/crates/assistant/src/assistant_panel.rs b/crates/assistant/src/assistant_panel.rs index 364c6f9663..22237eeb07 100644 --- a/crates/assistant/src/assistant_panel.rs +++ b/crates/assistant/src/assistant_panel.rs @@ -2814,9 +2814,8 @@ impl ContextEditor { } else { // If there are multiple buffers or suggestion groups, create a multibuffer let multibuffer = cx.new_model(|cx| { - let replica_id = project.read(cx).replica_id(); - let mut multibuffer = MultiBuffer::new(replica_id, Capability::ReadWrite) - .with_title(resolved_step.title.clone()); + let mut multibuffer = + MultiBuffer::new(Capability::ReadWrite).with_title(resolved_step.title.clone()); for (buffer, groups) in &resolved_step.suggestion_groups { let excerpt_ids = multibuffer.push_excerpts( buffer.clone(), diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index 428b33f3bb..d95b54d3c6 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -1145,7 +1145,7 @@ impl InlineAssistant { let 
deleted_lines_editor = cx.new_view(|cx| { let multi_buffer = cx.new_model(|_| { - MultiBuffer::without_headers(0, language::Capability::ReadOnly) + MultiBuffer::without_headers(language::Capability::ReadOnly) }); multi_buffer.update(cx, |multi_buffer, cx| { multi_buffer.push_excerpts( diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index e66b66a1b4..9a39d6f3eb 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -289,7 +289,7 @@ async fn test_basic_following( .get_open_buffer(&(worktree_id, "2.txt").into(), cx) .unwrap() }); - let mut result = MultiBuffer::new(0, Capability::ReadWrite); + let mut result = MultiBuffer::new(Capability::ReadWrite); result.push_excerpts( buffer_a1, [ExcerptRange { diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index c54fefad6f..3a3361cda1 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -767,7 +767,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("a = 1\nb = 2\n", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("c = 3\nd = 4\n", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ExcerptRange { @@ -1018,7 +1018,7 @@ mod tests { .unwrap(); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); multibuffer.push_excerpts( private_buffer.clone(), [ExcerptRange { diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index eec4f735ec..6876388542 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ 
b/crates/diagnostics/src/diagnostics.rs @@ -156,12 +156,7 @@ impl ProjectDiagnosticsEditor { cx.on_focus_out(&focus_handle, |this, _event, cx| this.focus_out(cx)) .detach(); - let excerpts = cx.new_model(|cx| { - MultiBuffer::new( - project_handle.read(cx).replica_id(), - project_handle.read(cx).capability(), - ) - }); + let excerpts = cx.new_model(|cx| MultiBuffer::new(project_handle.read(cx).capability())); let editor = cx.new_view(|cx| { let mut editor = Editor::for_multibuffer(excerpts.clone(), Some(project_handle.clone()), false, cx); diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 3a298832de..efa026a56c 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -1671,7 +1671,7 @@ mod tests { let mut excerpt_ids = Vec::new(); let multi_buffer = cx.new_model(|cx| { - let mut multi_buffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multi_buffer = MultiBuffer::new(Capability::ReadWrite); excerpt_ids.extend(multi_buffer.push_excerpts( buffer1.clone(), [ExcerptRange { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index f797f82832..eb2dafc24d 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2155,10 +2155,6 @@ impl Editor { }); } - pub fn replica_id(&self, cx: &AppContext) -> ReplicaId { - self.buffer.read(cx).replica_id() - } - pub fn leader_peer_id(&self) -> Option { self.leader_peer_id } @@ -4758,8 +4754,6 @@ impl Editor { title: String, mut cx: AsyncWindowContext, ) -> Result<()> { - let replica_id = this.update(&mut cx, |this, cx| this.replica_id(cx))?; - let mut entries = transaction.0.into_iter().collect::>(); cx.update(|cx| { entries.sort_unstable_by_key(|(buffer, _)| { @@ -4802,8 +4796,7 @@ impl Editor { let mut ranges_to_highlight = Vec::new(); let excerpt_buffer = cx.new_model(|cx| { - let mut multibuffer = - MultiBuffer::new(replica_id, Capability::ReadWrite).with_title(title); + let 
mut multibuffer = MultiBuffer::new(Capability::ReadWrite).with_title(title); for (buffer_handle, transaction) in &entries { let buffer = buffer_handle.read(cx); ranges_to_highlight.extend( @@ -9610,7 +9603,6 @@ impl Editor { }) }) } else if !definitions.is_empty() { - let replica_id = self.replica_id(cx); cx.spawn(|editor, mut cx| async move { let (title, location_tasks, workspace) = editor .update(&mut cx, |editor, cx| { @@ -9663,9 +9655,7 @@ impl Editor { }; let opened = workspace .update(&mut cx, |workspace, cx| { - Self::open_locations_in_multibuffer( - workspace, locations, replica_id, title, split, cx, - ) + Self::open_locations_in_multibuffer(workspace, locations, title, split, cx) }) .ok(); @@ -9762,7 +9752,6 @@ impl Editor { } let (buffer, head) = multi_buffer.text_anchor_for_position(head, cx)?; - let replica_id = self.replica_id(cx); let workspace = self.workspace()?; let project = workspace.read(cx).project().clone(); let references = project.update(cx, |project, cx| project.references(&buffer, head, cx)); @@ -9803,9 +9792,7 @@ impl Editor { ) }) .unwrap(); - Self::open_locations_in_multibuffer( - workspace, locations, replica_id, title, false, cx, - ); + Self::open_locations_in_multibuffer(workspace, locations, title, false, cx); Navigated::Yes }) })) @@ -9815,7 +9802,6 @@ impl Editor { pub fn open_locations_in_multibuffer( workspace: &mut Workspace, mut locations: Vec, - replica_id: ReplicaId, title: String, split: bool, cx: &mut ViewContext, @@ -9827,7 +9813,7 @@ impl Editor { let capability = workspace.project().read(cx).capability(); let excerpt_buffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(replica_id, capability); + let mut multibuffer = MultiBuffer::new(capability); while let Some(location) = locations.next() { let buffer = location.buffer.read(cx); let mut ranges_for_buffer = Vec::new(); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index e11b38ba59..589673447d 100644 --- 
a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -2822,7 +2822,7 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) { Buffer::local("const c: usize = 3;\n", cx).with_language(rust_language, cx) }); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( toml_buffer.clone(), [ExcerptRange { @@ -6671,7 +6671,7 @@ async fn test_multibuffer_format_during_save(cx: &mut gpui::TestAppContext) { .unwrap(); let multi_buffer = cx.new_model(|cx| { - let mut multi_buffer = MultiBuffer::new(0, ReadWrite); + let mut multi_buffer = MultiBuffer::new(ReadWrite); multi_buffer.push_excerpts( buffer_1.clone(), [ @@ -8614,7 +8614,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer.clone(), [ @@ -8698,7 +8698,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) { }); let buffer = cx.new_model(|cx| Buffer::local(initial_text, cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts(buffer, excerpt_ranges, cx); multibuffer }); @@ -8757,7 +8757,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); let mut excerpt1_id = None; let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); excerpt1_id = multibuffer .push_excerpts( buffer.clone(), @@ -8842,7 +8842,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { let buffer = 
cx.new_model(|cx| Buffer::local(sample_text(3, 4, 'a'), cx)); let mut excerpt1_id = None; let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); excerpt1_id = multibuffer .push_excerpts( buffer.clone(), @@ -9230,7 +9230,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) { let cx = &mut VisualTestContext::from_window(*workspace.deref(), cx); let leader = pane.update(cx, |_, cx| { - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(ReadWrite)); cx.new_view(|cx| build_editor(multibuffer.clone(), cx)) }); @@ -10685,7 +10685,7 @@ async fn test_multibuffer_reverts(cx: &mut gpui::TestAppContext) { diff_every_buffer_row(&buffer_3, sample_text_3.clone(), cols, cx); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ @@ -10825,7 +10825,7 @@ async fn test_mutlibuffer_in_navigation_history(cx: &mut gpui::TestAppContext) { let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text_3.clone(), cx)); let multi_buffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ @@ -11764,7 +11764,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext) }); let multi_buffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index 665c649e6e..63b083faa8 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -195,7 +195,7 @@ mod tests { cx.background_executor.run_until_parked(); let multibuffer = 
cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, ReadWrite); + let mut multibuffer = MultiBuffer::new(ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 5dc73634bd..361ea6246e 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -764,7 +764,7 @@ fn editor_with_deleted_text( let parent_editor = cx.view().downgrade(); let editor = cx.new_view(|cx| { let multi_buffer = - cx.new_model(|_| MultiBuffer::without_headers(0, language::Capability::ReadOnly)); + cx.new_model(|_| MultiBuffer::without_headers(language::Capability::ReadOnly)); multi_buffer.update(cx, |multi_buffer, cx| { multi_buffer.push_excerpts( diff_base_buffer, diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index 24ccf64c4c..ca2db70a70 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -2607,7 +2607,7 @@ pub mod tests { .await .unwrap(); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ @@ -2957,7 +2957,7 @@ pub mod tests { }) .await .unwrap(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let (buffer_1_excerpts, buffer_2_excerpts) = multibuffer.update(cx, |multibuffer, cx| { let buffer_1_excerpts = multibuffer.push_excerpts( buffer_1.clone(), diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 3d04eb82d3..1d301f2ee6 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -68,7 +68,6 @@ impl FollowableItem for Editor { unreachable!() }; - let replica_id = project.read(cx).replica_id(); let buffer_ids = state .excerpts .iter() @@ -92,7 +91,7 @@ impl FollowableItem 
for Editor { if state.singleton && buffers.len() == 1 { multibuffer = MultiBuffer::singleton(buffers.pop().unwrap(), cx) } else { - multibuffer = MultiBuffer::new(replica_id, project.read(cx).capability()); + multibuffer = MultiBuffer::new(project.read(cx).capability()); let mut excerpts = state.excerpts.into_iter().peekable(); while let Some(excerpt) = excerpts.peek() { let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else { diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index a9f27d53a6..19e2a4ea95 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -928,7 +928,7 @@ mod tests { let buffer = cx.new_model(|cx| Buffer::local("abc\ndefg\nhijkl\nmn", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.push_excerpts( buffer.clone(), [ diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index 6f8a495895..3e4ef174d4 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -75,7 +75,7 @@ impl EditorTestContext { cx: &mut gpui::TestAppContext, excerpts: [&str; COUNT], ) -> EditorTestContext { - let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(language::Capability::ReadWrite); let buffer = cx.new_model(|cx| { for excerpt in excerpts.into_iter() { let (text, ranges) = marked_text_ranges(excerpt, false); diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index c163dbc07a..f6a61f562a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -67,7 +67,6 @@ pub struct MultiBuffer { subscriptions: Topic, /// If true, the multi-buffer only contains a single [`Buffer`] and a single [`Excerpt`] singleton: bool, - replica_id: 
ReplicaId, history: History, title: Option, capability: Capability, @@ -350,7 +349,7 @@ impl std::ops::Deref for MultiBufferIndentGuide { } impl MultiBuffer { - pub fn new(replica_id: ReplicaId, capability: Capability) -> Self { + pub fn new(capability: Capability) -> Self { Self { snapshot: RefCell::new(MultiBufferSnapshot { show_headers: true, @@ -360,7 +359,6 @@ impl MultiBuffer { subscriptions: Topic::default(), singleton: false, capability, - replica_id, title: None, history: History { next_transaction_id: clock::Lamport::default(), @@ -372,14 +370,13 @@ impl MultiBuffer { } } - pub fn without_headers(replica_id: ReplicaId, capability: Capability) -> Self { + pub fn without_headers(capability: Capability) -> Self { Self { snapshot: Default::default(), buffers: Default::default(), subscriptions: Default::default(), singleton: false, capability, - replica_id, history: History { next_transaction_id: Default::default(), undo_stack: Default::default(), @@ -414,7 +411,6 @@ impl MultiBuffer { subscriptions: Default::default(), singleton: self.singleton, capability: self.capability, - replica_id: self.replica_id, history: self.history.clone(), title: self.title.clone(), } @@ -430,7 +426,7 @@ impl MultiBuffer { } pub fn singleton(buffer: Model, cx: &mut ModelContext) -> Self { - let mut this = Self::new(buffer.read(cx).replica_id(), buffer.read(cx).capability()); + let mut this = Self::new(buffer.read(cx).capability()); this.singleton = true; this.push_excerpts( buffer, @@ -444,10 +440,6 @@ impl MultiBuffer { this } - pub fn replica_id(&self) -> ReplicaId { - self.replica_id - } - /// Returns an up-to-date snapshot of the MultiBuffer. 
pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot { self.sync(cx); @@ -2011,7 +2003,7 @@ impl MultiBuffer { excerpts: [(&str, Vec>); COUNT], cx: &mut gpui::AppContext, ) -> Model { - let multi = cx.new_model(|_| Self::new(0, Capability::ReadWrite)); + let multi = cx.new_model(|_| Self::new(Capability::ReadWrite)); for (text, ranges) in excerpts { let buffer = cx.new_model(|cx| Buffer::local(text, cx)); let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange { @@ -2032,7 +2024,7 @@ impl MultiBuffer { pub fn build_random(rng: &mut impl rand::Rng, cx: &mut gpui::AppContext) -> Model { cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); let mutation_count = rng.gen_range(1..=5); multibuffer.randomly_edit_excerpts(rng, mutation_count, cx); multibuffer @@ -5063,7 +5055,7 @@ mod tests { fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let events = Arc::new(RwLock::new(Vec::::new())); multibuffer.update(cx, |_, cx| { @@ -5306,8 +5298,8 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(10, 3, 'm'), cx)); - let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); - let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let leader_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + let follower_multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let follower_edit_event_count = Arc::new(RwLock::new(0)); 
follower_multibuffer.update(cx, |_, cx| { @@ -5410,7 +5402,7 @@ mod tests { #[gpui::test] fn test_expand_excerpts(cx: &mut AppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts_with_context_lines( @@ -5486,7 +5478,7 @@ mod tests { #[gpui::test] fn test_push_excerpts_with_context_lines(cx: &mut AppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts_with_context_lines( buffer.clone(), @@ -5539,7 +5531,7 @@ mod tests { #[gpui::test] async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) { let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { let snapshot = buffer.read(cx); let ranges = vec![ @@ -5589,7 +5581,7 @@ mod tests { #[gpui::test] fn test_empty_multibuffer(cx: &mut AppContext) { - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let snapshot = multibuffer.read(cx).snapshot(cx); assert_eq!(snapshot.text(), ""); @@ -5628,7 +5620,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("efghi", cx)); let multibuffer = cx.new_model(|cx| { - let mut multibuffer = MultiBuffer::new(0, 
Capability::ReadWrite); + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); multibuffer.push_excerpts( buffer_1.clone(), [ExcerptRange { @@ -5685,7 +5677,7 @@ mod tests { fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local("abcd", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("ABCDEFGHIJKLMNOP", cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); // Create an insertion id in buffer 1 that doesn't exist in buffer 2. // Add an excerpt from buffer 1 that spans this new insertion. @@ -5819,7 +5811,7 @@ mod tests { .unwrap_or(10); let mut buffers: Vec> = Vec::new(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_ids = Vec::::new(); let mut expected_excerpts = Vec::<(Model, Range)>::new(); let mut anchors = Vec::new(); @@ -6283,7 +6275,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local("1234", cx)); let buffer_2 = cx.new_model(|cx| Buffer::local("5678", cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let group_interval = multibuffer.read(cx).history.group_interval; multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( @@ -6418,7 +6410,7 @@ mod tests { fn test_excerpts_in_ranges_no_ranges(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( 
buffer_1.clone(), @@ -6496,7 +6488,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut expected_excerpt_id = ExcerptId(0); multibuffer.update(cx, |multibuffer, cx| { @@ -6557,7 +6549,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); @@ -6623,7 +6615,7 @@ mod tests { let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'r'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); let mut excerpt_3_id = ExcerptId(0); @@ -6698,7 +6690,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); @@ -6764,7 +6756,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| 
Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_len = buffer_1.read(cx).len(); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); let mut excerpt_1_id = ExcerptId(0); let mut excerpt_2_id = ExcerptId(0); @@ -6829,7 +6821,7 @@ mod tests { fn test_split_ranges(cx: &mut AppContext) { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( buffer_1.clone(), @@ -6885,7 +6877,7 @@ mod tests { let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'a'), cx)); let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'g'), cx)); let buffer_3 = cx.new_model(|cx| Buffer::local(sample_text(6, 6, 'm'), cx)); - let multibuffer = cx.new_model(|_| MultiBuffer::new(0, Capability::ReadWrite)); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); multibuffer.update(cx, |multibuffer, cx| { multibuffer.push_excerpts( buffer_1.clone(), diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index c43d4ed454..fac3c55bf4 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -176,12 +176,11 @@ pub struct ProjectSearchBar { impl ProjectSearch { pub fn new(project: Model, cx: &mut ModelContext) -> Self { - let replica_id = project.read(cx).replica_id(); let capability = project.read(cx).capability(); Self { project, - excerpts: cx.new_model(|_| MultiBuffer::new(replica_id, capability)), + excerpts: cx.new_model(|_| MultiBuffer::new(capability)), pending_search: 
Default::default(), match_ranges: Default::default(), active_query: None, From 759646e0a35a2c4586817b79028cb347e3749de4 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 11:45:03 -0400 Subject: [PATCH 62/96] editor: Improve rewrapping when working with comments at different indentation levels (#18146) This PR improves the `editor::Rewrap` command when working with comments that were not all at the same indentation level. We now use a heuristic of finding the most common indentation level for each line, using the deepest indent in the event of a tie. It also removes an `.unwrap()` that would previously lead to a panic in this case. Instead of unwrapping we now log an error to the logs and skip rewrapping for that selection. Release Notes: - Improved the behavior of `editor: rewrap` when working with a selection that contained comments at different indentation levels. --- crates/editor/src/editor.rs | 46 ++++++++++++++++--- crates/editor/src/editor_tests.rs | 74 +++++++++++++++++++++++++++++++ crates/language/src/buffer.rs | 4 +- 3 files changed, 116 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index eb2dafc24d..33eb51cb0e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6736,9 +6736,31 @@ impl Editor { } } - let row = selection.head().row; - let indent_size = buffer.indent_size_for_line(MultiBufferRow(row)); - let indent_end = Point::new(row, indent_size.len); + // Since not all lines in the selection may be at the same indent + // level, choose the indent size that is the most common between all + // of the lines. + // + // If there is a tie, we use the deepest indent. 
+ let (indent_size, indent_end) = { + let mut indent_size_occurrences = HashMap::default(); + let mut rows_by_indent_size = HashMap::>::default(); + + for row in start_row..=end_row { + let indent = buffer.indent_size_for_line(MultiBufferRow(row)); + rows_by_indent_size.entry(indent).or_default().push(row); + *indent_size_occurrences.entry(indent).or_insert(0) += 1; + } + + let indent_size = indent_size_occurrences + .into_iter() + .max_by_key(|(indent, count)| (*count, indent.len)) + .map(|(indent, _)| indent) + .unwrap_or_default(); + let row = rows_by_indent_size[&indent_size][0]; + let indent_end = Point::new(row, indent_size.len); + + (indent_size, indent_end) + }; let mut line_prefix = indent_size.chars().collect::(); @@ -6788,10 +6810,22 @@ impl Editor { let start = Point::new(start_row, 0); let end = Point::new(end_row, buffer.line_len(MultiBufferRow(end_row))); let selection_text = buffer.text_for_range(start..end).collect::(); - let unwrapped_text = selection_text + let Some(lines_without_prefixes) = selection_text .lines() - .map(|line| line.strip_prefix(&line_prefix).unwrap()) - .join(" "); + .map(|line| { + line.strip_prefix(&line_prefix) + .or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start())) + .ok_or_else(|| { + anyhow!("line did not start with prefix {line_prefix:?}: {line:?}") + }) + }) + .collect::, _>>() + .log_err() + else { + continue; + }; + + let unwrapped_text = lines_without_prefixes.join(" "); let wrap_column = buffer .settings_at(Point::new(start_row, 0), cx) .preferred_line_length as usize; diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 589673447d..85684db818 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4249,6 +4249,80 @@ async fn test_rewrap(cx: &mut TestAppContext) { cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); cx.assert_editor_state(wrapped_text); } + + // Test rewrapping unaligned comments in a selection. 
+ { + let language = Arc::new(Language::new( + LanguageConfig { + line_comments: vec!["// ".into(), "/// ".into()], + ..LanguageConfig::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + let unwrapped_text = indoc! {" + fn foo() { + if true { + « // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit purus, a ornare lacus gravida vitae. + // Praesent semper egestas tellus id dignissim.ˇ» + do_something(); + } else { + // + } + + } + "}; + + let wrapped_text = indoc! {" + fn foo() { + if true { + // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus + // mollis elit purus, a ornare lacus gravida vitae. Praesent semper + // egestas tellus id dignissim.ˇ + do_something(); + } else { + // + } + + } + "}; + + cx.set_state(unwrapped_text); + cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); + cx.assert_editor_state(wrapped_text); + + let unwrapped_text = indoc! {" + fn foo() { + if true { + «ˇ // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus mollis elit purus, a ornare lacus gravida vitae. + // Praesent semper egestas tellus id dignissim.» + do_something(); + } else { + // + } + + } + "}; + + let wrapped_text = indoc! {" + fn foo() { + if true { + // Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus + // mollis elit purus, a ornare lacus gravida vitae. Praesent semper + // egestas tellus id dignissim.ˇ + do_something(); + } else { + // + } + + } + "}; + + cx.set_state(unwrapped_text); + cx.update_editor(|e, cx| e.rewrap(&Rewrap, cx)); + cx.assert_editor_state(wrapped_text); + } } #[gpui::test] diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 08fc1ccdb4..acb57273e3 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -144,7 +144,7 @@ pub struct BufferSnapshot { /// The kind and amount of indentation in a particular line. 
For now, /// assumes that indentation is all the same character. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] pub struct IndentSize { /// The number of bytes that comprise the indentation. pub len: u32, @@ -153,7 +153,7 @@ pub struct IndentSize { } /// A whitespace character that's used for indentation. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] pub enum IndentKind { /// An ASCII space character. #[default] From f8195c41e0019b77a56a2eb96c346b601a6c8b89 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 20 Sep 2024 11:52:57 -0400 Subject: [PATCH 63/96] docs: Switch proxy example to socks5h not socks5 (#18142) Very rarely when you have a SOCKS proxy configured do you want local DNS. `socks5` does local DNS. `socks5h` does remote DNS. --- assets/settings/default.json | 2 +- docs/src/configuring-zed.md | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index a9e1865258..537ad12082 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1029,7 +1029,7 @@ // environment variables. // // Examples: - // - "proxy": "socks5://localhost:10808" + // - "proxy": "socks5h://localhost:10808" // - "proxy": "http://127.0.0.1:10809" "proxy": null, // Set to configure aliases for the command palette. diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 1befa7d93a..de7433bf5d 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -1127,10 +1127,10 @@ The following URI schemes are supported: - `http` - `https` -- `socks4` -- `socks4a` -- `socks5` -- `socks5h` +- `socks4` - SOCKS4 proxy with local DNS +- `socks4a` - SOCKS4 proxy with remote DNS +- `socks5` - SOCKS5 proxy with local DNS +- `socks5h` - SOCKS5 proxy with remote DNS `http` will be used when no scheme is specified. 
@@ -1148,7 +1148,7 @@ Or to set a `socks5` proxy: ```json { - "proxy": "socks5://localhost:10808" + "proxy": "socks5h://localhost:10808" } ``` From 99bef273009a62b416300daa22b9a14910b5ca91 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8B=90=E7=8B=B8?= <134658521+Huliiiiii@users.noreply.github.com> Date: Sat, 21 Sep 2024 00:20:14 +0800 Subject: [PATCH 64/96] Add escape string highlights to JSON and JSONC files (#18138) Release Notes: - Added escape string highlights to JSON and JSONC files --- crates/languages/src/json/highlights.scm | 1 + crates/languages/src/jsonc/highlights.scm | 1 + 2 files changed, 2 insertions(+) diff --git a/crates/languages/src/json/highlights.scm b/crates/languages/src/json/highlights.scm index 7116805109..8cf7a6d20d 100644 --- a/crates/languages/src/json/highlights.scm +++ b/crates/languages/src/json/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string +(escape_sequence) @string.escape (pair key: (string) @property.json_key) diff --git a/crates/languages/src/jsonc/highlights.scm b/crates/languages/src/jsonc/highlights.scm index 7116805109..8cf7a6d20d 100644 --- a/crates/languages/src/jsonc/highlights.scm +++ b/crates/languages/src/jsonc/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string +(escape_sequence) @string.escape (pair key: (string) @property.json_key) From d97427f69eb46b62b4decac7ee88f5890a8a575c Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Fri, 20 Sep 2024 12:48:48 -0400 Subject: [PATCH 65/96] chore: Update flake inputs (#18150) Release Notes: - N/A --- flake.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/flake.lock b/flake.lock index 2b421a9efb..a5b7a7a6ae 100644 --- a/flake.lock +++ b/flake.lock @@ -23,11 +23,11 @@ "rust-analyzer-src": "rust-analyzer-src" }, "locked": { - "lastModified": 1726554553, - "narHash": "sha256-xakDhIS1c1VgJc/NMOLj05yBsTdlXKMEYz6wC8Hdshc=", + "lastModified": 1726813972, + "narHash": 
"sha256-t6turZgoSAVgj7hn5mxzNlLOeVeZvymFo8+ymB52q34=", "owner": "nix-community", "repo": "fenix", - "rev": "1f59d7585aa06d2c327960d397bea4067d8fee98", + "rev": "251caeafc75b710282ee7e375800f75f4c8c5727", "type": "github" }, "original": { @@ -53,11 +53,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1726463316, - "narHash": "sha256-gI9kkaH0ZjakJOKrdjaI/VbaMEo9qBbSUl93DnU7f4c=", + "lastModified": 1726642912, + "narHash": "sha256-wiZzKGHRAhItEuoE599Wm3ic+Lg/NykuBvhb+awf7N8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "99dc8785f6a0adac95f5e2ab05cc2e1bf666d172", + "rev": "395c52d142ec1df377acd67db6d4a22950b02a98", "type": "github" }, "original": { From 9f6ff29a54aeeb1fac22e3d5315d47705d47cb31 Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Fri, 20 Sep 2024 12:57:35 -0400 Subject: [PATCH 66/96] Reuse OpenAI low_speed_timeout setting for zed.dev provider (#18144) Release Notes: - N/A --- Cargo.lock | 1 + crates/language_model/Cargo.toml | 1 + crates/language_model/src/provider/cloud.rs | 22 +++++++++++++++++++-- crates/language_model/src/settings.rs | 9 +++++++++ 4 files changed, 31 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 26b8847041..a19506829e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6285,6 +6285,7 @@ dependencies = [ "http_client", "image", "inline_completion_button", + "isahc", "language", "log", "menu", diff --git a/crates/language_model/Cargo.toml b/crates/language_model/Cargo.toml index ef273ac44f..b63428c544 100644 --- a/crates/language_model/Cargo.toml +++ b/crates/language_model/Cargo.toml @@ -32,6 +32,7 @@ futures.workspace = true google_ai = { workspace = true, features = ["schemars"] } gpui.workspace = true http_client.workspace = true +isahc.workspace = true inline_completion_button.workspace = true log.workspace = true menu.workspace = true diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index f8f64ff3b8..58efb4cfe1 100644 
--- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -19,6 +19,7 @@ use gpui::{ Subscription, Task, }; use http_client::{AsyncBody, HttpClient, Method, Response}; +use isahc::config::Configurable; use schemars::JsonSchema; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde_json::value::RawValue; @@ -27,6 +28,7 @@ use smol::{ io::{AsyncReadExt, BufReader}, lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard}, }; +use std::time::Duration; use std::{ future, sync::{Arc, LazyLock}, @@ -56,6 +58,7 @@ fn zed_cloud_provider_additional_models() -> &'static [AvailableModel] { #[derive(Default, Clone, Debug, PartialEq)] pub struct ZedDotDevSettings { pub available_models: Vec, + pub low_speed_timeout: Option, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] @@ -380,6 +383,7 @@ impl CloudLanguageModel { client: Arc, llm_api_token: LlmApiToken, body: PerformCompletionParams, + low_speed_timeout: Option, ) -> Result> { let http_client = &client.http_client(); @@ -387,7 +391,11 @@ impl CloudLanguageModel { let mut did_retry = false; let response = loop { - let request = http_client::Request::builder() + let mut request_builder = http_client::Request::builder(); + if let Some(low_speed_timeout) = low_speed_timeout { + request_builder = request_builder.low_speed_timeout(100, low_speed_timeout); + }; + let request = request_builder .method(Method::POST) .uri(http_client.build_zed_llm_url("/completion", &[])?.as_ref()) .header("Content-Type", "application/json") @@ -501,8 +509,11 @@ impl LanguageModel for CloudLanguageModel { fn stream_completion( &self, request: LanguageModelRequest, - _cx: &AsyncAppContext, + cx: &AsyncAppContext, ) -> BoxFuture<'static, Result>>> { + let openai_low_speed_timeout = + AllLanguageModelSettings::try_read_global(cx, |s| s.openai.low_speed_timeout.unwrap()); + match &self.model { CloudModel::Anthropic(model) => { let request = 
request.into_anthropic(model.id().into(), model.max_output_tokens()); @@ -519,6 +530,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; Ok(map_to_language_model_completion_events(Box::pin( @@ -542,6 +554,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + openai_low_speed_timeout, ) .await?; Ok(open_ai::extract_text_from_events(response_lines(response))) @@ -569,6 +582,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; Ok(google_ai::extract_text_from_events(response_lines( @@ -599,6 +613,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; Ok(open_ai::extract_text_from_events(response_lines(response))) @@ -650,6 +665,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; @@ -694,6 +710,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; @@ -741,6 +758,7 @@ impl LanguageModel for CloudLanguageModel { &request, )?)?, }, + None, ) .await?; diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 80749c0bdb..8888d51e11 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -231,6 +231,7 @@ pub struct GoogleSettingsContent { #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] pub struct ZedDotDevSettingsContent { available_models: Option>, + pub low_speed_timeout_in_seconds: Option, } #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] @@ -333,6 +334,14 @@ impl settings::Settings for AllLanguageModelSettings { .as_ref() .and_then(|s| s.available_models.clone()), ); + if let Some(low_speed_timeout_in_seconds) = value + .zed_dot_dev + .as_ref() + .and_then(|s| s.low_speed_timeout_in_seconds) + { + settings.zed_dot_dev.low_speed_timeout = + Some(Duration::from_secs(low_speed_timeout_in_seconds)); + } merge( &mut settings.google.api_url, From 
8bd624b5db035862ecb89a4cf126167f572712af Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 20 Sep 2024 13:06:43 -0400 Subject: [PATCH 67/96] editor: Remove unneeded blank lines in rewrap test cases (#18152) This PR removes some unneeded blank lines from some of the test cases for `editor::Rewrap`. These weren't meaningful to the test, and their presence could be confusing. Release Notes: - N/A --- crates/editor/src/editor_tests.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 85684db818..5927c22cb0 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4270,7 +4270,6 @@ async fn test_rewrap(cx: &mut TestAppContext) { } else { // } - } "}; @@ -4284,7 +4283,6 @@ async fn test_rewrap(cx: &mut TestAppContext) { } else { // } - } "}; From 601090511bde0cd39985f670d7d2acc895f2594c Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Fri, 20 Sep 2024 13:25:06 -0400 Subject: [PATCH 68/96] Remove `system_id` from all events but `editor_events` (#18154) Release Notes: - N/A --- crates/collab/src/api/events.rs | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/crates/collab/src/api/events.rs b/crates/collab/src/api/events.rs index 008c76e048..f8ae532013 100644 --- a/crates/collab/src/api/events.rs +++ b/crates/collab/src/api/events.rs @@ -905,7 +905,6 @@ impl AssistantEventRow { #[derive(Debug, clickhouse::Row, Serialize)] pub struct CpuEventRow { - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -944,7 +943,6 @@ impl CpuEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -968,7 +966,6 @@ pub struct MemoryEventRow { os_version: String, // 
ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1000,7 +997,6 @@ impl MemoryEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1024,7 +1020,6 @@ pub struct AppEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1055,7 +1050,6 @@ impl AppEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1078,7 +1072,6 @@ pub struct SettingEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1109,7 +1102,6 @@ impl SettingEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, @@ -1133,7 +1125,6 @@ pub struct ExtensionEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, session_id: Option, is_staff: Option, @@ -1169,7 +1160,6 @@ impl ExtensionEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, 
@@ -1260,7 +1250,6 @@ pub struct EditEventRow { os_version: String, // ClientEventBase - system_id: Option, installation_id: Option, // Note: This column name has a typo in the ClickHouse table. #[serde(rename = "sesssion_id")] @@ -1298,7 +1287,6 @@ impl EditEventRow { release_channel: body.release_channel.clone().unwrap_or_default(), os_name: body.os_name.clone(), os_version: body.os_version.clone().unwrap_or_default(), - system_id: body.system_id.clone(), installation_id: body.installation_id.clone(), session_id: body.session_id.clone(), is_staff: body.is_staff, From 5d12e3ce3a318577ff09811bdf57c91674b1beea Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 20 Sep 2024 14:43:26 -0400 Subject: [PATCH 69/96] preview tabs: Toggle preview tab when saving (#18158) Release Notes: - Saving a preview tab will now mark it as a permanent tab --- crates/workspace/src/pane.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index a5f83f961f..82300690e7 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1595,8 +1595,13 @@ impl Pane { } if can_save { - pane.update(cx, |_, cx| item.save(should_format, project, cx))? - .await?; + pane.update(cx, |pane, cx| { + if pane.is_active_preview_item(item.item_id()) { + pane.set_preview_item_id(None, cx); + } + item.save(should_format, project, cx) + })? + .await?; } else if can_save_as { let abs_path = pane.update(cx, |pane, cx| { pane.workspace From 7dac5594cdb02259c455cee90f57fb610b8c6162 Mon Sep 17 00:00:00 2001 From: Daste Date: Fri, 20 Sep 2024 20:44:13 +0200 Subject: [PATCH 70/96] file_finder: Display file icons (#18091) This PR adds file icons (like in tabs, the project panel and tab switcher) to the file finder popup. It's similar to [tab_switcher icons](https://github.com/zed-industries/zed/pull/17115), but simpler, because we're only dealing with actual files. 
Release Notes: - Added icons to the file finder. Screenshot: ![image](https://github.com/user-attachments/assets/bd6a54c1-cdbd-415a-9a82-0cc7a0bb6ca2) --------- Co-authored-by: Marshall Bowers --- Cargo.lock | 3 +++ assets/settings/default.json | 5 ++++ crates/file_finder/Cargo.toml | 3 +++ crates/file_finder/src/file_finder.rs | 21 +++++++++++++-- .../file_finder/src/file_finder_settings.rs | 27 +++++++++++++++++++ 5 files changed, 57 insertions(+), 2 deletions(-) create mode 100644 crates/file_finder/src/file_finder_settings.rs diff --git a/Cargo.lock b/Cargo.lock index a19506829e..dd07dfa1cf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4326,6 +4326,7 @@ dependencies = [ "ctor", "editor", "env_logger", + "file_icons", "futures 0.3.30", "fuzzy", "gpui", @@ -4333,7 +4334,9 @@ dependencies = [ "menu", "picker", "project", + "schemars", "serde", + "serde_derive", "serde_json", "settings", "text", diff --git a/assets/settings/default.json b/assets/settings/default.json index 537ad12082..8424c5733d 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -496,6 +496,11 @@ // Whether a preview tab gets replaced when code navigation is used to navigate away from the tab. "enable_preview_from_code_navigation": false }, + // Settings related to the file finder. + "file_finder": { + // Whether to show file icons in the file finder. + "file_icons": true + }, // Whether or not to remove any trailing whitespace from lines of a buffer // before saving it. 
"remove_trailing_whitespace_on_save": true, diff --git a/crates/file_finder/Cargo.toml b/crates/file_finder/Cargo.toml index 8f17b191a5..2b4aa5fe30 100644 --- a/crates/file_finder/Cargo.toml +++ b/crates/file_finder/Cargo.toml @@ -16,14 +16,17 @@ doctest = false anyhow.workspace = true collections.workspace = true editor.workspace = true +file_icons.workspace = true futures.workspace = true fuzzy.workspace = true gpui.workspace = true menu.workspace = true picker.workspace = true project.workspace = true +schemars.workspace = true settings.workspace = true serde.workspace = true +serde_derive.workspace = true text.workspace = true theme.workspace = true ui.workspace = true diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 50a14b62db..e1e0998f8a 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -1,11 +1,14 @@ #[cfg(test)] mod file_finder_tests; +mod file_finder_settings; mod new_path_prompt; mod open_path_prompt; use collections::HashMap; use editor::{scroll::Autoscroll, Bias, Editor}; +use file_finder_settings::FileFinderSettings; +use file_icons::FileIcons; use fuzzy::{CharBag, PathMatch, PathMatchCandidate}; use gpui::{ actions, rems, Action, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle, @@ -39,7 +42,12 @@ pub struct FileFinder { init_modifiers: Option, } +pub fn init_settings(cx: &mut AppContext) { + FileFinderSettings::register(cx); +} + pub fn init(cx: &mut AppContext) { + init_settings(cx); cx.observe_new_views(FileFinder::register).detach(); cx.observe_new_views(NewPathPrompt::register).detach(); cx.observe_new_views(OpenPathPrompt::register).detach(); @@ -1041,12 +1049,14 @@ impl PickerDelegate for FileFinderDelegate { selected: bool, cx: &mut ViewContext>, ) -> Option { + let settings = FileFinderSettings::get_global(cx); + let path_match = self .matches .get(ix) .expect("Invalid matches state: no element for index {ix}"); - let icon = match 
&path_match { + let history_icon = match &path_match { Match::History { .. } => Icon::new(IconName::HistoryRerun) .color(Color::Muted) .size(IconSize::Small) @@ -1059,10 +1069,17 @@ impl PickerDelegate for FileFinderDelegate { let (file_name, file_name_positions, full_path, full_path_positions) = self.labels_for_match(path_match, cx, ix); + let file_icon = if settings.file_icons { + FileIcons::get_icon(Path::new(&file_name), cx).map(Icon::from_path) + } else { + None + }; + Some( ListItem::new(ix) .spacing(ListItemSpacing::Sparse) - .end_slot::(Some(icon)) + .start_slot::(file_icon) + .end_slot::(history_icon) .inset(true) .selected(selected) .child( diff --git a/crates/file_finder/src/file_finder_settings.rs b/crates/file_finder/src/file_finder_settings.rs new file mode 100644 index 0000000000..c02008c917 --- /dev/null +++ b/crates/file_finder/src/file_finder_settings.rs @@ -0,0 +1,27 @@ +use anyhow::Result; +use schemars::JsonSchema; +use serde_derive::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +#[derive(Deserialize, Debug, Clone, Copy, PartialEq)] +pub struct FileFinderSettings { + pub file_icons: bool, +} + +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +pub struct FileFinderSettingsContent { + /// Whether to show file icons in the file finder. 
+ /// + /// Default: true + pub file_icons: Option, +} + +impl Settings for FileFinderSettings { + const KEY: Option<&'static str> = Some("file_finder"); + + type FileContent = FileFinderSettingsContent; + + fn load(sources: SettingsSources, _: &mut gpui::AppContext) -> Result { + sources.json_merge() + } +} From 45388805ad4bc5e27c0fcdd6936fb5bce687a8ff Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 20 Sep 2024 13:02:39 -0600 Subject: [PATCH 71/96] vim: gq (#18156) Closes #ISSUE Release Notes: - vim: Added gq/gw for rewrapping lines --- assets/keymaps/vim.json | 13 +++- crates/editor/src/editor.rs | 6 +- crates/vim/src/normal.rs | 30 +++++++- crates/vim/src/rewrap.rs | 114 ++++++++++++++++++++++++++++++ crates/vim/src/state.rs | 3 + crates/vim/src/vim.rs | 2 + crates/vim/test_data/test_gq.json | 12 ++++ 7 files changed, 177 insertions(+), 3 deletions(-) create mode 100644 crates/vim/src/rewrap.rs create mode 100644 crates/vim/test_data/test_gq.json diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 18b38384ef..8d933f19af 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -124,7 +124,6 @@ "g i": "vim::InsertAtPrevious", "g ,": "vim::ChangeListNewer", "g ;": "vim::ChangeListOlder", - "g q": "editor::Rewrap", "shift-h": "vim::WindowTop", "shift-m": "vim::WindowMiddle", "shift-l": "vim::WindowBottom", @@ -240,6 +239,8 @@ "g shift-u": ["vim::PushOperator", "Uppercase"], "g ~": ["vim::PushOperator", "OppositeCase"], "\"": ["vim::PushOperator", "Register"], + "g q": ["vim::PushOperator", "Rewrap"], + "g w": ["vim::PushOperator", "Rewrap"], "q": "vim::ToggleRecord", "shift-q": "vim::ReplayLastRecording", "@": ["vim::PushOperator", "ReplayRegister"], @@ -301,6 +302,7 @@ "i": ["vim::PushOperator", { "Object": { "around": false } }], "a": ["vim::PushOperator", { "Object": { "around": true } }], "g c": "vim::ToggleComments", + "g q": "vim::Rewrap", "\"": ["vim::PushOperator", "Register"], // tree-sitter related commands "[ x": 
"editor::SelectLargerSyntaxNode", @@ -428,6 +430,15 @@ "~": "vim::CurrentLine" } }, + { + "context": "vim_operator == gq", + "bindings": { + "g q": "vim::CurrentLine", + "q": "vim::CurrentLine", + "g w": "vim::CurrentLine", + "w": "vim::CurrentLine" + } + }, { "context": "vim_operator == y", "bindings": { diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 33eb51cb0e..1f4a9376d2 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -6705,6 +6705,10 @@ impl Editor { } pub fn rewrap(&mut self, _: &Rewrap, cx: &mut ViewContext) { + self.rewrap_impl(true, cx) + } + + pub fn rewrap_impl(&mut self, only_text: bool, cx: &mut ViewContext) { let buffer = self.buffer.read(cx).snapshot(cx); let selections = self.selections.all::(cx); let mut selections = selections.iter().peekable(); @@ -6725,7 +6729,7 @@ impl Editor { continue; } - let mut should_rewrap = false; + let mut should_rewrap = !only_text; if let Some(language_scope) = buffer.language_scope_at(selection.head()) { match language_scope.language_name().0.as_ref() { diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 741e09f178..10bf3c8e8d 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -168,6 +168,7 @@ impl Vim { Some(Operator::Yank) => self.yank_motion(motion, times, cx), Some(Operator::AddSurrounds { target: None }) => {} Some(Operator::Indent) => self.indent_motion(motion, times, IndentDirection::In, cx), + Some(Operator::Rewrap) => self.rewrap_motion(motion, times, cx), Some(Operator::Outdent) => self.indent_motion(motion, times, IndentDirection::Out, cx), Some(Operator::Lowercase) => { self.change_case_motion(motion, times, CaseTarget::Lowercase, cx) @@ -199,6 +200,7 @@ impl Vim { Some(Operator::Outdent) => { self.indent_object(object, around, IndentDirection::Out, cx) } + Some(Operator::Rewrap) => self.rewrap_object(object, around, cx), Some(Operator::Lowercase) => { self.change_case_object(object, around, 
CaseTarget::Lowercase, cx) } @@ -478,8 +480,9 @@ impl Vim { } #[cfg(test)] mod test { - use gpui::{KeyBinding, TestAppContext}; + use gpui::{KeyBinding, TestAppContext, UpdateGlobal}; use indoc::indoc; + use language::language_settings::AllLanguageSettings; use settings::SettingsStore; use crate::{ @@ -1386,4 +1389,29 @@ mod test { cx.simulate_shared_keystrokes("2 0 r - ").await; cx.shared_state().await.assert_eq("ˇhello world\n"); } + + #[gpui::test] + async fn test_gq(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_neovim_option("textwidth=5").await; + + cx.update(|cx| { + SettingsStore::update_global(cx, |settings, cx| { + settings.update_user_settings::(cx, |settings| { + settings.defaults.preferred_line_length = Some(5); + }); + }) + }); + + cx.set_shared_state("ˇth th th th th th\n").await; + cx.simulate_shared_keystrokes("g q q").await; + cx.shared_state().await.assert_eq("th th\nth th\nˇth th\n"); + + cx.set_shared_state("ˇth th th th th th\nth th th th th th\n") + .await; + cx.simulate_shared_keystrokes("v j g q").await; + cx.shared_state() + .await + .assert_eq("th th\nth th\nth th\nth th\nth th\nˇth th\n"); + } } diff --git a/crates/vim/src/rewrap.rs b/crates/vim/src/rewrap.rs new file mode 100644 index 0000000000..3e61b3c3a1 --- /dev/null +++ b/crates/vim/src/rewrap.rs @@ -0,0 +1,114 @@ +use crate::{motion::Motion, object::Object, state::Mode, Vim}; +use collections::HashMap; +use editor::{display_map::ToDisplayPoint, scroll::Autoscroll, Bias, Editor}; +use gpui::actions; +use language::SelectionGoal; +use ui::ViewContext; + +actions!(vim, [Rewrap]); + +pub(crate) fn register(editor: &mut Editor, cx: &mut ViewContext) { + Vim::action(editor, cx, |vim, _: &Rewrap, cx| { + vim.record_current_action(cx); + vim.take_count(cx); + vim.store_visual_marks(cx); + vim.update_editor(cx, |vim, editor, cx| { + editor.transact(cx, |editor, cx| { + let mut positions = vim.save_selection_starts(editor, cx); + 
editor.rewrap_impl(false, cx); + editor.change_selections(Some(Autoscroll::fit()), cx, |s| { + s.move_with(|map, selection| { + if let Some(anchor) = positions.remove(&selection.id) { + let mut point = anchor.to_display_point(map); + *point.column_mut() = 0; + selection.collapse_to(point, SelectionGoal::None); + } + }); + }); + }); + }); + if vim.mode.is_visual() { + vim.switch_mode(Mode::Normal, true, cx) + } + }); +} + +impl Vim { + pub(crate) fn rewrap_motion( + &mut self, + motion: Motion, + times: Option, + cx: &mut ViewContext, + ) { + self.stop_recording(cx); + self.update_editor(cx, |_, editor, cx| { + let text_layout_details = editor.text_layout_details(cx); + editor.transact(cx, |editor, cx| { + let mut selection_starts: HashMap<_, _> = Default::default(); + editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = map.display_point_to_anchor(selection.head(), Bias::Right); + selection_starts.insert(selection.id, anchor); + motion.expand_selection(map, selection, times, false, &text_layout_details); + }); + }); + editor.rewrap_impl(false, cx); + editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = selection_starts.remove(&selection.id).unwrap(); + let mut point = anchor.to_display_point(map); + *point.column_mut() = 0; + selection.collapse_to(point, SelectionGoal::None); + }); + }); + }); + }); + } + + pub(crate) fn rewrap_object( + &mut self, + object: Object, + around: bool, + cx: &mut ViewContext, + ) { + self.stop_recording(cx); + self.update_editor(cx, |_, editor, cx| { + editor.transact(cx, |editor, cx| { + let mut original_positions: HashMap<_, _> = Default::default(); + editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = map.display_point_to_anchor(selection.head(), Bias::Right); + original_positions.insert(selection.id, anchor); + object.expand_selection(map, selection, around); + }); + }); + editor.rewrap_impl(false, cx); + 
editor.change_selections(None, cx, |s| { + s.move_with(|map, selection| { + let anchor = original_positions.remove(&selection.id).unwrap(); + let mut point = anchor.to_display_point(map); + *point.column_mut() = 0; + selection.collapse_to(point, SelectionGoal::None); + }); + }); + }); + }); + } +} + +#[cfg(test)] +mod test { + use crate::test::NeovimBackedTestContext; + + #[gpui::test] + async fn test_indent_gv(cx: &mut gpui::TestAppContext) { + let mut cx = NeovimBackedTestContext::new(cx).await; + cx.set_neovim_option("shiftwidth=4").await; + + cx.set_shared_state("ˇhello\nworld\n").await; + cx.simulate_shared_keystrokes("v j > g v").await; + cx.shared_state() + .await + .assert_eq("« hello\n ˇ» world\n"); + } +} diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 1d642e990f..b61cb405e1 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -72,6 +72,7 @@ pub enum Operator { Jump { line: bool }, Indent, Outdent, + Rewrap, Lowercase, Uppercase, OppositeCase, @@ -454,6 +455,7 @@ impl Operator { Operator::Jump { line: true } => "'", Operator::Jump { line: false } => "`", Operator::Indent => ">", + Operator::Rewrap => "gq", Operator::Outdent => "<", Operator::Uppercase => "gU", Operator::Lowercase => "gu", @@ -482,6 +484,7 @@ impl Operator { Operator::Change | Operator::Delete | Operator::Yank + | Operator::Rewrap | Operator::Indent | Operator::Outdent | Operator::Lowercase diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index a4b77b1a7a..701972c19b 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -13,6 +13,7 @@ mod motion; mod normal; mod object; mod replace; +mod rewrap; mod state; mod surrounds; mod visual; @@ -291,6 +292,7 @@ impl Vim { command::register(editor, cx); replace::register(editor, cx); indent::register(editor, cx); + rewrap::register(editor, cx); object::register(editor, cx); visual::register(editor, cx); change_list::register(editor, cx); diff --git a/crates/vim/test_data/test_gq.json 
b/crates/vim/test_data/test_gq.json new file mode 100644 index 0000000000..08cdb12315 --- /dev/null +++ b/crates/vim/test_data/test_gq.json @@ -0,0 +1,12 @@ +{"SetOption":{"value":"textwidth=5"}} +{"Put":{"state":"ˇth th th th th th\n"}} +{"Key":"g"} +{"Key":"q"} +{"Key":"q"} +{"Get":{"state":"th th\nth th\nˇth th\n","mode":"Normal"}} +{"Put":{"state":"ˇth th th th th th\nth th th th th th\n"}} +{"Key":"v"} +{"Key":"j"} +{"Key":"g"} +{"Key":"q"} +{"Get":{"state":"th th\nth th\nth th\nth th\nth th\nˇth th\n","mode":"Normal"}} From 7d62fda5a38d1199e79c30177828dfac2a1ce4b3 Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Sat, 21 Sep 2024 03:49:40 +0800 Subject: [PATCH 72/96] file_finder: Notify user when picker an non-utf8 file (#18136) notify user when using file finder picker an file which cannot open. Release Notes: - N/A --- crates/file_finder/src/file_finder.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index e1e0998f8a..4c3f92d3c1 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -31,7 +31,7 @@ use std::{ use text::Point; use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing}; use util::{paths::PathWithPosition, post_inc, ResultExt}; -use workspace::{item::PreviewTabsSettings, ModalView, Workspace}; +use workspace::{item::PreviewTabsSettings, notifications::NotifyResultExt, ModalView, Workspace}; actions!(file_finder, [SelectPrev]); @@ -1011,7 +1011,7 @@ impl PickerDelegate for FileFinderDelegate { let finder = self.file_finder.clone(); cx.spawn(|_, mut cx| async move { - let item = open_task.await.log_err()?; + let item = open_task.await.notify_async_err(&mut cx)?; if let Some(row) = row { if let Some(active_editor) = item.downcast::() { active_editor From 5905fbb9accdc5d34b7fec0fe021022a5b38420e Mon Sep 17 00:00:00 2001 From: Roy Williams Date: Fri, 20 Sep 2024 16:59:12 -0400 Subject: [PATCH 
73/96] Allow Anthropic custom models to override temperature (#18160) Release Notes: - Allow Anthropic custom models to override "temperature" This also centralized the defaulting of "temperature" to be inside of each model's `into_x` call instead of being sprinkled around the code. --- crates/anthropic/src/anthropic.rs | 14 ++++++++++++++ crates/assistant/src/context.rs | 2 +- crates/assistant/src/inline_assistant.rs | 2 +- crates/assistant/src/prompt_library.rs | 2 +- .../assistant/src/slash_command/auto_command.rs | 2 +- .../assistant/src/terminal_inline_assistant.rs | 2 +- crates/language_model/src/provider/anthropic.rs | 10 ++++++++-- crates/language_model/src/provider/cloud.rs | 16 +++++++++++++--- crates/language_model/src/provider/ollama.rs | 2 +- crates/language_model/src/request.rs | 15 ++++++++++----- crates/language_model/src/settings.rs | 2 ++ crates/semantic_index/src/summary_index.rs | 2 +- 12 files changed, 54 insertions(+), 17 deletions(-) diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index f960dc541a..91b6723e90 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -49,6 +49,7 @@ pub enum Model { /// Indicates whether this custom model supports caching. cache_configuration: Option, max_output_tokens: Option, + default_temperature: Option, }, } @@ -124,6 +125,19 @@ impl Model { } } + pub fn default_temperature(&self) -> f32 { + match self { + Self::Claude3_5Sonnet + | Self::Claude3Opus + | Self::Claude3Sonnet + | Self::Claude3Haiku => 1.0, + Self::Custom { + default_temperature, + .. 
+ } => default_temperature.unwrap_or(1.0), + } + } + pub fn tool_model_id(&self) -> &str { if let Self::Custom { tool_override: Some(tool_override), diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 830c098049..97a5b3ea98 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -2180,7 +2180,7 @@ impl Context { messages: Vec::new(), tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }; for message in self.messages(cx) { if message.status != MessageStatus::Done { diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index d95b54d3c6..f2428c3a2e 100644 --- a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -2732,7 +2732,7 @@ impl CodegenAlternative { messages, tools: Vec::new(), stop: Vec::new(), - temperature: 1., + temperature: None, }) } diff --git a/crates/assistant/src/prompt_library.rs b/crates/assistant/src/prompt_library.rs index 76ee95d507..24e20a18a7 100644 --- a/crates/assistant/src/prompt_library.rs +++ b/crates/assistant/src/prompt_library.rs @@ -796,7 +796,7 @@ impl PromptLibrary { }], tools: Vec::new(), stop: Vec::new(), - temperature: 1., + temperature: None, }, cx, ) diff --git a/crates/assistant/src/slash_command/auto_command.rs b/crates/assistant/src/slash_command/auto_command.rs index e1f20c311b..14cee29682 100644 --- a/crates/assistant/src/slash_command/auto_command.rs +++ b/crates/assistant/src/slash_command/auto_command.rs @@ -216,7 +216,7 @@ async fn commands_for_summaries( }], tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }; while let Some(current_summaries) = stack.pop() { diff --git a/crates/assistant/src/terminal_inline_assistant.rs b/crates/assistant/src/terminal_inline_assistant.rs index caf819bae5..e1a26d8510 100644 --- a/crates/assistant/src/terminal_inline_assistant.rs +++ b/crates/assistant/src/terminal_inline_assistant.rs @@ 
-284,7 +284,7 @@ impl TerminalInlineAssistant { messages, tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }) } diff --git a/crates/language_model/src/provider/anthropic.rs b/crates/language_model/src/provider/anthropic.rs index 1e3d275094..86538bec49 100644 --- a/crates/language_model/src/provider/anthropic.rs +++ b/crates/language_model/src/provider/anthropic.rs @@ -51,6 +51,7 @@ pub struct AvailableModel { /// Configuration of Anthropic's caching API. pub cache_configuration: Option, pub max_output_tokens: Option, + pub default_temperature: Option, } pub struct AnthropicLanguageModelProvider { @@ -200,6 +201,7 @@ impl LanguageModelProvider for AnthropicLanguageModelProvider { } }), max_output_tokens: model.max_output_tokens, + default_temperature: model.default_temperature, }, ); } @@ -375,8 +377,11 @@ impl LanguageModel for AnthropicModel { request: LanguageModelRequest, cx: &AsyncAppContext, ) -> BoxFuture<'static, Result>>> { - let request = - request.into_anthropic(self.model.id().into(), self.model.max_output_tokens()); + let request = request.into_anthropic( + self.model.id().into(), + self.model.default_temperature(), + self.model.max_output_tokens(), + ); let request = self.stream_completion(request, cx); let future = self.request_limiter.stream(async move { let response = request.await.map_err(|err| anyhow!(err))?; @@ -405,6 +410,7 @@ impl LanguageModel for AnthropicModel { ) -> BoxFuture<'static, Result>>> { let mut request = request.into_anthropic( self.model.tool_model_id().into(), + self.model.default_temperature(), self.model.max_output_tokens(), ); request.tool_choice = Some(anthropic::ToolChoice::Tool { diff --git a/crates/language_model/src/provider/cloud.rs b/crates/language_model/src/provider/cloud.rs index 58efb4cfe1..606a6fbace 100644 --- a/crates/language_model/src/provider/cloud.rs +++ b/crates/language_model/src/provider/cloud.rs @@ -87,6 +87,8 @@ pub struct AvailableModel { pub tool_override: Option, /// 
Indicates whether this custom model supports caching. pub cache_configuration: Option, + /// The default temperature to use for this model. + pub default_temperature: Option, } pub struct CloudLanguageModelProvider { @@ -255,6 +257,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider { min_total_token: config.min_total_token, } }), + default_temperature: model.default_temperature, max_output_tokens: model.max_output_tokens, }), AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom { @@ -516,7 +519,11 @@ impl LanguageModel for CloudLanguageModel { match &self.model { CloudModel::Anthropic(model) => { - let request = request.into_anthropic(model.id().into(), model.max_output_tokens()); + let request = request.into_anthropic( + model.id().into(), + model.default_temperature(), + model.max_output_tokens(), + ); let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); let future = self.request_limiter.stream(async move { @@ -642,8 +649,11 @@ impl LanguageModel for CloudLanguageModel { match &self.model { CloudModel::Anthropic(model) => { - let mut request = - request.into_anthropic(model.tool_model_id().into(), model.max_output_tokens()); + let mut request = request.into_anthropic( + model.tool_model_id().into(), + model.default_temperature(), + model.max_output_tokens(), + ); request.tool_choice = Some(anthropic::ToolChoice::Tool { name: tool_name.clone(), }); diff --git a/crates/language_model/src/provider/ollama.rs b/crates/language_model/src/provider/ollama.rs index 6a3190dee7..a29ff3cf6a 100644 --- a/crates/language_model/src/provider/ollama.rs +++ b/crates/language_model/src/provider/ollama.rs @@ -235,7 +235,7 @@ impl OllamaLanguageModel { options: Some(ChatOptions { num_ctx: Some(self.model.max_tokens), stop: Some(request.stop), - temperature: Some(request.temperature), + temperature: request.temperature.or(Some(1.0)), ..Default::default() }), tools: vec![], diff --git a/crates/language_model/src/request.rs 
b/crates/language_model/src/request.rs index dd480b8aaf..06dde1862a 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -236,7 +236,7 @@ pub struct LanguageModelRequest { pub messages: Vec, pub tools: Vec, pub stop: Vec, - pub temperature: f32, + pub temperature: Option, } impl LanguageModelRequest { @@ -262,7 +262,7 @@ impl LanguageModelRequest { .collect(), stream, stop: self.stop, - temperature: self.temperature, + temperature: self.temperature.unwrap_or(1.0), max_tokens: max_output_tokens, tools: Vec::new(), tool_choice: None, @@ -290,7 +290,7 @@ impl LanguageModelRequest { candidate_count: Some(1), stop_sequences: Some(self.stop), max_output_tokens: None, - temperature: Some(self.temperature as f64), + temperature: self.temperature.map(|t| t as f64).or(Some(1.0)), top_p: None, top_k: None, }), @@ -298,7 +298,12 @@ impl LanguageModelRequest { } } - pub fn into_anthropic(self, model: String, max_output_tokens: u32) -> anthropic::Request { + pub fn into_anthropic( + self, + model: String, + default_temperature: f32, + max_output_tokens: u32, + ) -> anthropic::Request { let mut new_messages: Vec = Vec::new(); let mut system_message = String::new(); @@ -400,7 +405,7 @@ impl LanguageModelRequest { tool_choice: None, metadata: None, stop_sequences: Vec::new(), - temperature: Some(self.temperature), + temperature: self.temperature.or(Some(default_temperature)), top_k: None, top_p: None, } diff --git a/crates/language_model/src/settings.rs b/crates/language_model/src/settings.rs index 8888d51e11..2bf8deb042 100644 --- a/crates/language_model/src/settings.rs +++ b/crates/language_model/src/settings.rs @@ -99,6 +99,7 @@ impl AnthropicSettingsContent { tool_override, cache_configuration, max_output_tokens, + default_temperature, } => Some(provider::anthropic::AvailableModel { name, display_name, @@ -112,6 +113,7 @@ impl AnthropicSettingsContent { }, ), max_output_tokens, + default_temperature, }), _ => None, }) diff --git 
a/crates/semantic_index/src/summary_index.rs b/crates/semantic_index/src/summary_index.rs index 08f25ae028..f4c6d4726c 100644 --- a/crates/semantic_index/src/summary_index.rs +++ b/crates/semantic_index/src/summary_index.rs @@ -562,7 +562,7 @@ impl SummaryIndex { }], tools: Vec::new(), stop: Vec::new(), - temperature: 1.0, + temperature: None, }; let code_len = code.len(); From e309fbda2a95a55a043ad41ead97c568c7aeef19 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 20 Sep 2024 15:09:18 -0700 Subject: [PATCH 74/96] Add a slash command for automatically retrieving relevant context (#17972) * [x] put this slash command behind a feature flag until we release embedding access to the general population * [x] choose a name for this slash command and name the rust module to match Release Notes: - N/A --------- Co-authored-by: Jason Co-authored-by: Richard Co-authored-by: Jason Mancuso <7891333+jvmncs@users.noreply.github.com> Co-authored-by: Richard Feldman --- assets/prompts/project_slash_command.hbs | 8 + crates/assistant/src/assistant.rs | 32 +- crates/assistant/src/context.rs | 5 +- crates/assistant/src/prompts.rs | 15 + crates/assistant/src/slash_command.rs | 2 +- .../slash_command/cargo_workspace_command.rs | 153 ++++++++++ .../src/slash_command/project_command.rs | 257 +++++++++------- .../src/slash_command/search_command.rs | 63 ++-- .../assistant/src/slash_command_settings.rs | 10 +- crates/evals/src/eval.rs | 2 +- crates/semantic_index/examples/index.rs | 2 +- crates/semantic_index/src/embedding.rs | 23 +- crates/semantic_index/src/project_index.rs | 59 ++-- crates/semantic_index/src/semantic_index.rs | 275 +++++++++++++++--- 14 files changed, 683 insertions(+), 223 deletions(-) create mode 100644 assets/prompts/project_slash_command.hbs create mode 100644 crates/assistant/src/slash_command/cargo_workspace_command.rs diff --git a/assets/prompts/project_slash_command.hbs b/assets/prompts/project_slash_command.hbs new file mode 100644 index 
0000000000..6c63f71d89 --- /dev/null +++ b/assets/prompts/project_slash_command.hbs @@ -0,0 +1,8 @@ +A software developer is asking a question about their project. The source files in their project have been indexed into a database of semantic text embeddings. +Your task is to generate a list of 4 diverse search queries that can be run on this embedding database, in order to retrieve a list of code snippets +that are relevant to the developer's question. Redundant search queries will be heavily penalized, so only include another query if it's sufficiently +distinct from previous ones. + +Here is the question that's been asked, together with context that the developer has added manually: + +{{{context_buffer}}} diff --git a/crates/assistant/src/assistant.rs b/crates/assistant/src/assistant.rs index 8b9c66ee55..9cc63af5a1 100644 --- a/crates/assistant/src/assistant.rs +++ b/crates/assistant/src/assistant.rs @@ -41,9 +41,10 @@ use semantic_index::{CloudEmbeddingProvider, SemanticDb}; use serde::{Deserialize, Serialize}; use settings::{update_settings_file, Settings, SettingsStore}; use slash_command::{ - auto_command, context_server_command, default_command, delta_command, diagnostics_command, - docs_command, fetch_command, file_command, now_command, project_command, prompt_command, - search_command, symbols_command, tab_command, terminal_command, workflow_command, + auto_command, cargo_workspace_command, context_server_command, default_command, delta_command, + diagnostics_command, docs_command, fetch_command, file_command, now_command, project_command, + prompt_command, search_command, symbols_command, tab_command, terminal_command, + workflow_command, }; use std::path::PathBuf; use std::sync::Arc; @@ -384,20 +385,33 @@ fn register_slash_commands(prompt_builder: Option>, cx: &mut slash_command_registry.register_command(delta_command::DeltaSlashCommand, true); slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true); 
slash_command_registry.register_command(tab_command::TabSlashCommand, true); - slash_command_registry.register_command(project_command::ProjectSlashCommand, true); + slash_command_registry + .register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true); slash_command_registry.register_command(prompt_command::PromptSlashCommand, true); slash_command_registry.register_command(default_command::DefaultSlashCommand, false); slash_command_registry.register_command(terminal_command::TerminalSlashCommand, true); slash_command_registry.register_command(now_command::NowSlashCommand, false); slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true); + slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); if let Some(prompt_builder) = prompt_builder { slash_command_registry.register_command( workflow_command::WorkflowSlashCommand::new(prompt_builder.clone()), true, ); + cx.observe_flag::({ + let slash_command_registry = slash_command_registry.clone(); + move |is_enabled, _cx| { + if is_enabled { + slash_command_registry.register_command( + project_command::ProjectSlashCommand::new(prompt_builder.clone()), + true, + ); + } + } + }) + .detach(); } - slash_command_registry.register_command(fetch_command::FetchSlashCommand, false); cx.observe_flag::({ let slash_command_registry = slash_command_registry.clone(); @@ -435,10 +449,12 @@ fn update_slash_commands_from_settings(cx: &mut AppContext) { slash_command_registry.unregister_command(docs_command::DocsSlashCommand); } - if settings.project.enabled { - slash_command_registry.register_command(project_command::ProjectSlashCommand, true); + if settings.cargo_workspace.enabled { + slash_command_registry + .register_command(cargo_workspace_command::CargoWorkspaceSlashCommand, true); } else { - slash_command_registry.unregister_command(project_command::ProjectSlashCommand); + slash_command_registry + 
.unregister_command(cargo_workspace_command::CargoWorkspaceSlashCommand); } } diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 97a5b3ea98..1cac47831f 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ -1967,8 +1967,9 @@ impl Context { } pub fn assist(&mut self, cx: &mut ModelContext) -> Option { - let provider = LanguageModelRegistry::read_global(cx).active_provider()?; - let model = LanguageModelRegistry::read_global(cx).active_model()?; + let model_registry = LanguageModelRegistry::read_global(cx); + let provider = model_registry.active_provider()?; + let model = model_registry.active_model()?; let last_message_id = self.get_last_valid_message_id(cx)?; if !provider.is_authenticated(cx) { diff --git a/crates/assistant/src/prompts.rs b/crates/assistant/src/prompts.rs index 3b9f75bac9..106935cb88 100644 --- a/crates/assistant/src/prompts.rs +++ b/crates/assistant/src/prompts.rs @@ -40,6 +40,11 @@ pub struct TerminalAssistantPromptContext { pub user_prompt: String, } +#[derive(Serialize)] +pub struct ProjectSlashCommandPromptContext { + pub context_buffer: String, +} + /// Context required to generate a workflow step resolution prompt. 
#[derive(Debug, Serialize)] pub struct StepResolutionContext { @@ -317,4 +322,14 @@ impl PromptBuilder { pub fn generate_workflow_prompt(&self) -> Result { self.handlebars.lock().render("edit_workflow", &()) } + + pub fn generate_project_slash_command_prompt( + &self, + context_buffer: String, + ) -> Result { + self.handlebars.lock().render( + "project_slash_command", + &ProjectSlashCommandPromptContext { context_buffer }, + ) + } } diff --git a/crates/assistant/src/slash_command.rs b/crates/assistant/src/slash_command.rs index cf957a15c6..e430e35622 100644 --- a/crates/assistant/src/slash_command.rs +++ b/crates/assistant/src/slash_command.rs @@ -18,8 +18,8 @@ use std::{ }; use ui::ActiveTheme; use workspace::Workspace; - pub mod auto_command; +pub mod cargo_workspace_command; pub mod context_server_command; pub mod default_command; pub mod delta_command; diff --git a/crates/assistant/src/slash_command/cargo_workspace_command.rs b/crates/assistant/src/slash_command/cargo_workspace_command.rs new file mode 100644 index 0000000000..baf16d7f01 --- /dev/null +++ b/crates/assistant/src/slash_command/cargo_workspace_command.rs @@ -0,0 +1,153 @@ +use super::{SlashCommand, SlashCommandOutput}; +use anyhow::{anyhow, Context, Result}; +use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; +use fs::Fs; +use gpui::{AppContext, Model, Task, WeakView}; +use language::{BufferSnapshot, LspAdapterDelegate}; +use project::{Project, ProjectPath}; +use std::{ + fmt::Write, + path::Path, + sync::{atomic::AtomicBool, Arc}, +}; +use ui::prelude::*; +use workspace::Workspace; + +pub(crate) struct CargoWorkspaceSlashCommand; + +impl CargoWorkspaceSlashCommand { + async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { + let buffer = fs.load(path_to_cargo_toml).await?; + let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; + + let mut message = String::new(); + writeln!(message, "You are in a Rust project.")?; + + if let Some(workspace) = 
cargo_toml.workspace { + writeln!( + message, + "The project is a Cargo workspace with the following members:" + )?; + for member in workspace.members { + writeln!(message, "- {member}")?; + } + + if !workspace.default_members.is_empty() { + writeln!(message, "The default members are:")?; + for member in workspace.default_members { + writeln!(message, "- {member}")?; + } + } + + if !workspace.dependencies.is_empty() { + writeln!( + message, + "The following workspace dependencies are installed:" + )?; + for dependency in workspace.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } else if let Some(package) = cargo_toml.package { + writeln!( + message, + "The project name is \"{name}\".", + name = package.name + )?; + + let description = package + .description + .as_ref() + .and_then(|description| description.get().ok().cloned()); + if let Some(description) = description.as_ref() { + writeln!(message, "It describes itself as \"{description}\".")?; + } + + if !cargo_toml.dependencies.is_empty() { + writeln!(message, "The following dependencies are installed:")?; + for dependency in cargo_toml.dependencies.keys() { + writeln!(message, "- {dependency}")?; + } + } + } + + Ok(message) + } + + fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { + let worktree = project.read(cx).worktrees(cx).next()?; + let worktree = worktree.read(cx); + let entry = worktree.entry_for_path("Cargo.toml")?; + let path = ProjectPath { + worktree_id: worktree.id(), + path: entry.path.clone(), + }; + Some(Arc::from( + project.read(cx).absolute_path(&path, cx)?.as_path(), + )) + } +} + +impl SlashCommand for CargoWorkspaceSlashCommand { + fn name(&self) -> String { + "cargo-workspace".into() + } + + fn description(&self) -> String { + "insert project workspace metadata".into() + } + + fn menu_text(&self) -> String { + "Insert Project Workspace Metadata".into() + } + + fn complete_argument( + self: Arc, + _arguments: &[String], + _cancel: Arc, + 
_workspace: Option>, + _cx: &mut WindowContext, + ) -> Task>> { + Task::ready(Err(anyhow!("this command does not require argument"))) + } + + fn requires_argument(&self) -> bool { + false + } + + fn run( + self: Arc, + _arguments: &[String], + _context_slash_command_output_sections: &[SlashCommandOutputSection], + _context_buffer: BufferSnapshot, + workspace: WeakView, + _delegate: Option>, + cx: &mut WindowContext, + ) -> Task> { + let output = workspace.update(cx, |workspace, cx| { + let project = workspace.project().clone(); + let fs = workspace.project().read(cx).fs().clone(); + let path = Self::path_to_cargo_toml(project, cx); + let output = cx.background_executor().spawn(async move { + let path = path.with_context(|| "Cargo.toml not found")?; + Self::build_message(fs, &path).await + }); + + cx.foreground_executor().spawn(async move { + let text = output.await?; + let range = 0..text.len(); + Ok(SlashCommandOutput { + text, + sections: vec![SlashCommandOutputSection { + range, + icon: IconName::FileTree, + label: "Project".into(), + metadata: None, + }], + run_commands_in_text: false, + }) + }) + }); + output.unwrap_or_else(|error| Task::ready(Err(error))) + } +} diff --git a/crates/assistant/src/slash_command/project_command.rs b/crates/assistant/src/slash_command/project_command.rs index 3e8596d942..197e91d91a 100644 --- a/crates/assistant/src/slash_command/project_command.rs +++ b/crates/assistant/src/slash_command/project_command.rs @@ -1,90 +1,39 @@ -use super::{SlashCommand, SlashCommandOutput}; -use anyhow::{anyhow, Context, Result}; +use super::{ + create_label_for_command, search_command::add_search_result_section, SlashCommand, + SlashCommandOutput, +}; +use crate::PromptBuilder; +use anyhow::{anyhow, Result}; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; -use fs::Fs; -use gpui::{AppContext, Model, Task, WeakView}; -use language::{BufferSnapshot, LspAdapterDelegate}; -use project::{Project, ProjectPath}; +use 
feature_flags::FeatureFlag; +use gpui::{AppContext, Task, WeakView, WindowContext}; +use language::{Anchor, CodeLabel, LspAdapterDelegate}; +use language_model::{LanguageModelRegistry, LanguageModelTool}; +use schemars::JsonSchema; +use semantic_index::SemanticDb; +use serde::Deserialize; + +pub struct ProjectSlashCommandFeatureFlag; + +impl FeatureFlag for ProjectSlashCommandFeatureFlag { + const NAME: &'static str = "project-slash-command"; +} + use std::{ - fmt::Write, - path::Path, + fmt::Write as _, + ops::DerefMut, sync::{atomic::AtomicBool, Arc}, }; -use ui::prelude::*; +use ui::{BorrowAppContext as _, IconName}; use workspace::Workspace; -pub(crate) struct ProjectSlashCommand; +pub struct ProjectSlashCommand { + prompt_builder: Arc, +} impl ProjectSlashCommand { - async fn build_message(fs: Arc, path_to_cargo_toml: &Path) -> Result { - let buffer = fs.load(path_to_cargo_toml).await?; - let cargo_toml: cargo_toml::Manifest = toml::from_str(&buffer)?; - - let mut message = String::new(); - writeln!(message, "You are in a Rust project.")?; - - if let Some(workspace) = cargo_toml.workspace { - writeln!( - message, - "The project is a Cargo workspace with the following members:" - )?; - for member in workspace.members { - writeln!(message, "- {member}")?; - } - - if !workspace.default_members.is_empty() { - writeln!(message, "The default members are:")?; - for member in workspace.default_members { - writeln!(message, "- {member}")?; - } - } - - if !workspace.dependencies.is_empty() { - writeln!( - message, - "The following workspace dependencies are installed:" - )?; - for dependency in workspace.dependencies.keys() { - writeln!(message, "- {dependency}")?; - } - } - } else if let Some(package) = cargo_toml.package { - writeln!( - message, - "The project name is \"{name}\".", - name = package.name - )?; - - let description = package - .description - .as_ref() - .and_then(|description| description.get().ok().cloned()); - if let Some(description) = 
description.as_ref() { - writeln!(message, "It describes itself as \"{description}\".")?; - } - - if !cargo_toml.dependencies.is_empty() { - writeln!(message, "The following dependencies are installed:")?; - for dependency in cargo_toml.dependencies.keys() { - writeln!(message, "- {dependency}")?; - } - } - } - - Ok(message) - } - - fn path_to_cargo_toml(project: Model, cx: &mut AppContext) -> Option> { - let worktree = project.read(cx).worktrees(cx).next()?; - let worktree = worktree.read(cx); - let entry = worktree.entry_for_path("Cargo.toml")?; - let path = ProjectPath { - worktree_id: worktree.id(), - path: entry.path.clone(), - }; - Some(Arc::from( - project.read(cx).absolute_path(&path, cx)?.as_path(), - )) + pub fn new(prompt_builder: Arc) -> Self { + Self { prompt_builder } } } @@ -93,12 +42,20 @@ impl SlashCommand for ProjectSlashCommand { "project".into() } + fn label(&self, cx: &AppContext) -> CodeLabel { + create_label_for_command("project", &[], cx) + } + fn description(&self) -> String { - "insert project metadata".into() + "Generate semantic searches based on the current context".into() } fn menu_text(&self) -> String { - "Insert Project Metadata".into() + "Project Context".into() + } + + fn requires_argument(&self) -> bool { + false } fn complete_argument( @@ -108,46 +65,126 @@ impl SlashCommand for ProjectSlashCommand { _workspace: Option>, _cx: &mut WindowContext, ) -> Task>> { - Task::ready(Err(anyhow!("this command does not require argument"))) - } - - fn requires_argument(&self) -> bool { - false + Task::ready(Ok(Vec::new())) } fn run( self: Arc, _arguments: &[String], - _context_slash_command_output_sections: &[SlashCommandOutputSection], - _context_buffer: BufferSnapshot, + _context_slash_command_output_sections: &[SlashCommandOutputSection], + context_buffer: language::BufferSnapshot, workspace: WeakView, _delegate: Option>, cx: &mut WindowContext, ) -> Task> { - let output = workspace.update(cx, |workspace, cx| { - let project = 
workspace.project().clone(); - let fs = workspace.project().read(cx).fs().clone(); - let path = Self::path_to_cargo_toml(project, cx); - let output = cx.background_executor().spawn(async move { - let path = path.with_context(|| "Cargo.toml not found")?; - Self::build_message(fs, &path).await - }); + let model_registry = LanguageModelRegistry::read_global(cx); + let current_model = model_registry.active_model(); + let prompt_builder = self.prompt_builder.clone(); - cx.foreground_executor().spawn(async move { - let text = output.await?; - let range = 0..text.len(); - Ok(SlashCommandOutput { - text, - sections: vec![SlashCommandOutputSection { - range, - icon: IconName::FileTree, - label: "Project".into(), + let Some(workspace) = workspace.upgrade() else { + return Task::ready(Err(anyhow::anyhow!("workspace was dropped"))); + }; + let project = workspace.read(cx).project().clone(); + let fs = project.read(cx).fs().clone(); + let Some(project_index) = + cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx)) + else { + return Task::ready(Err(anyhow::anyhow!("no project indexer"))); + }; + + cx.spawn(|mut cx| async move { + let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?; + + let prompt = + prompt_builder.generate_project_slash_command_prompt(context_buffer.text())?; + + let search_queries = current_model + .use_tool::( + language_model::LanguageModelRequest { + messages: vec![language_model::LanguageModelRequestMessage { + role: language_model::Role::User, + content: vec![language_model::MessageContent::Text(prompt)], + cache: false, + }], + tools: vec![], + stop: vec![], + temperature: None, + }, + cx.deref_mut(), + ) + .await? + .search_queries; + + let results = project_index + .read_with(&cx, |project_index, cx| { + project_index.search(search_queries.clone(), 25, cx) + })? 
+ .await?; + + let results = SemanticDb::load_results(results, &fs, &cx).await?; + + cx.background_executor() + .spawn(async move { + let mut output = "Project context:\n".to_string(); + let mut sections = Vec::new(); + + for (ix, query) in search_queries.into_iter().enumerate() { + let start_ix = output.len(); + writeln!(&mut output, "Results for {query}:").unwrap(); + let mut has_results = false; + for result in &results { + if result.query_index == ix { + add_search_result_section(result, &mut output, &mut sections); + has_results = true; + } + } + if has_results { + sections.push(SlashCommandOutputSection { + range: start_ix..output.len(), + icon: IconName::MagnifyingGlass, + label: query.into(), + metadata: None, + }); + output.push('\n'); + } else { + output.truncate(start_ix); + } + } + + sections.push(SlashCommandOutputSection { + range: 0..output.len(), + icon: IconName::Book, + label: "Project context".into(), metadata: None, - }], - run_commands_in_text: false, + }); + + Ok(SlashCommandOutput { + text: output, + sections, + run_commands_in_text: true, + }) }) - }) - }); - output.unwrap_or_else(|error| Task::ready(Err(error))) + .await + }) + } +} + +#[derive(JsonSchema, Deserialize)] +struct SearchQueries { + /// An array of semantic search queries. + /// + /// These queries will be used to search the user's codebase. + /// The function can only accept 4 queries, otherwise it will error. + /// As such, it's important that you limit the length of the search_queries array to 5 queries or less. 
+ search_queries: Vec, +} + +impl LanguageModelTool for SearchQueries { + fn name() -> String { + "search_queries".to_string() + } + + fn description() -> String { + "Generate semantic search queries based on context".to_string() } } diff --git a/crates/assistant/src/slash_command/search_command.rs b/crates/assistant/src/slash_command/search_command.rs index 7e408cad39..f0f3ee3d25 100644 --- a/crates/assistant/src/slash_command/search_command.rs +++ b/crates/assistant/src/slash_command/search_command.rs @@ -7,7 +7,7 @@ use anyhow::Result; use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection}; use feature_flags::FeatureFlag; use gpui::{AppContext, Task, WeakView}; -use language::{CodeLabel, LineEnding, LspAdapterDelegate}; +use language::{CodeLabel, LspAdapterDelegate}; use semantic_index::{LoadedSearchResult, SemanticDb}; use std::{ fmt::Write, @@ -101,7 +101,7 @@ impl SlashCommand for SearchSlashCommand { cx.spawn(|cx| async move { let results = project_index .read_with(&cx, |project_index, cx| { - project_index.search(query.clone(), limit.unwrap_or(5), cx) + project_index.search(vec![query.clone()], limit.unwrap_or(5), cx) })? 
.await?; @@ -112,31 +112,8 @@ impl SlashCommand for SearchSlashCommand { .spawn(async move { let mut text = format!("Search results for {query}:\n"); let mut sections = Vec::new(); - for LoadedSearchResult { - path, - range, - full_path, - file_content, - row_range, - } in loaded_results - { - let section_start_ix = text.len(); - text.push_str(&codeblock_fence_for_path( - Some(&path), - Some(row_range.clone()), - )); - - let mut excerpt = file_content[range].to_string(); - LineEnding::normalize(&mut excerpt); - text.push_str(&excerpt); - writeln!(text, "\n```\n").unwrap(); - let section_end_ix = text.len() - 1; - sections.push(build_entry_output_section( - section_start_ix..section_end_ix, - Some(&full_path), - false, - Some(row_range.start() + 1..row_range.end() + 1), - )); + for loaded_result in &loaded_results { + add_search_result_section(loaded_result, &mut text, &mut sections); } let query = SharedString::from(query); @@ -159,3 +136,35 @@ impl SlashCommand for SearchSlashCommand { }) } } + +pub fn add_search_result_section( + loaded_result: &LoadedSearchResult, + text: &mut String, + sections: &mut Vec>, +) { + let LoadedSearchResult { + path, + full_path, + excerpt_content, + row_range, + .. 
+ } = loaded_result; + let section_start_ix = text.len(); + text.push_str(&codeblock_fence_for_path( + Some(&path), + Some(row_range.clone()), + )); + + text.push_str(&excerpt_content); + if !text.ends_with('\n') { + text.push('\n'); + } + writeln!(text, "```\n").unwrap(); + let section_end_ix = text.len() - 1; + sections.push(build_entry_output_section( + section_start_ix..section_end_ix, + Some(&full_path), + false, + Some(row_range.start() + 1..row_range.end() + 1), + )); +} diff --git a/crates/assistant/src/slash_command_settings.rs b/crates/assistant/src/slash_command_settings.rs index eda950b6a2..c524b37803 100644 --- a/crates/assistant/src/slash_command_settings.rs +++ b/crates/assistant/src/slash_command_settings.rs @@ -10,9 +10,9 @@ pub struct SlashCommandSettings { /// Settings for the `/docs` slash command. #[serde(default)] pub docs: DocsCommandSettings, - /// Settings for the `/project` slash command. + /// Settings for the `/cargo-workspace` slash command. #[serde(default)] - pub project: ProjectCommandSettings, + pub cargo_workspace: CargoWorkspaceCommandSettings, } /// Settings for the `/docs` slash command. @@ -23,10 +23,10 @@ pub struct DocsCommandSettings { pub enabled: bool, } -/// Settings for the `/project` slash command. +/// Settings for the `/cargo-workspace` slash command. #[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)] -pub struct ProjectCommandSettings { - /// Whether `/project` is enabled. +pub struct CargoWorkspaceCommandSettings { + /// Whether `/cargo-workspace` is enabled. 
#[serde(default)] pub enabled: bool, } diff --git a/crates/evals/src/eval.rs b/crates/evals/src/eval.rs index 0580053373..e2c8b42644 100644 --- a/crates/evals/src/eval.rs +++ b/crates/evals/src/eval.rs @@ -438,7 +438,7 @@ async fn run_eval_project( loop { match cx.update(|cx| { let project_index = project_index.read(cx); - project_index.search(query.query.clone(), SEARCH_RESULT_LIMIT, cx) + project_index.search(vec![query.query.clone()], SEARCH_RESULT_LIMIT, cx) }) { Ok(task) => match task.await { Ok(answer) => { diff --git a/crates/semantic_index/examples/index.rs b/crates/semantic_index/examples/index.rs index 0cc3f9f317..c5c2c633a1 100644 --- a/crates/semantic_index/examples/index.rs +++ b/crates/semantic_index/examples/index.rs @@ -98,7 +98,7 @@ fn main() { .update(|cx| { let project_index = project_index.read(cx); let query = "converting an anchor to a point"; - project_index.search(query.into(), 4, cx) + project_index.search(vec![query.into()], 4, cx) }) .unwrap() .await diff --git a/crates/semantic_index/src/embedding.rs b/crates/semantic_index/src/embedding.rs index b05c4ac9da..1e1e0f0be7 100644 --- a/crates/semantic_index/src/embedding.rs +++ b/crates/semantic_index/src/embedding.rs @@ -42,14 +42,23 @@ impl Embedding { self.0.len() } - pub fn similarity(self, other: &Embedding) -> f32 { - debug_assert_eq!(self.0.len(), other.0.len()); - self.0 + pub fn similarity(&self, others: &[Embedding]) -> (f32, usize) { + debug_assert!(others.iter().all(|other| self.0.len() == other.0.len())); + others .iter() - .copied() - .zip(other.0.iter().copied()) - .map(|(a, b)| a * b) - .sum() + .enumerate() + .map(|(index, other)| { + let dot_product: f32 = self + .0 + .iter() + .copied() + .zip(other.0.iter().copied()) + .map(|(a, b)| a * b) + .sum(); + (dot_product, index) + }) + .max_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal)) + .unwrap_or((0.0, 0)) } } diff --git a/crates/semantic_index/src/project_index.rs 
b/crates/semantic_index/src/project_index.rs index 5c35c93fa9..21c036d60a 100644 --- a/crates/semantic_index/src/project_index.rs +++ b/crates/semantic_index/src/project_index.rs @@ -31,20 +31,23 @@ pub struct SearchResult { pub path: Arc, pub range: Range, pub score: f32, + pub query_index: usize, } +#[derive(Debug, PartialEq, Eq)] pub struct LoadedSearchResult { pub path: Arc, - pub range: Range, pub full_path: PathBuf, - pub file_content: String, + pub excerpt_content: String, pub row_range: RangeInclusive, + pub query_index: usize, } pub struct WorktreeSearchResult { pub worktree_id: WorktreeId, pub path: Arc, pub range: Range, + pub query_index: usize, pub score: f32, } @@ -227,7 +230,7 @@ impl ProjectIndex { pub fn search( &self, - query: String, + queries: Vec, limit: usize, cx: &AppContext, ) -> Task>> { @@ -275,15 +278,18 @@ impl ProjectIndex { cx.spawn(|cx| async move { #[cfg(debug_assertions)] let embedding_query_start = std::time::Instant::now(); - log::info!("Searching for {query}"); + log::info!("Searching for {queries:?}"); + let queries: Vec = queries + .iter() + .map(|s| TextToEmbed::new(s.as_str())) + .collect(); - let query_embeddings = embedding_provider - .embed(&[TextToEmbed::new(&query)]) - .await?; - let query_embedding = query_embeddings - .into_iter() - .next() - .ok_or_else(|| anyhow!("no embedding for query"))?; + let query_embeddings = embedding_provider.embed(&queries[..]).await?; + if query_embeddings.len() != queries.len() { + return Err(anyhow!( + "The number of query embeddings does not match the number of queries" + )); + } let mut results_by_worker = Vec::new(); for _ in 0..cx.background_executor().num_cpus() { @@ -292,28 +298,34 @@ impl ProjectIndex { #[cfg(debug_assertions)] let search_start = std::time::Instant::now(); - cx.background_executor() .scoped(|cx| { for results in results_by_worker.iter_mut() { cx.spawn(async { while let Ok((worktree_id, path, chunk)) = chunks_rx.recv().await { - let score = 
chunk.embedding.similarity(&query_embedding); + let (score, query_index) = + chunk.embedding.similarity(&query_embeddings); + let ix = match results.binary_search_by(|probe| { score.partial_cmp(&probe.score).unwrap_or(Ordering::Equal) }) { Ok(ix) | Err(ix) => ix, }; - results.insert( - ix, - WorktreeSearchResult { - worktree_id, - path: path.clone(), - range: chunk.chunk.range.clone(), - score, - }, - ); - results.truncate(limit); + if ix < limit { + results.insert( + ix, + WorktreeSearchResult { + worktree_id, + path: path.clone(), + range: chunk.chunk.range.clone(), + query_index, + score, + }, + ); + if results.len() > limit { + results.pop(); + } + } } }); } @@ -333,6 +345,7 @@ impl ProjectIndex { path: result.path, range: result.range, score: result.score, + query_index: result.query_index, }) })); } diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index 6c97ece024..332b4271a0 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -12,8 +12,13 @@ use anyhow::{Context as _, Result}; use collections::HashMap; use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, Context, Global, Model, WeakModel}; -use project::Project; -use std::{path::PathBuf, sync::Arc}; +use language::LineEnding; +use project::{Project, Worktree}; +use std::{ + cmp::Ordering, + path::{Path, PathBuf}, + sync::Arc, +}; use ui::ViewContext; use util::ResultExt as _; use workspace::Workspace; @@ -77,46 +82,127 @@ impl SemanticDb { } pub async fn load_results( - results: Vec, + mut results: Vec, fs: &Arc, cx: &AsyncAppContext, ) -> Result> { - let mut loaded_results = Vec::new(); - for result in results { - let (full_path, file_content) = result.worktree.read_with(cx, |worktree, _cx| { - let entry_abs_path = worktree.abs_path().join(&result.path); - let mut entry_full_path = PathBuf::from(worktree.root_name()); - entry_full_path.push(&result.path); - let file_content = 
async { - let entry_abs_path = entry_abs_path; - fs.load(&entry_abs_path).await - }; - (entry_full_path, file_content) - })?; - if let Some(file_content) = file_content.await.log_err() { - let range_start = result.range.start.min(file_content.len()); - let range_end = result.range.end.min(file_content.len()); - - let start_row = file_content[0..range_start].matches('\n').count() as u32; - let end_row = file_content[0..range_end].matches('\n').count() as u32; - let start_line_byte_offset = file_content[0..range_start] - .rfind('\n') - .map(|pos| pos + 1) - .unwrap_or_default(); - let end_line_byte_offset = file_content[range_end..] - .find('\n') - .map(|pos| range_end + pos) - .unwrap_or_else(|| file_content.len()); - - loaded_results.push(LoadedSearchResult { - path: result.path, - range: start_line_byte_offset..end_line_byte_offset, - full_path, - file_content, - row_range: start_row..=end_row, - }); + let mut max_scores_by_path = HashMap::<_, (f32, usize)>::default(); + for result in &results { + let (score, query_index) = max_scores_by_path + .entry((result.worktree.clone(), result.path.clone())) + .or_default(); + if result.score > *score { + *score = result.score; + *query_index = result.query_index; } } + + results.sort_by(|a, b| { + let max_score_a = max_scores_by_path[&(a.worktree.clone(), a.path.clone())].0; + let max_score_b = max_scores_by_path[&(b.worktree.clone(), b.path.clone())].0; + max_score_b + .partial_cmp(&max_score_a) + .unwrap_or(Ordering::Equal) + .then_with(|| a.worktree.entity_id().cmp(&b.worktree.entity_id())) + .then_with(|| a.path.cmp(&b.path)) + .then_with(|| a.range.start.cmp(&b.range.start)) + }); + + let mut last_loaded_file: Option<(Model, Arc, PathBuf, String)> = None; + let mut loaded_results = Vec::::new(); + for result in results { + let full_path; + let file_content; + if let Some(last_loaded_file) = + last_loaded_file + .as_ref() + .filter(|(last_worktree, last_path, _, _)| { + last_worktree == &result.worktree && last_path == 
&result.path + }) + { + full_path = last_loaded_file.2.clone(); + file_content = &last_loaded_file.3; + } else { + let output = result.worktree.read_with(cx, |worktree, _cx| { + let entry_abs_path = worktree.abs_path().join(&result.path); + let mut entry_full_path = PathBuf::from(worktree.root_name()); + entry_full_path.push(&result.path); + let file_content = async { + let entry_abs_path = entry_abs_path; + fs.load(&entry_abs_path).await + }; + (entry_full_path, file_content) + })?; + full_path = output.0; + let Some(content) = output.1.await.log_err() else { + continue; + }; + last_loaded_file = Some(( + result.worktree.clone(), + result.path.clone(), + full_path.clone(), + content, + )); + file_content = &last_loaded_file.as_ref().unwrap().3; + }; + + let query_index = max_scores_by_path[&(result.worktree.clone(), result.path.clone())].1; + + let mut range_start = result.range.start.min(file_content.len()); + let mut range_end = result.range.end.min(file_content.len()); + while !file_content.is_char_boundary(range_start) { + range_start += 1; + } + while !file_content.is_char_boundary(range_end) { + range_end += 1; + } + + let start_row = file_content[0..range_start].matches('\n').count() as u32; + let mut end_row = file_content[0..range_end].matches('\n').count() as u32; + let start_line_byte_offset = file_content[0..range_start] + .rfind('\n') + .map(|pos| pos + 1) + .unwrap_or_default(); + let mut end_line_byte_offset = range_end; + if file_content[..end_line_byte_offset].ends_with('\n') { + end_row -= 1; + } else { + end_line_byte_offset = file_content[range_end..] 
+ .find('\n') + .map(|pos| range_end + pos + 1) + .unwrap_or_else(|| file_content.len()); + } + let mut excerpt_content = + file_content[start_line_byte_offset..end_line_byte_offset].to_string(); + LineEnding::normalize(&mut excerpt_content); + + if let Some(prev_result) = loaded_results.last_mut() { + if prev_result.full_path == full_path { + if *prev_result.row_range.end() + 1 == start_row { + prev_result.row_range = *prev_result.row_range.start()..=end_row; + prev_result.excerpt_content.push_str(&excerpt_content); + continue; + } + } + } + + loaded_results.push(LoadedSearchResult { + path: result.path, + full_path, + excerpt_content, + row_range: start_row..=end_row, + query_index, + }); + } + + for result in &mut loaded_results { + while result.excerpt_content.ends_with("\n\n") { + result.excerpt_content.pop(); + result.row_range = + *result.row_range.start()..=result.row_range.end().saturating_sub(1) + } + } + Ok(loaded_results) } @@ -312,7 +398,7 @@ mod tests { .update(|cx| { let project_index = project_index.read(cx); let query = "garbage in, garbage out"; - project_index.search(query.into(), 4, cx) + project_index.search(vec![query.into()], 4, cx) }) .await .unwrap(); @@ -426,4 +512,117 @@ mod tests { ], ); } + + #[gpui::test] + async fn test_load_search_results(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project_path = Path::new("/fake_project"); + + let file1_content = "one\ntwo\nthree\nfour\nfive\n"; + let file2_content = "aaa\nbbb\nccc\nddd\neee\n"; + + fs.insert_tree( + project_path, + json!({ + "file1.txt": file1_content, + "file2.txt": file2_content, + }), + ) + .await; + + let fs = fs as Arc; + let project = Project::test(fs.clone(), [project_path], cx).await; + let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap()); + + // chunk that is already newline-aligned + let search_results = vec![SearchResult { + worktree: worktree.clone(), + path: 
Path::new("file1.txt").into(), + range: 0..file1_content.find("four").unwrap(), + score: 0.5, + query_index: 0, + }]; + assert_eq!( + SemanticDb::load_results(search_results, &fs, &cx.to_async()) + .await + .unwrap(), + &[LoadedSearchResult { + path: Path::new("file1.txt").into(), + full_path: "fake_project/file1.txt".into(), + excerpt_content: "one\ntwo\nthree\n".into(), + row_range: 0..=2, + query_index: 0, + }] + ); + + // chunk that is *not* newline-aligned + let search_results = vec![SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: file1_content.find("two").unwrap() + 1..file1_content.find("four").unwrap() + 2, + score: 0.5, + query_index: 0, + }]; + assert_eq!( + SemanticDb::load_results(search_results, &fs, &cx.to_async()) + .await + .unwrap(), + &[LoadedSearchResult { + path: Path::new("file1.txt").into(), + full_path: "fake_project/file1.txt".into(), + excerpt_content: "two\nthree\nfour\n".into(), + row_range: 1..=3, + query_index: 0, + }] + ); + + // chunks that are adjacent + + let search_results = vec![ + SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: file1_content.find("two").unwrap()..file1_content.len(), + score: 0.6, + query_index: 0, + }, + SearchResult { + worktree: worktree.clone(), + path: Path::new("file1.txt").into(), + range: 0..file1_content.find("two").unwrap(), + score: 0.5, + query_index: 1, + }, + SearchResult { + worktree: worktree.clone(), + path: Path::new("file2.txt").into(), + range: 0..file2_content.len(), + score: 0.8, + query_index: 1, + }, + ]; + assert_eq!( + SemanticDb::load_results(search_results, &fs, &cx.to_async()) + .await + .unwrap(), + &[ + LoadedSearchResult { + path: Path::new("file2.txt").into(), + full_path: "fake_project/file2.txt".into(), + excerpt_content: file2_content.into(), + row_range: 0..=4, + query_index: 1, + }, + LoadedSearchResult { + path: Path::new("file1.txt").into(), + full_path: 
"fake_project/file1.txt".into(), + excerpt_content: file1_content.into(), + row_range: 0..=4, + query_index: 0, + } + ] + ); + } } From 743feb98bcae8e00c8399be03fb27dc2b925bcdb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Fri, 20 Sep 2024 15:28:50 -0700 Subject: [PATCH 75/96] Add the ability to propose changes to a set of buffers (#18170) This PR introduces functionality for creating *branches* of buffers that can be used to preview and edit change sets that haven't yet been applied to the buffers themselves. Release Notes: - N/A --------- Co-authored-by: Marshall Bowers Co-authored-by: Marshall --- Cargo.lock | 1 - crates/assistant/src/context.rs | 9 +- crates/channel/src/channel_buffer.rs | 5 +- crates/clock/src/clock.rs | 83 ++++++---- crates/editor/src/actions.rs | 1 + crates/editor/src/editor.rs | 78 ++++++++-- crates/editor/src/element.rs | 5 +- crates/editor/src/git.rs | 24 +-- crates/editor/src/hunk_diff.rs | 24 +-- crates/editor/src/proposed_changes_editor.rs | 125 +++++++++++++++ crates/editor/src/test.rs | 6 +- crates/git/src/diff.rs | 70 ++++----- crates/language/src/buffer.rs | 154 ++++++++++++++----- crates/language/src/buffer_tests.rs | 146 ++++++++++++++++-- crates/multi_buffer/Cargo.toml | 1 - crates/multi_buffer/src/multi_buffer.rs | 46 +++--- crates/project/src/project.rs | 7 +- crates/project/src/project_tests.rs | 2 +- crates/remote_server/src/headless_project.rs | 7 +- crates/text/src/text.rs | 14 ++ 20 files changed, 622 insertions(+), 186 deletions(-) create mode 100644 crates/editor/src/proposed_changes_editor.rs diff --git a/Cargo.lock b/Cargo.lock index dd07dfa1cf..c0f6751b89 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7055,7 +7055,6 @@ dependencies = [ "ctor", "env_logger", "futures 0.3.30", - "git", "gpui", "itertools 0.13.0", "language", diff --git a/crates/assistant/src/context.rs b/crates/assistant/src/context.rs index 1cac47831f..4f1f885b33 100644 --- a/crates/assistant/src/context.rs +++ b/crates/assistant/src/context.rs @@ 
-1006,9 +1006,12 @@ impl Context { cx: &mut ModelContext, ) { match event { - language::BufferEvent::Operation(operation) => cx.emit(ContextEvent::Operation( - ContextOperation::BufferOperation(operation.clone()), - )), + language::BufferEvent::Operation { + operation, + is_local: true, + } => cx.emit(ContextEvent::Operation(ContextOperation::BufferOperation( + operation.clone(), + ))), language::BufferEvent::Edited => { self.count_remaining_tokens(cx); self.reparse(cx); diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 755e7400e1..0a4a259648 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -175,7 +175,10 @@ impl ChannelBuffer { cx: &mut ModelContext, ) { match event { - language::BufferEvent::Operation(operation) => { + language::BufferEvent::Operation { + operation, + is_local: true, + } => { if *ZED_ALWAYS_ACTIVE { if let language::Operation::UpdateSelections { selections, .. } = operation { if selections.is_empty() { diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index f7d36ed4a8..2b45e4a8fa 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -9,6 +9,8 @@ use std::{ pub use system_clock::*; +pub const LOCAL_BRANCH_REPLICA_ID: u16 = u16::MAX; + /// A unique identifier for each distributed node. pub type ReplicaId = u16; @@ -25,7 +27,10 @@ pub struct Lamport { /// A [vector clock](https://en.wikipedia.org/wiki/Vector_clock). 
#[derive(Clone, Default, Hash, Eq, PartialEq)] -pub struct Global(SmallVec<[u32; 8]>); +pub struct Global { + values: SmallVec<[u32; 8]>, + local_branch_value: u32, +} impl Global { pub fn new() -> Self { @@ -33,41 +38,51 @@ impl Global { } pub fn get(&self, replica_id: ReplicaId) -> Seq { - self.0.get(replica_id as usize).copied().unwrap_or(0) as Seq + if replica_id == LOCAL_BRANCH_REPLICA_ID { + self.local_branch_value + } else { + self.values.get(replica_id as usize).copied().unwrap_or(0) as Seq + } } pub fn observe(&mut self, timestamp: Lamport) { if timestamp.value > 0 { - let new_len = timestamp.replica_id as usize + 1; - if new_len > self.0.len() { - self.0.resize(new_len, 0); - } + if timestamp.replica_id == LOCAL_BRANCH_REPLICA_ID { + self.local_branch_value = cmp::max(self.local_branch_value, timestamp.value); + } else { + let new_len = timestamp.replica_id as usize + 1; + if new_len > self.values.len() { + self.values.resize(new_len, 0); + } - let entry = &mut self.0[timestamp.replica_id as usize]; - *entry = cmp::max(*entry, timestamp.value); + let entry = &mut self.values[timestamp.replica_id as usize]; + *entry = cmp::max(*entry, timestamp.value); + } } } pub fn join(&mut self, other: &Self) { - if other.0.len() > self.0.len() { - self.0.resize(other.0.len(), 0); + if other.values.len() > self.values.len() { + self.values.resize(other.values.len(), 0); } - for (left, right) in self.0.iter_mut().zip(&other.0) { + for (left, right) in self.values.iter_mut().zip(&other.values) { *left = cmp::max(*left, *right); } + + self.local_branch_value = cmp::max(self.local_branch_value, other.local_branch_value); } pub fn meet(&mut self, other: &Self) { - if other.0.len() > self.0.len() { - self.0.resize(other.0.len(), 0); + if other.values.len() > self.values.len() { + self.values.resize(other.values.len(), 0); } let mut new_len = 0; for (ix, (left, right)) in self - .0 + .values .iter_mut() - .zip(other.0.iter().chain(iter::repeat(&0))) + 
.zip(other.values.iter().chain(iter::repeat(&0))) .enumerate() { if *left == 0 { @@ -80,7 +95,8 @@ impl Global { new_len = ix + 1; } } - self.0.resize(new_len, 0); + self.values.resize(new_len, 0); + self.local_branch_value = cmp::min(self.local_branch_value, other.local_branch_value); } pub fn observed(&self, timestamp: Lamport) -> bool { @@ -88,34 +104,44 @@ impl Global { } pub fn observed_any(&self, other: &Self) -> bool { - self.0 + self.values .iter() - .zip(other.0.iter()) + .zip(other.values.iter()) .any(|(left, right)| *right > 0 && left >= right) + || (other.local_branch_value > 0 && self.local_branch_value >= other.local_branch_value) } pub fn observed_all(&self, other: &Self) -> bool { - let mut rhs = other.0.iter(); - self.0.iter().all(|left| match rhs.next() { + let mut rhs = other.values.iter(); + self.values.iter().all(|left| match rhs.next() { Some(right) => left >= right, None => true, }) && rhs.next().is_none() + && self.local_branch_value >= other.local_branch_value } pub fn changed_since(&self, other: &Self) -> bool { - self.0.len() > other.0.len() + self.values.len() > other.values.len() || self - .0 + .values .iter() - .zip(other.0.iter()) + .zip(other.values.iter()) .any(|(left, right)| left > right) + || self.local_branch_value > other.local_branch_value } pub fn iter(&self) -> impl Iterator + '_ { - self.0.iter().enumerate().map(|(replica_id, seq)| Lamport { - replica_id: replica_id as ReplicaId, - value: *seq, - }) + self.values + .iter() + .enumerate() + .map(|(replica_id, seq)| Lamport { + replica_id: replica_id as ReplicaId, + value: *seq, + }) + .chain((self.local_branch_value > 0).then_some(Lamport { + replica_id: LOCAL_BRANCH_REPLICA_ID, + value: self.local_branch_value, + })) } } @@ -192,6 +218,9 @@ impl fmt::Debug for Global { } write!(f, "{}: {}", timestamp.replica_id, timestamp.value)?; } + if self.local_branch_value > 0 { + write!(f, ": {}", self.local_branch_value)?; + } write!(f, "}}") } } diff --git 
a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 93c83af195..2383c7f71a 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -273,6 +273,7 @@ gpui::actions!( NextScreen, OpenExcerpts, OpenExcerptsSplit, + OpenProposedChangesEditor, OpenFile, OpenPermalinkToLine, OpenUrl, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 1f4a9376d2..b1a3d95a0d 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -35,6 +35,7 @@ mod lsp_ext; mod mouse_context_menu; pub mod movement; mod persistence; +mod proposed_changes_editor; mod rust_analyzer_ext; pub mod scroll; mod selections_collection; @@ -46,7 +47,7 @@ mod signature_help; #[cfg(any(test, feature = "test-support"))] pub mod test; -use ::git::diff::{DiffHunk, DiffHunkStatus}; +use ::git::diff::DiffHunkStatus; use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry}; pub(crate) use actions::*; use aho_corasick::AhoCorasick; @@ -98,6 +99,7 @@ use language::{ }; use language::{point_to_lsp, BufferRow, CharClassifier, Runnable, RunnableRange}; use linked_editing_ranges::refresh_linked_ranges; +use proposed_changes_editor::{ProposedChangesBuffer, ProposedChangesEditor}; use similar::{ChangeTag, TextDiff}; use task::{ResolvedTask, TaskTemplate, TaskVariables}; @@ -113,7 +115,9 @@ pub use multi_buffer::{ Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, }; -use multi_buffer::{ExpandExcerptDirection, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16}; +use multi_buffer::{ + ExpandExcerptDirection, MultiBufferDiffHunk, MultiBufferPoint, MultiBufferRow, ToOffsetUtf16, +}; use ordered_float::OrderedFloat; use parking_lot::{Mutex, RwLock}; use project::project_settings::{GitGutterSetting, ProjectSettings}; @@ -6152,7 +6156,7 @@ impl Editor { pub fn prepare_revert_change( revert_changes: &mut HashMap, Rope)>>, multi_buffer: &Model, - hunk: &DiffHunk, + hunk: 
&MultiBufferDiffHunk, cx: &AppContext, ) -> Option<()> { let buffer = multi_buffer.read(cx).buffer(hunk.buffer_id)?; @@ -9338,7 +9342,7 @@ impl Editor { snapshot: &DisplaySnapshot, initial_point: Point, is_wrapped: bool, - hunks: impl Iterator>, + hunks: impl Iterator, cx: &mut ViewContext, ) -> bool { let display_point = initial_point.to_display_point(snapshot); @@ -11885,6 +11889,52 @@ impl Editor { self.searchable } + fn open_proposed_changes_editor( + &mut self, + _: &OpenProposedChangesEditor, + cx: &mut ViewContext, + ) { + let Some(workspace) = self.workspace() else { + cx.propagate(); + return; + }; + + let buffer = self.buffer.read(cx); + let mut new_selections_by_buffer = HashMap::default(); + for selection in self.selections.all::(cx) { + for (buffer, mut range, _) in + buffer.range_to_buffer_ranges(selection.start..selection.end, cx) + { + if selection.reversed { + mem::swap(&mut range.start, &mut range.end); + } + let mut range = range.to_point(buffer.read(cx)); + range.start.column = 0; + range.end.column = buffer.read(cx).line_len(range.end.row); + new_selections_by_buffer + .entry(buffer) + .or_insert(Vec::new()) + .push(range) + } + } + + let proposed_changes_buffers = new_selections_by_buffer + .into_iter() + .map(|(buffer, ranges)| ProposedChangesBuffer { buffer, ranges }) + .collect::>(); + let proposed_changes_editor = cx.new_view(|cx| { + ProposedChangesEditor::new(proposed_changes_buffers, self.project.clone(), cx) + }); + + cx.window_context().defer(move |cx| { + workspace.update(cx, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item(Box::new(proposed_changes_editor), true, true, None, cx); + }); + }); + }); + } + fn open_excerpts_in_split(&mut self, _: &OpenExcerptsSplit, cx: &mut ViewContext) { self.open_excerpts_common(true, cx) } @@ -12399,7 +12449,7 @@ impl Editor { fn hunks_for_selections( multi_buffer_snapshot: &MultiBufferSnapshot, selections: &[Selection], -) -> Vec> { +) -> Vec { let 
buffer_rows_for_selections = selections.iter().map(|selection| { let head = selection.head(); let tail = selection.tail(); @@ -12418,7 +12468,7 @@ fn hunks_for_selections( pub fn hunks_for_rows( rows: impl Iterator>, multi_buffer_snapshot: &MultiBufferSnapshot, -) -> Vec> { +) -> Vec { let mut hunks = Vec::new(); let mut processed_buffer_rows: HashMap>> = HashMap::default(); @@ -12430,14 +12480,14 @@ pub fn hunks_for_rows( // when the caret is just above or just below the deleted hunk. let allow_adjacent = hunk_status(&hunk) == DiffHunkStatus::Removed; let related_to_selection = if allow_adjacent { - hunk.associated_range.overlaps(&query_rows) - || hunk.associated_range.start == query_rows.end - || hunk.associated_range.end == query_rows.start + hunk.row_range.overlaps(&query_rows) + || hunk.row_range.start == query_rows.end + || hunk.row_range.end == query_rows.start } else { // `selected_multi_buffer_rows` are inclusive (e.g. [2..2] means 2nd row is selected) - // `hunk.associated_range` is exclusive (e.g. [2..3] means 2nd row is selected) - hunk.associated_range.overlaps(&selected_multi_buffer_rows) - || selected_multi_buffer_rows.end == hunk.associated_range.start + // `hunk.row_range` is exclusive (e.g. 
[2..3] means 2nd row is selected) + hunk.row_range.overlaps(&selected_multi_buffer_rows) + || selected_multi_buffer_rows.end == hunk.row_range.start }; if related_to_selection { if !processed_buffer_rows @@ -13738,10 +13788,10 @@ impl RowRangeExt for Range { } } -fn hunk_status(hunk: &DiffHunk) -> DiffHunkStatus { +fn hunk_status(hunk: &MultiBufferDiffHunk) -> DiffHunkStatus { if hunk.diff_base_byte_range.is_empty() { DiffHunkStatus::Added - } else if hunk.associated_range.is_empty() { + } else if hunk.row_range.is_empty() { DiffHunkStatus::Removed } else { DiffHunkStatus::Modified diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 47107b9754..d4075431ff 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -346,6 +346,7 @@ impl EditorElement { register_action(view, cx, Editor::toggle_code_actions); register_action(view, cx, Editor::open_excerpts); register_action(view, cx, Editor::open_excerpts_in_split); + register_action(view, cx, Editor::open_proposed_changes_editor); register_action(view, cx, Editor::toggle_soft_wrap); register_action(view, cx, Editor::toggle_tab_bar); register_action(view, cx, Editor::toggle_line_numbers); @@ -3710,11 +3711,11 @@ impl EditorElement { ) .map(|hunk| { let start_display_row = - MultiBufferPoint::new(hunk.associated_range.start.0, 0) + MultiBufferPoint::new(hunk.row_range.start.0, 0) .to_display_point(&snapshot.display_snapshot) .row(); let mut end_display_row = - MultiBufferPoint::new(hunk.associated_range.end.0, 0) + MultiBufferPoint::new(hunk.row_range.end.0, 0) .to_display_point(&snapshot.display_snapshot) .row(); if end_display_row != start_display_row { diff --git a/crates/editor/src/git.rs b/crates/editor/src/git.rs index 63b083faa8..79b78d5d14 100644 --- a/crates/editor/src/git.rs +++ b/crates/editor/src/git.rs @@ -2,9 +2,9 @@ pub mod blame; use std::ops::Range; -use git::diff::{DiffHunk, DiffHunkStatus}; +use git::diff::DiffHunkStatus; use language::Point; -use 
multi_buffer::{Anchor, MultiBufferRow}; +use multi_buffer::{Anchor, MultiBufferDiffHunk}; use crate::{ display_map::{DisplaySnapshot, ToDisplayPoint}, @@ -49,25 +49,25 @@ impl DisplayDiffHunk { } pub fn diff_hunk_to_display( - hunk: &DiffHunk, + hunk: &MultiBufferDiffHunk, snapshot: &DisplaySnapshot, ) -> DisplayDiffHunk { - let hunk_start_point = Point::new(hunk.associated_range.start.0, 0); - let hunk_start_point_sub = Point::new(hunk.associated_range.start.0.saturating_sub(1), 0); + let hunk_start_point = Point::new(hunk.row_range.start.0, 0); + let hunk_start_point_sub = Point::new(hunk.row_range.start.0.saturating_sub(1), 0); let hunk_end_point_sub = Point::new( - hunk.associated_range + hunk.row_range .end .0 .saturating_sub(1) - .max(hunk.associated_range.start.0), + .max(hunk.row_range.start.0), 0, ); let status = hunk_status(hunk); let is_removal = status == DiffHunkStatus::Removed; - let folds_start = Point::new(hunk.associated_range.start.0.saturating_sub(2), 0); - let folds_end = Point::new(hunk.associated_range.end.0 + 2, 0); + let folds_start = Point::new(hunk.row_range.start.0.saturating_sub(2), 0); + let folds_end = Point::new(hunk.row_range.end.0 + 2, 0); let folds_range = folds_start..folds_end; let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| { @@ -87,7 +87,7 @@ pub fn diff_hunk_to_display( } else { let start = hunk_start_point.to_display_point(snapshot).row(); - let hunk_end_row = hunk.associated_range.end.max(hunk.associated_range.start); + let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start); let hunk_end_point = Point::new(hunk_end_row.0, 0); let multi_buffer_start = snapshot.buffer_snapshot.anchor_after(hunk_start_point); @@ -288,7 +288,7 @@ mod tests { assert_eq!( snapshot .git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.associated_range)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) .collect::>(), &expected, ); @@ -296,7 +296,7 @@ mod tests { 
assert_eq!( snapshot .git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12)) - .map(|hunk| (hunk_status(&hunk), hunk.associated_range)) + .map(|hunk| (hunk_status(&hunk), hunk.row_range)) .collect::>(), expected .iter() diff --git a/crates/editor/src/hunk_diff.rs b/crates/editor/src/hunk_diff.rs index 361ea6246e..917d07ec4e 100644 --- a/crates/editor/src/hunk_diff.rs +++ b/crates/editor/src/hunk_diff.rs @@ -4,11 +4,12 @@ use std::{ }; use collections::{hash_map, HashMap, HashSet}; -use git::diff::{DiffHunk, DiffHunkStatus}; +use git::diff::DiffHunkStatus; use gpui::{Action, AppContext, CursorStyle, Hsla, Model, MouseButton, Subscription, Task, View}; use language::Buffer; use multi_buffer::{ - Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferRow, MultiBufferSnapshot, ToPoint, + Anchor, AnchorRangeExt, ExcerptRange, MultiBuffer, MultiBufferDiffHunk, MultiBufferRow, + MultiBufferSnapshot, ToPoint, }; use settings::SettingsStore; use text::{BufferId, Point}; @@ -190,9 +191,9 @@ impl Editor { .buffer_snapshot .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) .filter(|hunk| { - let hunk_display_row_range = Point::new(hunk.associated_range.start.0, 0) + let hunk_display_row_range = Point::new(hunk.row_range.start.0, 0) .to_display_point(&snapshot.display_snapshot) - ..Point::new(hunk.associated_range.end.0, 0) + ..Point::new(hunk.row_range.end.0, 0) .to_display_point(&snapshot.display_snapshot); let row_range_end = display_rows_with_expanded_hunks.get(&hunk_display_row_range.start.row()); @@ -203,7 +204,7 @@ impl Editor { fn toggle_hunks_expanded( &mut self, - hunks_to_toggle: Vec>, + hunks_to_toggle: Vec, cx: &mut ViewContext, ) { let previous_toggle_task = self.expanded_hunks.hunk_update_tasks.remove(&None); @@ -274,8 +275,8 @@ impl Editor { }); for remaining_hunk in hunks_to_toggle { let remaining_hunk_point_range = - Point::new(remaining_hunk.associated_range.start.0, 0) - ..Point::new(remaining_hunk.associated_range.end.0, 
0); + Point::new(remaining_hunk.row_range.start.0, 0) + ..Point::new(remaining_hunk.row_range.end.0, 0); hunks_to_expand.push(HoveredHunk { status: hunk_status(&remaining_hunk), multi_buffer_range: remaining_hunk_point_range @@ -705,7 +706,7 @@ impl Editor { fn to_diff_hunk( hovered_hunk: &HoveredHunk, multi_buffer_snapshot: &MultiBufferSnapshot, -) -> Option> { +) -> Option { let buffer_id = hovered_hunk .multi_buffer_range .start @@ -716,9 +717,8 @@ fn to_diff_hunk( let point_range = hovered_hunk .multi_buffer_range .to_point(multi_buffer_snapshot); - Some(DiffHunk { - associated_range: MultiBufferRow(point_range.start.row) - ..MultiBufferRow(point_range.end.row), + Some(MultiBufferDiffHunk { + row_range: MultiBufferRow(point_range.start.row)..MultiBufferRow(point_range.end.row), buffer_id, buffer_range, diff_base_byte_range: hovered_hunk.diff_base_byte_range.clone(), @@ -868,7 +868,7 @@ fn editor_with_deleted_text( fn buffer_diff_hunk( buffer_snapshot: &MultiBufferSnapshot, row_range: Range, -) -> Option> { +) -> Option { let mut hunks = buffer_snapshot.git_diff_hunks_in_range( MultiBufferRow(row_range.start.row)..MultiBufferRow(row_range.end.row), ); diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs new file mode 100644 index 0000000000..3979e558a4 --- /dev/null +++ b/crates/editor/src/proposed_changes_editor.rs @@ -0,0 +1,125 @@ +use crate::{Editor, EditorEvent}; +use collections::HashSet; +use futures::{channel::mpsc, future::join_all}; +use gpui::{AppContext, EventEmitter, FocusableView, Model, Render, Subscription, Task, View}; +use language::{Buffer, BufferEvent, Capability}; +use multi_buffer::{ExcerptRange, MultiBuffer}; +use project::Project; +use smol::stream::StreamExt; +use std::{ops::Range, time::Duration}; +use text::ToOffset; +use ui::prelude::*; +use workspace::Item; + +pub struct ProposedChangesEditor { + editor: View, + _subscriptions: Vec, + _recalculate_diffs_task: Task>, + 
recalculate_diffs_tx: mpsc::UnboundedSender>, +} + +pub struct ProposedChangesBuffer { + pub buffer: Model, + pub ranges: Vec>, +} + +impl ProposedChangesEditor { + pub fn new( + buffers: Vec>, + project: Option>, + cx: &mut ViewContext, + ) -> Self { + let mut subscriptions = Vec::new(); + let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite)); + + for buffer in buffers { + let branch_buffer = buffer.buffer.update(cx, |buffer, cx| buffer.branch(cx)); + subscriptions.push(cx.subscribe(&branch_buffer, Self::on_buffer_event)); + + multibuffer.update(cx, |multibuffer, cx| { + multibuffer.push_excerpts( + branch_buffer, + buffer.ranges.into_iter().map(|range| ExcerptRange { + context: range, + primary: None, + }), + cx, + ); + }); + } + + let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded(); + + Self { + editor: cx + .new_view(|cx| Editor::for_multibuffer(multibuffer.clone(), project, true, cx)), + recalculate_diffs_tx, + _recalculate_diffs_task: cx.spawn(|_, mut cx| async move { + let mut buffers_to_diff = HashSet::default(); + while let Some(buffer) = recalculate_diffs_rx.next().await { + buffers_to_diff.insert(buffer); + + loop { + cx.background_executor() + .timer(Duration::from_millis(250)) + .await; + let mut had_further_changes = false; + while let Ok(next_buffer) = recalculate_diffs_rx.try_next() { + buffers_to_diff.insert(next_buffer?); + had_further_changes = true; + } + if !had_further_changes { + break; + } + } + + join_all(buffers_to_diff.drain().filter_map(|buffer| { + buffer + .update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx)) + .ok()? 
+ })) + .await; + } + None + }), + _subscriptions: subscriptions, + } + } + + fn on_buffer_event( + &mut self, + buffer: Model, + event: &BufferEvent, + _cx: &mut ViewContext, + ) { + if let BufferEvent::Edited = event { + self.recalculate_diffs_tx.unbounded_send(buffer).ok(); + } + } +} + +impl Render for ProposedChangesEditor { + fn render(&mut self, _cx: &mut ViewContext) -> impl IntoElement { + self.editor.clone() + } +} + +impl FocusableView for ProposedChangesEditor { + fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle { + self.editor.focus_handle(cx) + } +} + +impl EventEmitter for ProposedChangesEditor {} + +impl Item for ProposedChangesEditor { + type Event = EditorEvent; + + fn tab_icon(&self, _cx: &ui::WindowContext) -> Option { + Some(Icon::new(IconName::Pencil)) + } + + fn tab_content_text(&self, _cx: &WindowContext) -> Option { + Some("Proposed changes".into()) + } +} diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index fcbd3bd423..50214cd723 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -108,16 +108,16 @@ pub fn editor_hunks( .buffer_snapshot .git_diff_hunks_in_range(MultiBufferRow::MIN..MultiBufferRow::MAX) .map(|hunk| { - let display_range = Point::new(hunk.associated_range.start.0, 0) + let display_range = Point::new(hunk.row_range.start.0, 0) .to_display_point(snapshot) .row() - ..Point::new(hunk.associated_range.end.0, 0) + ..Point::new(hunk.row_range.end.0, 0) .to_display_point(snapshot) .row(); let (_, buffer, _) = editor .buffer() .read(cx) - .excerpt_containing(Point::new(hunk.associated_range.start.0, 0), cx) + .excerpt_containing(Point::new(hunk.row_range.start.0, 0), cx) .expect("no excerpt for expanded buffer's hunk start"); let diff_base = buffer .read(cx) diff --git a/crates/git/src/diff.rs b/crates/git/src/diff.rs index 8cc7ee1863..1f7930ce14 100644 --- a/crates/git/src/diff.rs +++ b/crates/git/src/diff.rs @@ -1,7 +1,7 @@ use rope::Rope; use std::{iter, ops::Range}; use 
sum_tree::SumTree; -use text::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, Point}; +use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point}; pub use git2 as libgit; use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; @@ -13,29 +13,30 @@ pub enum DiffHunkStatus { Removed, } -/// A diff hunk, representing a range of consequent lines in a singleton buffer, associated with a generic range. +/// A diff hunk resolved to rows in the buffer. #[derive(Debug, Clone, PartialEq, Eq)] -pub struct DiffHunk { - /// E.g. a range in multibuffer, that has an excerpt added, singleton buffer for which has this diff hunk. - /// Consider a singleton buffer with 10 lines, all of them are modified — so a corresponding diff hunk would have a range 0..10. - /// And a multibuffer with the excerpt of lines 2-6 from the singleton buffer. - /// If the multibuffer is searched for diff hunks, the associated range would be multibuffer rows, corresponding to rows 2..6 from the singleton buffer. - /// But the hunk range would be 0..10, same for any other excerpts from the same singleton buffer. - pub associated_range: Range, - /// Singleton buffer ID this hunk belongs to. - pub buffer_id: BufferId, - /// A consequent range of lines in the singleton buffer, that were changed and produced this diff hunk. +pub struct DiffHunk { + /// The buffer range, expressed in terms of rows. + pub row_range: Range, + /// The range in the buffer to which this hunk corresponds. pub buffer_range: Range, - /// Original singleton buffer text before the change, that was instead of the `buffer_range`. + /// The range in the buffer's diff base text to which this hunk corresponds. pub diff_base_byte_range: Range, } -impl sum_tree::Item for DiffHunk { +/// We store [`InternalDiffHunk`]s internally so we don't need to store the additional row range. 
+#[derive(Debug, Clone)] +struct InternalDiffHunk { + buffer_range: Range, + diff_base_byte_range: Range, +} + +impl sum_tree::Item for InternalDiffHunk { type Summary = DiffHunkSummary; fn summary(&self) -> Self::Summary { DiffHunkSummary { - buffer_range: self.associated_range.clone(), + buffer_range: self.buffer_range.clone(), } } } @@ -64,7 +65,7 @@ impl sum_tree::Summary for DiffHunkSummary { #[derive(Debug, Clone)] pub struct BufferDiff { last_buffer_version: Option, - tree: SumTree>, + tree: SumTree, } impl BufferDiff { @@ -79,11 +80,12 @@ impl BufferDiff { self.tree.is_empty() } + #[cfg(any(test, feature = "test-support"))] pub fn hunks_in_row_range<'a>( &'a self, range: Range, buffer: &'a BufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator { let start = buffer.anchor_before(Point::new(range.start, 0)); let end = buffer.anchor_after(Point::new(range.end, 0)); @@ -94,7 +96,7 @@ impl BufferDiff { &'a self, range: Range, buffer: &'a BufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator { let mut cursor = self .tree .filter::<_, DiffHunkSummary>(buffer, move |summary| { @@ -109,11 +111,8 @@ impl BufferDiff { }) .flat_map(move |hunk| { [ - ( - &hunk.associated_range.start, - hunk.diff_base_byte_range.start, - ), - (&hunk.associated_range.end, hunk.diff_base_byte_range.end), + (&hunk.buffer_range.start, hunk.diff_base_byte_range.start), + (&hunk.buffer_range.end, hunk.diff_base_byte_range.end), ] .into_iter() }); @@ -129,10 +128,9 @@ impl BufferDiff { } Some(DiffHunk { - associated_range: start_point.row..end_point.row, + row_range: start_point.row..end_point.row, diff_base_byte_range: start_base..end_base, buffer_range: buffer.anchor_before(start_point)..buffer.anchor_after(end_point), - buffer_id: buffer.remote_id(), }) }) } @@ -141,7 +139,7 @@ impl BufferDiff { &'a self, range: Range, buffer: &'a BufferSnapshot, - ) -> impl 'a + Iterator> { + ) -> impl 'a + Iterator { let mut cursor = self .tree .filter::<_, 
DiffHunkSummary>(buffer, move |summary| { @@ -154,7 +152,7 @@ impl BufferDiff { cursor.prev(buffer); let hunk = cursor.item()?; - let range = hunk.associated_range.to_point(buffer); + let range = hunk.buffer_range.to_point(buffer); let end_row = if range.end.column > 0 { range.end.row + 1 } else { @@ -162,10 +160,9 @@ impl BufferDiff { }; Some(DiffHunk { - associated_range: range.start.row..end_row, + row_range: range.start.row..end_row, diff_base_byte_range: hunk.diff_base_byte_range.clone(), buffer_range: hunk.buffer_range.clone(), - buffer_id: hunk.buffer_id, }) }) } @@ -196,7 +193,7 @@ impl BufferDiff { } #[cfg(test)] - fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator> { + fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator { let start = text.anchor_before(Point::new(0, 0)); let end = text.anchor_after(Point::new(u32::MAX, u32::MAX)); self.hunks_intersecting_range(start..end, text) @@ -229,7 +226,7 @@ impl BufferDiff { hunk_index: usize, buffer: &text::BufferSnapshot, buffer_row_divergence: &mut i64, - ) -> DiffHunk { + ) -> InternalDiffHunk { let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap(); assert!(line_item_count > 0); @@ -284,11 +281,9 @@ impl BufferDiff { let start = Point::new(buffer_row_range.start, 0); let end = Point::new(buffer_row_range.end, 0); let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end); - DiffHunk { - associated_range: buffer_range.clone(), + InternalDiffHunk { buffer_range, diff_base_byte_range, - buffer_id: buffer.remote_id(), } } } @@ -302,17 +297,16 @@ pub fn assert_hunks( diff_base: &str, expected_hunks: &[(Range, &str, &str)], ) where - Iter: Iterator>, + Iter: Iterator, { let actual_hunks = diff_hunks .map(|hunk| { ( - hunk.associated_range.clone(), + hunk.row_range.clone(), &diff_base[hunk.diff_base_byte_range], buffer .text_for_range( - Point::new(hunk.associated_range.start, 0) - ..Point::new(hunk.associated_range.end, 0), + 
Point::new(hunk.row_range.start, 0)..Point::new(hunk.row_range.end, 0), ) .collect::(), ) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index acb57273e3..5735ee9616 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -21,8 +21,8 @@ use async_watch as watch; pub use clock::ReplicaId; use futures::channel::oneshot; use gpui::{ - AnyElement, AppContext, EventEmitter, HighlightStyle, ModelContext, Pixels, Task, TaskLabel, - WindowContext, + AnyElement, AppContext, Context as _, EventEmitter, HighlightStyle, Model, ModelContext, + Pixels, Task, TaskLabel, WindowContext, }; use lsp::LanguageServerId; use parking_lot::Mutex; @@ -84,11 +84,17 @@ pub enum Capability { pub type BufferRow = u32; +#[derive(Clone)] +enum BufferDiffBase { + Git(Rope), + PastBufferVersion(Model, BufferSnapshot), +} + /// An in-memory representation of a source code file, including its text, /// syntax trees, git status, and diagnostics. pub struct Buffer { text: TextBuffer, - diff_base: Option, + diff_base: Option, git_diff: git::diff::BufferDiff, file: Option>, /// The mtime of the file when this buffer was last loaded from @@ -121,6 +127,7 @@ pub struct Buffer { /// Memoize calls to has_changes_since(saved_version). /// The contents of a cell are (self.version, has_changes) at the time of a last call. has_unsaved_edits: Cell<(clock::Global, bool)>, + _subscriptions: Vec, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -308,7 +315,10 @@ pub enum Operation { pub enum BufferEvent { /// The buffer was changed in a way that must be /// propagated to its other replicas. - Operation(Operation), + Operation { + operation: Operation, + is_local: bool, + }, /// The buffer was edited. Edited, /// The buffer's `dirty` bit changed. 
@@ -644,7 +654,7 @@ impl Buffer { id: self.remote_id().into(), file: self.file.as_ref().map(|f| f.to_proto(cx)), base_text: self.base_text().to_string(), - diff_base: self.diff_base.as_ref().map(|h| h.to_string()), + diff_base: self.diff_base().as_ref().map(|h| h.to_string()), line_ending: proto::serialize_line_ending(self.line_ending()) as i32, saved_version: proto::serialize_version(&self.saved_version), saved_mtime: self.saved_mtime.map(|time| time.into()), @@ -734,12 +744,10 @@ impl Buffer { was_dirty_before_starting_transaction: None, has_unsaved_edits: Cell::new((buffer.version(), false)), text: buffer, - diff_base: diff_base - .map(|mut raw_diff_base| { - LineEnding::normalize(&mut raw_diff_base); - raw_diff_base - }) - .map(Rope::from), + diff_base: diff_base.map(|mut raw_diff_base| { + LineEnding::normalize(&mut raw_diff_base); + BufferDiffBase::Git(Rope::from(raw_diff_base)) + }), diff_base_version: 0, git_diff, file, @@ -759,6 +767,7 @@ impl Buffer { completion_triggers_timestamp: Default::default(), deferred_ops: OperationQueue::new(), has_conflict: false, + _subscriptions: Vec::new(), } } @@ -782,6 +791,52 @@ impl Buffer { } } + pub fn branch(&mut self, cx: &mut ModelContext) -> Model { + let this = cx.handle(); + cx.new_model(|cx| { + let mut branch = Self { + diff_base: Some(BufferDiffBase::PastBufferVersion( + this.clone(), + self.snapshot(), + )), + language: self.language.clone(), + has_conflict: self.has_conflict, + has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()), + _subscriptions: vec![cx.subscribe(&this, |branch: &mut Self, _, event, cx| { + if let BufferEvent::Operation { operation, .. 
} = event { + branch.apply_ops([operation.clone()], cx); + branch.diff_base_version += 1; + } + })], + ..Self::build( + self.text.branch(), + None, + self.file.clone(), + self.capability(), + ) + }; + if let Some(language_registry) = self.language_registry() { + branch.set_language_registry(language_registry); + } + + branch + }) + } + + pub fn merge(&mut self, branch: &Model, cx: &mut ModelContext) { + let branch = branch.read(cx); + let edits = branch + .edits_since::(&self.version) + .map(|edit| { + ( + edit.old, + branch.text_for_range(edit.new).collect::(), + ) + }) + .collect::>(); + self.edit(edits, None, cx); + } + #[cfg(test)] pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot { &self.text @@ -961,20 +1016,23 @@ impl Buffer { /// Returns the current diff base, see [Buffer::set_diff_base]. pub fn diff_base(&self) -> Option<&Rope> { - self.diff_base.as_ref() + match self.diff_base.as_ref()? { + BufferDiffBase::Git(rope) => Some(rope), + BufferDiffBase::PastBufferVersion(_, buffer_snapshot) => { + Some(buffer_snapshot.as_rope()) + } + } } /// Sets the text that will be used to compute a Git diff /// against the buffer text. pub fn set_diff_base(&mut self, diff_base: Option, cx: &mut ModelContext) { - self.diff_base = diff_base - .map(|mut raw_diff_base| { - LineEnding::normalize(&mut raw_diff_base); - raw_diff_base - }) - .map(Rope::from); + self.diff_base = diff_base.map(|mut raw_diff_base| { + LineEnding::normalize(&mut raw_diff_base); + BufferDiffBase::Git(Rope::from(raw_diff_base)) + }); self.diff_base_version += 1; - if let Some(recalc_task) = self.git_diff_recalc(cx) { + if let Some(recalc_task) = self.recalculate_diff(cx) { cx.spawn(|buffer, mut cx| async move { recalc_task.await; buffer @@ -992,14 +1050,21 @@ impl Buffer { self.diff_base_version } - /// Recomputes the Git diff status. - pub fn git_diff_recalc(&mut self, cx: &mut ModelContext) -> Option> { - let diff_base = self.diff_base.clone()?; + /// Recomputes the diff. 
+ pub fn recalculate_diff(&mut self, cx: &mut ModelContext) -> Option> { + let diff_base_rope = match self.diff_base.as_mut()? { + BufferDiffBase::Git(rope) => rope.clone(), + BufferDiffBase::PastBufferVersion(base_buffer, base_buffer_snapshot) => { + let new_base_snapshot = base_buffer.read(cx).snapshot(); + *base_buffer_snapshot = new_base_snapshot; + base_buffer_snapshot.as_rope().clone() + } + }; let snapshot = self.snapshot(); let mut diff = self.git_diff.clone(); let diff = cx.background_executor().spawn(async move { - diff.update(&diff_base, &snapshot).await; + diff.update(&diff_base_rope, &snapshot).await; diff }); @@ -1169,7 +1234,7 @@ impl Buffer { lamport_timestamp, }; self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx); - self.send_operation(op, cx); + self.send_operation(op, true, cx); } fn request_autoindent(&mut self, cx: &mut ModelContext) { @@ -1743,6 +1808,7 @@ impl Buffer { lamport_timestamp, cursor_shape, }, + true, cx, ); self.non_text_state_update_count += 1; @@ -1889,7 +1955,7 @@ impl Buffer { } self.end_transaction(cx); - self.send_operation(Operation::Buffer(edit_operation), cx); + self.send_operation(Operation::Buffer(edit_operation), true, cx); Some(edit_id) } @@ -1991,6 +2057,9 @@ impl Buffer { } }) .collect::>(); + for operation in buffer_ops.iter() { + self.send_operation(Operation::Buffer(operation.clone()), false, cx); + } self.text.apply_ops(buffer_ops); self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops(cx); @@ -2114,8 +2183,16 @@ impl Buffer { } } - fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext) { - cx.emit(BufferEvent::Operation(operation)); + fn send_operation( + &mut self, + operation: Operation, + is_local: bool, + cx: &mut ModelContext, + ) { + cx.emit(BufferEvent::Operation { + operation, + is_local, + }); } /// Removes the selections for a given peer. 
@@ -2130,7 +2207,7 @@ impl Buffer { let old_version = self.version.clone(); if let Some((transaction_id, operation)) = self.text.undo() { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); self.did_edit(&old_version, was_dirty, cx); Some(transaction_id) } else { @@ -2147,7 +2224,7 @@ impl Buffer { let was_dirty = self.is_dirty(); let old_version = self.version.clone(); if let Some(operation) = self.text.undo_transaction(transaction_id) { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); self.did_edit(&old_version, was_dirty, cx); true } else { @@ -2167,7 +2244,7 @@ impl Buffer { let operations = self.text.undo_to_transaction(transaction_id); let undone = !operations.is_empty(); for operation in operations { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); } if undone { self.did_edit(&old_version, was_dirty, cx) @@ -2181,7 +2258,7 @@ impl Buffer { let old_version = self.version.clone(); if let Some((transaction_id, operation)) = self.text.redo() { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); self.did_edit(&old_version, was_dirty, cx); Some(transaction_id) } else { @@ -2201,7 +2278,7 @@ impl Buffer { let operations = self.text.redo_to_transaction(transaction_id); let redone = !operations.is_empty(); for operation in operations { - self.send_operation(Operation::Buffer(operation), cx); + self.send_operation(Operation::Buffer(operation), true, cx); } if redone { self.did_edit(&old_version, was_dirty, cx) @@ -2218,6 +2295,7 @@ impl Buffer { triggers, lamport_timestamp: self.completion_triggers_timestamp, }, + true, cx, ); cx.notify(); @@ -2297,7 +2375,7 @@ impl Buffer { let ops = self.text.randomly_undo_redo(rng); if !ops.is_empty() { for op in ops { - 
self.send_operation(Operation::Buffer(op), cx); + self.send_operation(Operation::Buffer(op), true, cx); self.did_edit(&old_version, was_dirty, cx); } } @@ -3638,12 +3716,12 @@ impl BufferSnapshot { !self.git_diff.is_empty() } - /// Returns all the Git diff hunks intersecting the given - /// row range. + /// Returns all the Git diff hunks intersecting the given row range. + #[cfg(any(test, feature = "test-support"))] pub fn git_diff_hunks_in_row_range( &self, range: Range, - ) -> impl '_ + Iterator> { + ) -> impl '_ + Iterator { self.git_diff.hunks_in_row_range(range, self) } @@ -3652,7 +3730,7 @@ impl BufferSnapshot { pub fn git_diff_hunks_intersecting_range( &self, range: Range, - ) -> impl '_ + Iterator> { + ) -> impl '_ + Iterator { self.git_diff.hunks_intersecting_range(range, self) } @@ -3661,7 +3739,7 @@ impl BufferSnapshot { pub fn git_diff_hunks_intersecting_range_rev( &self, range: Range, - ) -> impl '_ + Iterator> { + ) -> impl '_ + Iterator { self.git_diff.hunks_intersecting_range_rev(range, self) } diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 23faa33316..1335a94dd0 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -6,6 +6,7 @@ use crate::Buffer; use clock::ReplicaId; use collections::BTreeMap; use futures::FutureExt as _; +use git::diff::assert_hunks; use gpui::{AppContext, BorrowAppContext, Model}; use gpui::{Context, TestAppContext}; use indoc::indoc; @@ -275,13 +276,19 @@ fn test_edit_events(cx: &mut gpui::AppContext) { |buffer, cx| { let buffer_1_events = buffer_1_events.clone(); cx.subscribe(&buffer1, move |_, _, event, _| match event.clone() { - BufferEvent::Operation(op) => buffer1_ops.lock().push(op), + BufferEvent::Operation { + operation, + is_local: true, + } => buffer1_ops.lock().push(operation), event => buffer_1_events.lock().push(event), }) .detach(); let buffer_2_events = buffer_2_events.clone(); - cx.subscribe(&buffer2, move |_, _, event, _| { - 
buffer_2_events.lock().push(event.clone()) + cx.subscribe(&buffer2, move |_, _, event, _| match event.clone() { + BufferEvent::Operation { + is_local: false, .. + } => {} + event => buffer_2_events.lock().push(event), }) .detach(); @@ -2370,6 +2377,118 @@ async fn test_find_matching_indent(cx: &mut TestAppContext) { ); } +#[gpui::test] +fn test_branch_and_merge(cx: &mut TestAppContext) { + cx.update(|cx| init_settings(cx, |_| {})); + + let base_buffer = cx.new_model(|cx| Buffer::local("one\ntwo\nthree\n", cx)); + + // Create a remote replica of the base buffer. + let base_buffer_replica = cx.new_model(|cx| { + Buffer::from_proto( + 1, + Capability::ReadWrite, + base_buffer.read(cx).to_proto(cx), + None, + ) + .unwrap() + }); + base_buffer.update(cx, |_buffer, cx| { + cx.subscribe(&base_buffer_replica, |this, _, event, cx| { + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { + this.apply_ops([operation.clone()], cx); + } + }) + .detach(); + }); + + // Create a branch, which initially has the same state as the base buffer. + let branch_buffer = base_buffer.update(cx, |buffer, cx| buffer.branch(cx)); + branch_buffer.read_with(cx, |buffer, _| { + assert_eq!(buffer.text(), "one\ntwo\nthree\n"); + }); + + // Edits to the branch are not applied to the base. + branch_buffer.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(1, 0)..Point::new(1, 0), "ONE_POINT_FIVE\n")], + None, + cx, + ) + }); + branch_buffer.read_with(cx, |branch_buffer, cx| { + assert_eq!(base_buffer.read(cx).text(), "one\ntwo\nthree\n"); + assert_eq!(branch_buffer.text(), "one\nONE_POINT_FIVE\ntwo\nthree\n"); + }); + + // Edits to the base are applied to the branch. 
+ base_buffer.update(cx, |buffer, cx| { + buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "ZERO\n")], None, cx) + }); + branch_buffer.read_with(cx, |branch_buffer, cx| { + assert_eq!(base_buffer.read(cx).text(), "ZERO\none\ntwo\nthree\n"); + assert_eq!( + branch_buffer.text(), + "ZERO\none\nONE_POINT_FIVE\ntwo\nthree\n" + ); + }); + + assert_diff_hunks(&branch_buffer, cx, &[(2..3, "", "ONE_POINT_FIVE\n")]); + + // Edits to any replica of the base are applied to the branch. + base_buffer_replica.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(2, 0)..Point::new(2, 0), "TWO_POINT_FIVE\n")], + None, + cx, + ) + }); + branch_buffer.read_with(cx, |branch_buffer, cx| { + assert_eq!( + base_buffer.read(cx).text(), + "ZERO\none\ntwo\nTWO_POINT_FIVE\nthree\n" + ); + assert_eq!( + branch_buffer.text(), + "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" + ); + }); + + // Merging the branch applies all of its changes to the base. + base_buffer.update(cx, |base_buffer, cx| { + base_buffer.merge(&branch_buffer, cx); + assert_eq!( + base_buffer.text(), + "ZERO\none\nONE_POINT_FIVE\ntwo\nTWO_POINT_FIVE\nthree\n" + ); + }); +} + +fn assert_diff_hunks( + buffer: &Model, + cx: &mut TestAppContext, + expected_hunks: &[(Range, &str, &str)], +) { + buffer + .update(cx, |buffer, cx| buffer.recalculate_diff(cx).unwrap()) + .detach(); + cx.executor().run_until_parked(); + + buffer.read_with(cx, |buffer, _| { + let snapshot = buffer.snapshot(); + assert_hunks( + snapshot.git_diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX), + &snapshot, + &buffer.diff_base().unwrap().to_string(), + expected_hunks, + ); + }); +} + #[gpui::test(iterations = 100)] fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { let min_peers = env::var("MIN_PEERS") @@ -2407,10 +2526,15 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); let network = network.clone(); 
cx.subscribe(&cx.handle(), move |buffer, _, event, _| { - if let BufferEvent::Operation(op) = event { - network - .lock() - .broadcast(buffer.replica_id(), vec![proto::serialize_operation(op)]); + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { + network.lock().broadcast( + buffer.replica_id(), + vec![proto::serialize_operation(operation)], + ); } }) .detach(); @@ -2533,10 +2657,14 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { new_buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200))); let network = network.clone(); cx.subscribe(&cx.handle(), move |buffer, _, event, _| { - if let BufferEvent::Operation(op) = event { + if let BufferEvent::Operation { + operation, + is_local: true, + } = event + { network.lock().broadcast( buffer.replica_id(), - vec![proto::serialize_operation(op)], + vec![proto::serialize_operation(operation)], ); } }) diff --git a/crates/multi_buffer/Cargo.toml b/crates/multi_buffer/Cargo.toml index acd0c89f8e..444fe3c75c 100644 --- a/crates/multi_buffer/Cargo.toml +++ b/crates/multi_buffer/Cargo.toml @@ -27,7 +27,6 @@ collections.workspace = true ctor.workspace = true env_logger.workspace = true futures.workspace = true -git.workspace = true gpui.workspace = true itertools.workspace = true language.workspace = true diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index f6a61f562a..d406f9bfaf 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5,7 +5,6 @@ use anyhow::{anyhow, Result}; use clock::ReplicaId; use collections::{BTreeMap, Bound, HashMap, HashSet}; use futures::{channel::mpsc, SinkExt}; -use git::diff::DiffHunk; use gpui::{AppContext, EntityId, EventEmitter, Model, ModelContext}; use itertools::Itertools; use language::{ @@ -110,6 +109,19 @@ pub enum Event { DiagnosticsUpdated, } +/// A diff hunk, representing a range of consequent lines in a multibuffer. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct MultiBufferDiffHunk { + /// The row range in the multibuffer where this diff hunk appears. + pub row_range: Range, + /// The buffer ID that this hunk belongs to. + pub buffer_id: BufferId, + /// The range of the underlying buffer that this hunk corresponds to. + pub buffer_range: Range, + /// The range within the buffer's diff base that this hunk corresponds to. + pub diff_base_byte_range: Range, +} + pub type MultiBufferPoint = Point; #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq, serde::Deserialize)] @@ -1711,7 +1723,7 @@ impl MultiBuffer { } // - language::BufferEvent::Operation(_) => return, + language::BufferEvent::Operation { .. } => return, }); } @@ -3561,7 +3573,7 @@ impl MultiBufferSnapshot { pub fn git_diff_hunks_in_range_rev( &self, row_range: Range, - ) -> impl Iterator> + '_ { + ) -> impl Iterator + '_ { let mut cursor = self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.end.0, 0), Bias::Left, &()); @@ -3599,22 +3611,19 @@ impl MultiBufferSnapshot { .git_diff_hunks_intersecting_range_rev(buffer_start..buffer_end) .map(move |hunk| { let start = multibuffer_start.row - + hunk - .associated_range - .start - .saturating_sub(excerpt_start_point.row); + + hunk.row_range.start.saturating_sub(excerpt_start_point.row); let end = multibuffer_start.row + hunk - .associated_range + .row_range .end .min(excerpt_end_point.row + 1) .saturating_sub(excerpt_start_point.row); - DiffHunk { - associated_range: MultiBufferRow(start)..MultiBufferRow(end), + MultiBufferDiffHunk { + row_range: MultiBufferRow(start)..MultiBufferRow(end), diff_base_byte_range: hunk.diff_base_byte_range.clone(), buffer_range: hunk.buffer_range.clone(), - buffer_id: hunk.buffer_id, + buffer_id: excerpt.buffer_id, } }); @@ -3628,7 +3637,7 @@ impl MultiBufferSnapshot { pub fn git_diff_hunks_in_range( &self, row_range: Range, - ) -> impl Iterator> + '_ { + ) -> impl Iterator + '_ { let mut cursor = 
self.excerpts.cursor::(&()); cursor.seek(&Point::new(row_range.start.0, 0), Bias::Left, &()); @@ -3673,23 +3682,20 @@ impl MultiBufferSnapshot { MultiBufferRow(0)..MultiBufferRow(1) } else { let start = multibuffer_start.row - + hunk - .associated_range - .start - .saturating_sub(excerpt_rows.start); + + hunk.row_range.start.saturating_sub(excerpt_rows.start); let end = multibuffer_start.row + hunk - .associated_range + .row_range .end .min(excerpt_rows.end + 1) .saturating_sub(excerpt_rows.start); MultiBufferRow(start)..MultiBufferRow(end) }; - DiffHunk { - associated_range: buffer_range, + MultiBufferDiffHunk { + row_range: buffer_range, diff_base_byte_range: hunk.diff_base_byte_range.clone(), buffer_range: hunk.buffer_range.clone(), - buffer_id: hunk.buffer_id, + buffer_id: excerpt.buffer_id, } }); diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 435c143024..bd9c17ecb2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2182,7 +2182,10 @@ impl Project { let buffer_id = buffer.read(cx).remote_id(); match event { - BufferEvent::Operation(operation) => { + BufferEvent::Operation { + operation, + is_local: true, + } => { let operation = language::proto::serialize_operation(operation); if let Some(ssh) = &self.ssh_session { @@ -2267,7 +2270,7 @@ impl Project { .filter_map(|buffer| { let buffer = buffer.upgrade()?; buffer - .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx)) + .update(&mut cx, |buffer, cx| buffer.recalculate_diff(cx)) .ok() .flatten() }) diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 72a38ccba7..d0d67f0cda 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -3288,7 +3288,7 @@ async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) { cx.subscribe(&buffer1, { let events = events.clone(); move |_, _, event, _| match event { - BufferEvent::Operation(_) => {} + BufferEvent::Operation { .. 
} => {} _ => events.lock().push(event.clone()), } }) diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 54f48e3626..9d5c26d6c7 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -146,12 +146,15 @@ impl HeadlessProject { cx: &mut ModelContext, ) { match event { - BufferEvent::Operation(op) => cx + BufferEvent::Operation { + operation, + is_local: true, + } => cx .background_executor() .spawn(self.session.request(proto::UpdateBuffer { project_id: SSH_PROJECT_ID, buffer_id: buffer.read(cx).remote_id().to_proto(), - operations: vec![serialize_operation(op)], + operations: vec![serialize_operation(operation)], })) .detach(), _ => {} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 8d2cd97aac..8bdc9fdb03 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -13,6 +13,7 @@ mod undo_map; pub use anchor::*; use anyhow::{anyhow, Context as _, Result}; pub use clock::ReplicaId; +use clock::LOCAL_BRANCH_REPLICA_ID; use collections::{HashMap, HashSet}; use locator::Locator; use operation_queue::OperationQueue; @@ -715,6 +716,19 @@ impl Buffer { self.snapshot.clone() } + pub fn branch(&self) -> Self { + Self { + snapshot: self.snapshot.clone(), + history: History::new(self.base_text().clone()), + deferred_ops: OperationQueue::new(), + deferred_replicas: HashSet::default(), + lamport_clock: clock::Lamport::new(LOCAL_BRANCH_REPLICA_ID), + subscriptions: Default::default(), + edit_id_resolvers: Default::default(), + wait_for_version_txs: Default::default(), + } + } + pub fn replica_id(&self) -> ReplicaId { self.lamport_clock.replica_id } From 4f227fd3bf19fe7393d278545edfa06343dc5958 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 20 Sep 2024 18:51:34 -0600 Subject: [PATCH 76/96] Use LanguageServerName in more places (#18167) This pushes the new LanguageServerName type to more places. 
As both languages and language servers were identified by Arc, it was sometimes hard to tell which was intended. Release Notes: - N/A --- .../src/activity_indicator.rs | 7 ++- .../remote_editing_collaboration_tests.rs | 2 +- .../src/wasm_host/wit/since_v0_1_0.rs | 4 +- .../src/wasm_host/wit/since_v0_2_0.rs | 4 +- crates/gpui/src/shared_string.rs | 7 +++ crates/language/src/language.rs | 57 ++++++++++++++++--- crates/language/src/language_settings.rs | 32 ++++++----- crates/language_tools/src/lsp_log.rs | 2 +- crates/languages/src/c.rs | 7 ++- crates/languages/src/go.rs | 7 ++- crates/languages/src/python.rs | 21 ++++--- crates/languages/src/rust.rs | 8 +-- crates/languages/src/tailwind.rs | 9 +-- crates/languages/src/typescript.rs | 13 +++-- crates/languages/src/vtsls.rs | 8 +-- crates/languages/src/yaml.rs | 8 +-- crates/project/src/lsp_store.rs | 24 ++++---- crates/project/src/prettier_store.rs | 4 +- crates/project/src/project_settings.rs | 3 +- .../remote_server/src/remote_editing_tests.rs | 8 +-- 20 files changed, 150 insertions(+), 85 deletions(-) diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 3f567c9e80..a9ae7d075d 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -19,7 +19,10 @@ use workspace::{item::ItemHandle, StatusItemView, Workspace}; actions!(activity_indicator, [ShowErrorMessage]); pub enum Event { - ShowError { lsp_name: Arc, error: String }, + ShowError { + lsp_name: LanguageServerName, + error: String, + }, } pub struct ActivityIndicator { @@ -123,7 +126,7 @@ impl ActivityIndicator { self.statuses.retain(|status| { if let LanguageServerBinaryStatus::Failed { error } = &status.status { cx.emit(Event::ShowError { - lsp_name: status.name.0.clone(), + lsp_name: status.name.clone(), error: error.clone(), }); false diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs 
b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index c4410fd776..cdcf69cf7e 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -102,7 +102,7 @@ async fn test_sharing_an_ssh_remote_project( all_language_settings(file, cx) .language(Some(&("Rust".into()))) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); diff --git a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs index 50547b6371..3835f58f88 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_1_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_1_0.rs @@ -9,10 +9,10 @@ use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; use isahc::config::{Configurable, RedirectPolicy}; -use language::LanguageName; use language::{ language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate, }; +use language::{LanguageName, LanguageServerName}; use project::project_settings::ProjectSettings; use semantic_version::SemanticVersion; use std::{ @@ -366,7 +366,7 @@ impl ExtensionImports for WasmState { .and_then(|key| { ProjectSettings::get(location, cx) .lsp - .get(&Arc::::from(key)) + .get(&LanguageServerName(key.into())) }) .cloned() .unwrap_or_default(); diff --git a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs index 7fa79c2544..eb6e1a09a2 100644 --- a/crates/extension/src/wasm_host/wit/since_v0_2_0.rs +++ b/crates/extension/src/wasm_host/wit/since_v0_2_0.rs @@ -9,10 +9,10 @@ use futures::{io::BufReader, FutureExt as _}; use futures::{lock::Mutex, AsyncReadExt}; use indexed_docs::IndexedDocsDatabase; use isahc::config::{Configurable, RedirectPolicy}; -use language::LanguageName; use language::{ language_settings::AllLanguageSettings, 
LanguageServerBinaryStatus, LspAdapterDelegate, }; +use language::{LanguageName, LanguageServerName}; use project::project_settings::ProjectSettings; use semantic_version::SemanticVersion; use std::{ @@ -412,7 +412,7 @@ impl ExtensionImports for WasmState { .and_then(|key| { ProjectSettings::get(location, cx) .lsp - .get(&Arc::::from(key)) + .get(&LanguageServerName::from_proto(key)) }) .cloned() .unwrap_or_default(); diff --git a/crates/gpui/src/shared_string.rs b/crates/gpui/src/shared_string.rs index a4ed36ec21..f5aef6adf8 100644 --- a/crates/gpui/src/shared_string.rs +++ b/crates/gpui/src/shared_string.rs @@ -9,6 +9,13 @@ use util::arc_cow::ArcCow; #[derive(Deref, DerefMut, Eq, PartialEq, PartialOrd, Ord, Hash, Clone)] pub struct SharedString(ArcCow<'static, str>); +impl SharedString { + /// creates a static SharedString + pub const fn new_static(s: &'static str) -> Self { + Self(ArcCow::Borrowed(s)) + } +} + impl Default for SharedString { fn default() -> Self { Self(ArcCow::Owned(Arc::default())) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 309a67a1a9..29a7ac1860 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -139,11 +139,52 @@ pub trait ToLspPosition { /// A name of a language server. 
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] -pub struct LanguageServerName(pub Arc); +pub struct LanguageServerName(pub SharedString); +impl std::fmt::Display for LanguageServerName { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(&self.0, f) + } +} + +impl AsRef for LanguageServerName { + fn as_ref(&self) -> &str { + self.0.as_ref() + } +} + +impl AsRef for LanguageServerName { + fn as_ref(&self) -> &OsStr { + self.0.as_ref().as_ref() + } +} + +impl JsonSchema for LanguageServerName { + fn schema_name() -> String { + "LanguageServerName".into() + } + + fn json_schema(_: &mut SchemaGenerator) -> Schema { + SchemaObject { + instance_type: Some(InstanceType::String.into()), + ..Default::default() + } + .into() + } +} impl LanguageServerName { + pub const fn new_static(s: &'static str) -> Self { + Self(SharedString::new_static(s)) + } + pub fn from_proto(s: String) -> Self { - Self(Arc::from(s)) + Self(s.into()) + } +} + +impl<'a> From<&'a str> for LanguageServerName { + fn from(str: &'a str) -> LanguageServerName { + LanguageServerName(str.to_string().into()) } } @@ -202,8 +243,8 @@ impl CachedLspAdapter { }) } - pub fn name(&self) -> Arc { - self.adapter.name().0.clone() + pub fn name(&self) -> LanguageServerName { + self.adapter.name().clone() } pub async fn get_language_server_command( @@ -594,7 +635,7 @@ pub struct LanguageConfig { pub block_comment: Option<(Arc, Arc)>, /// A list of language servers that are allowed to run on subranges of a given language. 
#[serde(default)] - pub scope_opt_in_language_servers: Vec, + pub scope_opt_in_language_servers: Vec, #[serde(default)] pub overrides: HashMap, /// A list of characters that Zed should treat as word characters for the @@ -658,7 +699,7 @@ pub struct LanguageConfigOverride { #[serde(default)] pub word_characters: Override>, #[serde(default)] - pub opt_into_language_servers: Vec, + pub opt_into_language_servers: Vec, } #[derive(Clone, Deserialize, Debug, Serialize, JsonSchema)] @@ -1479,9 +1520,9 @@ impl LanguageScope { pub fn language_allowed(&self, name: &LanguageServerName) -> bool { let config = &self.language.config; let opt_in_servers = &config.scope_opt_in_language_servers; - if opt_in_servers.iter().any(|o| *o == *name.0) { + if opt_in_servers.iter().any(|o| *o == *name) { if let Some(over) = self.config_override() { - over.opt_into_language_servers.iter().any(|o| *o == *name.0) + over.opt_into_language_servers.iter().any(|o| *o == *name) } else { false } diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 6121cb6a39..82d4208aae 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -99,7 +99,7 @@ pub struct LanguageSettings { /// special tokens: /// - `"!"` - A language server ID prefixed with a `!` will be disabled. /// - `"..."` - A placeholder to refer to the **rest** of the registered language servers for this language. - pub language_servers: Vec>, + pub language_servers: Vec, /// Controls whether inline completions are shown immediately (true) /// or manually by triggering `editor::ShowInlineCompletion` (false). 
pub show_inline_completions: bool, @@ -137,22 +137,24 @@ impl LanguageSettings { } pub(crate) fn resolve_language_servers( - configured_language_servers: &[Arc], + configured_language_servers: &[String], available_language_servers: &[LanguageServerName], ) -> Vec { - let (disabled_language_servers, enabled_language_servers): (Vec>, Vec>) = - configured_language_servers.iter().partition_map( - |language_server| match language_server.strip_prefix('!') { - Some(disabled) => Either::Left(disabled.into()), - None => Either::Right(language_server.clone()), - }, - ); + let (disabled_language_servers, enabled_language_servers): ( + Vec, + Vec, + ) = configured_language_servers.iter().partition_map( + |language_server| match language_server.strip_prefix('!') { + Some(disabled) => Either::Left(LanguageServerName(disabled.to_string().into())), + None => Either::Right(LanguageServerName(language_server.clone().into())), + }, + ); let rest = available_language_servers .iter() .filter(|&available_language_server| { - !disabled_language_servers.contains(&available_language_server.0) - && !enabled_language_servers.contains(&available_language_server.0) + !disabled_language_servers.contains(&available_language_server) + && !enabled_language_servers.contains(&available_language_server) }) .cloned() .collect::>(); @@ -160,10 +162,10 @@ impl LanguageSettings { enabled_language_servers .into_iter() .flat_map(|language_server| { - if language_server.as_ref() == Self::REST_OF_LANGUAGE_SERVERS { + if language_server.0.as_ref() == Self::REST_OF_LANGUAGE_SERVERS { rest.clone() } else { - vec![LanguageServerName(language_server.clone())] + vec![language_server.clone()] } }) .collect::>() @@ -295,7 +297,7 @@ pub struct LanguageSettingsContent { /// /// Default: ["..."] #[serde(default)] - pub language_servers: Option>>, + pub language_servers: Option>, /// Controls whether inline completions are shown immediately (true) /// or manually by triggering `editor::ShowInlineCompletion` (false). 
/// @@ -1165,7 +1167,7 @@ mod tests { names .iter() .copied() - .map(|name| LanguageServerName(name.into())) + .map(|name| LanguageServerName(name.to_string().into())) .collect::>() } diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index 53def5eb2a..bde5fe9b19 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -236,7 +236,7 @@ impl LogStore { )); this.add_language_server( LanguageServerKind::Global { - name: LanguageServerName(Arc::from("copilot")), + name: LanguageServerName::new_static("copilot"), }, server.server_id(), Some(server.clone()), diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index 4ebb4569ef..8a04e0aae6 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -13,13 +13,13 @@ use util::{fs::remove_matching, maybe, ResultExt}; pub struct CLspAdapter; impl CLspAdapter { - const SERVER_NAME: &'static str = "clangd"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("clangd"); } #[async_trait(?Send)] impl super::LspAdapter for CLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -28,7 +28,8 @@ impl super::LspAdapter for CLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) + language_server_settings(delegate, &Self::SERVER_NAME, cx) + .and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index a103c4783c..a1a996c066 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -33,7 +33,7 @@ fn server_binary_arguments() -> Vec { pub struct GoLspAdapter; impl GoLspAdapter { - const SERVER_NAME: &'static str = "gopls"; + const SERVER_NAME: LanguageServerName = 
LanguageServerName::new_static("gopls"); } static GOPLS_VERSION_REGEX: LazyLock = @@ -46,7 +46,7 @@ static GO_ESCAPE_SUBTEST_NAME_REGEX: LazyLock = LazyLock::new(|| { #[async_trait(?Send)] impl super::LspAdapter for GoLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -71,7 +71,8 @@ impl super::LspAdapter for GoLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx).and_then(|s| s.binary.clone()) + language_server_settings(delegate, &Self::SERVER_NAME, cx) + .and_then(|s| s.binary.clone()) }); match configured_binary { diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index ee127c00cc..0dce8fb661 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -30,7 +30,7 @@ pub struct PythonLspAdapter { } impl PythonLspAdapter { - const SERVER_NAME: &'static str = "pyright"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("pyright"); pub fn new(node: Arc) -> Self { PythonLspAdapter { node } @@ -40,7 +40,7 @@ impl PythonLspAdapter { #[async_trait(?Send)] impl LspAdapter for PythonLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -49,7 +49,7 @@ impl LspAdapter for PythonLspAdapter { ) -> Result> { Ok(Box::new( self.node - .npm_package_latest_version(Self::SERVER_NAME) + .npm_package_latest_version(Self::SERVER_NAME.as_ref()) .await?, ) as Box<_>) } @@ -62,16 +62,23 @@ impl LspAdapter for PythonLspAdapter { ) -> Result { let latest_version = latest_version.downcast::().unwrap(); let server_path = container_dir.join(SERVER_PATH); - let package_name = Self::SERVER_NAME; let should_install_language_server = self .node - .should_install_npm_package(package_name, 
&server_path, &container_dir, &latest_version) + .should_install_npm_package( + Self::SERVER_NAME.as_ref(), + &server_path, + &container_dir, + &latest_version, + ) .await; if should_install_language_server { self.node - .npm_install_packages(&container_dir, &[(package_name, latest_version.as_str())]) + .npm_install_packages( + &container_dir, + &[(Self::SERVER_NAME.as_ref(), latest_version.as_str())], + ) .await?; } @@ -182,7 +189,7 @@ impl LspAdapter for PythonLspAdapter { cx: &mut AsyncAppContext, ) -> Result { cx.update(|cx| { - language_server_settings(adapter.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(adapter.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() }) diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index a32ffe50f5..eebd573a7e 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -25,13 +25,13 @@ use util::{fs::remove_matching, maybe, ResultExt}; pub struct RustLspAdapter; impl RustLspAdapter { - const SERVER_NAME: &'static str = "rust-analyzer"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("rust-analyzer"); } #[async_trait(?Send)] impl LspAdapter for RustLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -41,7 +41,7 @@ impl LspAdapter for RustLspAdapter { ) -> Option { let configured_binary = cx .update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx) + language_server_settings(delegate, &Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()?; @@ -60,7 +60,7 @@ impl LspAdapter for RustLspAdapter { path_lookup: None, .. 
}) => { - let path = delegate.which(Self::SERVER_NAME.as_ref()).await; + let path = delegate.which("rust-analyzer".as_ref()).await; let env = delegate.shell_env().await; if let Some(path) = path { diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 9a053dbd87..e3e17a8fa7 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -32,7 +32,8 @@ pub struct TailwindLspAdapter { } impl TailwindLspAdapter { - const SERVER_NAME: &'static str = "tailwindcss-language-server"; + const SERVER_NAME: LanguageServerName = + LanguageServerName::new_static("tailwindcss-language-server"); pub fn new(node: Arc) -> Self { TailwindLspAdapter { node } @@ -42,7 +43,7 @@ impl TailwindLspAdapter { #[async_trait(?Send)] impl LspAdapter for TailwindLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -52,7 +53,7 @@ impl LspAdapter for TailwindLspAdapter { ) -> Option { let configured_binary = cx .update(|cx| { - language_server_settings(delegate, Self::SERVER_NAME, cx) + language_server_settings(delegate, &Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -152,7 +153,7 @@ impl LspAdapter for TailwindLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let tailwind_user_settings = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index c65b74aa9b..b09216c970 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -71,7 +71,8 @@ pub struct TypeScriptLspAdapter { impl TypeScriptLspAdapter { const OLD_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.js"; const NEW_SERVER_PATH: &'static str = 
"node_modules/typescript-language-server/lib/cli.mjs"; - const SERVER_NAME: &'static str = "typescript-language-server"; + const SERVER_NAME: LanguageServerName = + LanguageServerName::new_static("typescript-language-server"); pub fn new(node: Arc) -> Self { TypeScriptLspAdapter { node } } @@ -97,7 +98,7 @@ struct TypeScriptVersions { #[async_trait(?Send)] impl LspAdapter for TypeScriptLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -239,7 +240,7 @@ impl LspAdapter for TypeScriptLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; if let Some(options) = override_options { @@ -304,7 +305,7 @@ impl EsLintLspAdapter { const GITHUB_ASSET_KIND: AssetKind = AssetKind::Zip; const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js"; - const SERVER_NAME: &'static str = "eslint"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("eslint"); const FLAT_CONFIG_FILE_NAMES: &'static [&'static str] = &["eslint.config.js", "eslint.config.mjs", "eslint.config.cjs"]; @@ -331,7 +332,7 @@ impl LspAdapter for EsLintLspAdapter { let workspace_root = delegate.worktree_root_path(); let eslint_user_settings = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) .unwrap_or_default() })?; @@ -403,7 +404,7 @@ impl LspAdapter for EsLintLspAdapter { } fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn fetch_latest_server_version( diff --git a/crates/languages/src/vtsls.rs b/crates/languages/src/vtsls.rs index 
9499b5c54f..5ec3121384 100644 --- a/crates/languages/src/vtsls.rs +++ b/crates/languages/src/vtsls.rs @@ -48,11 +48,11 @@ struct TypeScriptVersions { server_version: String, } -const SERVER_NAME: &str = "vtsls"; +const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("vtsls"); #[async_trait(?Send)] impl LspAdapter for VtslsLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(SERVER_NAME.into()) + SERVER_NAME.clone() } async fn fetch_latest_server_version( @@ -74,7 +74,7 @@ impl LspAdapter for VtslsLspAdapter { cx: &AsyncAppContext, ) -> Option { let configured_binary = cx.update(|cx| { - language_server_settings(delegate, SERVER_NAME, cx).and_then(|s| s.binary.clone()) + language_server_settings(delegate, &SERVER_NAME, cx).and_then(|s| s.binary.clone()) }); match configured_binary { @@ -267,7 +267,7 @@ impl LspAdapter for VtslsLspAdapter { cx: &mut AsyncAppContext, ) -> Result { let override_options = cx.update(|cx| { - language_server_settings(delegate.as_ref(), SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; diff --git a/crates/languages/src/yaml.rs b/crates/languages/src/yaml.rs index 06360847ac..583961f4b1 100644 --- a/crates/languages/src/yaml.rs +++ b/crates/languages/src/yaml.rs @@ -30,7 +30,7 @@ pub struct YamlLspAdapter { } impl YamlLspAdapter { - const SERVER_NAME: &'static str = "yaml-language-server"; + const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("yaml-language-server"); pub fn new(node: Arc) -> Self { YamlLspAdapter { node } } @@ -39,7 +39,7 @@ impl YamlLspAdapter { #[async_trait(?Send)] impl LspAdapter for YamlLspAdapter { fn name(&self) -> LanguageServerName { - LanguageServerName(Self::SERVER_NAME.into()) + Self::SERVER_NAME.clone() } async fn check_if_user_installed( @@ -49,7 +49,7 @@ impl LspAdapter for YamlLspAdapter { ) -> Option { let configured_binary = cx .update(|cx| { - language_server_settings(delegate, 
Self::SERVER_NAME, cx) + language_server_settings(delegate, &Self::SERVER_NAME, cx) .and_then(|s| s.binary.clone()) }) .ok()??; @@ -145,7 +145,7 @@ impl LspAdapter for YamlLspAdapter { let mut options = serde_json::json!({"[yaml]": {"editor.tabSize": tab_size}}); let project_options = cx.update(|cx| { - language_server_settings(delegate.as_ref(), Self::SERVER_NAME, cx) + language_server_settings(delegate.as_ref(), &Self::SERVER_NAME, cx) .and_then(|s| s.settings.clone()) })?; if let Some(override_options) = project_options { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 92f37f87af..6a3788c879 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -103,7 +103,7 @@ pub struct LocalLspStore { supplementary_language_servers: HashMap)>, prettier_store: Model, - current_lsp_settings: HashMap, LspSettings>, + current_lsp_settings: HashMap, _subscription: gpui::Subscription, } @@ -138,7 +138,7 @@ impl RemoteLspStore {} pub struct SshLspStore { upstream_client: AnyProtoClient, - current_lsp_settings: HashMap, LspSettings>, + current_lsp_settings: HashMap, } #[allow(clippy::large_enum_variant)] @@ -316,8 +316,8 @@ impl LspStore { pub fn swap_current_lsp_settings( &mut self, - new_settings: HashMap, LspSettings>, - ) -> Option, LspSettings>> { + new_settings: HashMap, + ) -> Option> { match &mut self.mode { LspStoreMode::Ssh(SshLspStore { current_lsp_settings, @@ -933,7 +933,7 @@ impl LspStore { if !language_settings(Some(language), file.as_ref(), cx).enable_language_server { language_servers_to_stop.push((worktree_id, started_lsp_name.clone())); } else if let Some(worktree) = worktree { - let server_name = &adapter.name.0; + let server_name = &adapter.name; match ( current_lsp_settings.get(server_name), new_lsp_settings.get(server_name), @@ -4765,7 +4765,7 @@ impl LspStore { let project_id = self.project_id; let worktree_id = worktree.read(cx).id().to_proto(); let upstream_client = 
ssh.upstream_client.clone(); - let name = adapter.name().to_string(); + let name = adapter.name(); let Some(available_language) = self.languages.available_language_for_name(&language) else { log::error!("failed to find available language {language}"); @@ -4783,7 +4783,7 @@ impl LspStore { } }; - let name = adapter.name().to_string(); + let name = adapter.name(); let code_action_kinds = adapter .adapter .code_action_kinds() @@ -4809,7 +4809,7 @@ impl LspStore { .request(proto::CreateLanguageServer { project_id, worktree_id, - name, + name: name.0.to_string(), binary: Some(language_server_command), initialization_options, code_action_kinds, @@ -4892,7 +4892,7 @@ impl LspStore { ); // We need some on the SSH client, and some on SSH host - let lsp = project_settings.lsp.get(&adapter.name.0); + let lsp = project_settings.lsp.get(&adapter.name); let override_options = lsp.and_then(|s| s.initialization_options.clone()); let server_id = pending_server.server_id; @@ -5078,7 +5078,7 @@ impl LspStore { async fn shutdown_language_server( server_state: Option, - name: Arc, + name: LanguageServerName, cx: AsyncAppContext, ) { let server = match server_state { @@ -5123,7 +5123,7 @@ impl LspStore { let key = (worktree_id, adapter_name); if self.mode.is_local() { if let Some(server_id) = self.language_server_ids.remove(&key) { - let name = key.1 .0; + let name = key.1; log::info!("stopping language server {name}"); // Remove other entries for this language server as well @@ -7168,7 +7168,7 @@ impl LspAdapter for SshLspAdapter { } pub fn language_server_settings<'a, 'b: 'a>( delegate: &'a dyn LspAdapterDelegate, - language: &str, + language: &LanguageServerName, cx: &'b AppContext, ) -> Option<&'a LspSettings> { ProjectSettings::get( diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 29101917fb..75d70c1d3f 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -338,7 +338,7 @@ impl PrettierStore { 
prettier_store .update(cx, |prettier_store, cx| { let name = if is_default { - LanguageServerName(Arc::from("prettier (default)")) + LanguageServerName("prettier (default)".to_string().into()) } else { let worktree_path = worktree_id .and_then(|id| { @@ -366,7 +366,7 @@ impl PrettierStore { } None => format!("prettier ({})", prettier_dir.display()), }; - LanguageServerName(Arc::from(name)) + LanguageServerName(name.into()) }; cx.emit(PrettierStoreEvent::LanguageServerAdded { new_server_id, diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 9a7c80703c..904efe0a6b 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1,6 +1,7 @@ use collections::HashMap; use fs::Fs; use gpui::{AppContext, AsyncAppContext, BorrowAppContext, EventEmitter, Model, ModelContext}; +use language::LanguageServerName; use paths::local_settings_file_relative_path; use rpc::{proto, AnyProtoClient, TypedEnvelope}; use schemars::JsonSchema; @@ -27,7 +28,7 @@ pub struct ProjectSettings { /// name to the lsp value. 
/// Default: null #[serde(default)] - pub lsp: HashMap, LspSettings>, + pub lsp: HashMap, /// Configuration for Git-related features #[serde(default)] diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index b7fc56d3c6..b5ab1c4007 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -205,7 +205,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo AllLanguageSettings::get_global(cx) .language(Some(&"Rust".into())) .language_servers, - ["custom-rust-analyzer".into()] + ["custom-rust-analyzer".to_string()] ) }); @@ -264,7 +264,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo ) .language(Some(&"Rust".into())) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); @@ -274,7 +274,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo all_language_settings(file, cx) .language(Some(&"Rust".into())) .language_servers, - ["override-rust-analyzer".into()] + ["override-rust-analyzer".to_string()] ) }); } @@ -357,7 +357,7 @@ async fn test_remote_lsp(cx: &mut TestAppContext, server_cx: &mut TestAppContext all_language_settings(file, cx) .language(Some(&"Rust".into())) .language_servers, - ["rust-analyzer".into()] + ["rust-analyzer".to_string()] ) }); From 3ca18af40b8a7cb83d8303a8131e90ca997f09ca Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sat, 21 Sep 2024 21:01:29 +0900 Subject: [PATCH 77/96] docs: Fix typo in `configuring-zed.md` (#18178) Fix typo in `configuring-zed.md` Release Notes: - N/A --- docs/src/configuring-zed.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index de7433bf5d..7cc6a4a8cb 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -313,10 +313,10 @@ List of `string` values 
"cursor_shape": "block" ``` -3. An underline that runs along the following character: +3. An underscore that runs along the following character: ```json -"cursor_shape": "underline" +"cursor_shape": "underscore" ``` 4. An box drawn around the following character: From 1f35c8d09df9612e23d62a472d49c7021202711d Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 17:47:07 +0900 Subject: [PATCH 78/96] Fix tooltip of `always_treat_brackets_as_autoclosed` (#18191) Fixed a bug where the `always_treat_brackets_as_autoclosed` option would not display the message in the tooltip that appears when hovering. Release Notes: - N/A --- crates/language/src/language_settings.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 82d4208aae..735a9a60f8 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -325,11 +325,11 @@ pub struct LanguageSettingsContent { /// /// Default: true pub use_auto_surround: Option, - // Controls how the editor handles the autoclosed characters. - // When set to `false`(default), skipping over and auto-removing of the closing characters - // happen only for auto-inserted characters. - // Otherwise(when `true`), the closing characters are always skipped over and auto-removed - // no matter how they were inserted. + /// Controls how the editor handles the autoclosed characters. + /// When set to `false`(default), skipping over and auto-removing of the closing characters + /// happen only for auto-inserted characters. + /// Otherwise(when `true`), the closing characters are always skipped over and auto-removed + /// no matter how they were inserted. 
/// /// Default: false pub always_treat_brackets_as_autoclosed: Option, From e7fcf83ce8d88ca36d2aa7fe8fc017c308aaf138 Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 17:48:52 +0900 Subject: [PATCH 79/96] docs: Fix misordered headings (#18192) 1. Raised the `Indent Guides` heading to level 2, which is completely unrelated to `Git`. 2. the `Git` heading now only contains `Git Gutter` and `Inline Git Blame` as subheadings. 3. The `Indent Guides` heading is now located directly after the `Git` heading. Release Notes: - N/A --- docs/src/configuring-zed.md | 88 ++++++++++++++++++------------------- 1 file changed, 44 insertions(+), 44 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 7cc6a4a8cb..518dbb7f38 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -857,7 +857,50 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files } ``` -### Indent Guides +### Inline Git Blame + +- Description: Whether or not to show git blame information inline, on the currently focused line. +- Setting: `inline_blame` +- Default: + +```json +{ + "git": { + "inline_blame": { + "enabled": true + } + } +} +``` + +**Options** + +1. Disable inline git blame: + +```json +{ + "git": { + "inline_blame": { + "enabled": false + } + } +} +``` + +2. Only show inline git blame after a delay (that starts after cursor stops moving): + +```json +{ + "git": { + "inline_blame": { + "enabled": true, + "delay_ms": 500 + } + } +} +``` + +## Indent Guides - Description: Configuration related to indent guides. Indent guides can be configured separately for each language. - Setting: `indent_guides` @@ -926,49 +969,6 @@ To interpret all `.c` files as C++, files called `MyLockFile` as TOML and files } ``` -### Inline Git Blame - -- Description: Whether or not to show git blame information inline, on the currently focused line. 
-- Setting: `inline_blame` -- Default: - -```json -{ - "git": { - "inline_blame": { - "enabled": true - } - } -} -``` - -**Options** - -1. Disable inline git blame: - -```json -{ - "git": { - "inline_blame": { - "enabled": false - } - } -} -``` - -2. Only show inline git blame after a delay (that starts after cursor stops moving): - -```json -{ - "git": { - "inline_blame": { - "enabled": true, - "delay_ms": 500 - } - } -} -``` - ## Hard Tabs - Description: Whether to indent lines using tab characters or multiple spaces. From 37c93d8fead2f33ed444c1ee8efd303a2b5a4c8c Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 18:09:35 +0900 Subject: [PATCH 80/96] docs: Add missing `base_keymap` option in `configuring-zed.md` (#18190) Added `base_keymap`, an option that works in the editor but is missing from the documentation. Release Notes: - N/A --- assets/settings/default.json | 8 ++++-- docs/src/configuring-zed.md | 56 ++++++++++++++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 3 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 8424c5733d..e04ab90f21 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -15,9 +15,11 @@ // text editor: // // 1. "VSCode" - // 2. "JetBrains" - // 3. "SublimeText" - // 4. "Atom" + // 2. "Atom" + // 3. "JetBrains" + // 4. "None" + // 5. "SublimeText" + // 6. "TextMate" "base_keymap": "VSCode", // Features that can be globally enabled or disabled "features": { diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 518dbb7f38..5d9a2843ed 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -93,6 +93,62 @@ Extensions that provide language servers may also provide default settings for t `boolean` values +## Base Keymap + +- Description: Base key bindings scheme. Base keymaps can be overridden with user keymaps. +- Setting: `base_keymap` +- Default: `VSCode` + +**Options** + +1. 
VSCode + +```json +{ + "base_keymap": "VSCode" +} +``` + +2. Atom + +```json +{ + "base_keymap": "Atom" +} +``` + +3. JetBrains + +```json +{ + "base_keymap": "JetBrains" +} +``` + +4. None + +```json +{ + "base_keymap": "None" +} +``` + +5. SublimeText + +```json +{ + "base_keymap": "SublimeText" +} +``` + +6. TextMate + +```json +{ + "base_keymap": "TextMate" +} +``` + ## Buffer Font Family - Description: The name of a font to use for rendering text in the editor. From 0f4ebdfbca721614f3cadafc3b44e4fbf099afda Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Sun, 22 Sep 2024 18:15:13 +0900 Subject: [PATCH 81/96] docs: Add missing `ui_font_size` option in `configuring-zed.md` (#18189) Added `ui_font_size`, an option that works in the editor but is missing from the documentation. Release Notes: - N/A --- docs/src/configuring-zed.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 5d9a2843ed..c0aa4c513a 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -215,7 +215,7 @@ For example, to use `Nerd Font` as a fallback, add the following to your setting **Options** -`integer` values +`integer` values from `6` to `100` pixels (inclusive) ## Buffer Font Weight @@ -2184,6 +2184,16 @@ Float values between `0.0` and `0.9`, where: } ``` +## UI Font Size + +- Description: The default font size for text in the UI. 
+- Setting: `ui_font_size` +- Default: `16` + +**Options** + +`integer` values from `6` to `100` pixels (inclusive) + ## An example configuration: ```json From 75cb199a54666032e7a62dfb64739283556ae96c Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Mon, 23 Sep 2024 00:50:51 +0800 Subject: [PATCH 82/96] project: Fix typo error cause remove worktree not stop lsp (#18198) Release Notes: - N/A --- crates/project/src/project.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index bd9c17ecb2..78584cbae0 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2006,7 +2006,7 @@ impl Project { cx.emit(Event::LanguageServerAdded(*language_server_id)) } LspStoreEvent::LanguageServerRemoved(language_server_id) => { - cx.emit(Event::LanguageServerAdded(*language_server_id)) + cx.emit(Event::LanguageServerRemoved(*language_server_id)) } LspStoreEvent::LanguageServerLog(server_id, log_type, string) => cx.emit( Event::LanguageServerLog(*server_id, log_type.clone(), string.clone()), From bb7d9d35256825c9b022a3c89c556c1521664c8d Mon Sep 17 00:00:00 2001 From: Junseong Park Date: Mon, 23 Sep 2024 12:26:01 +0900 Subject: [PATCH 83/96] docs: Remove `default_dock_anchor` in `configuring-zed.md` (#18210) Removed the deprecated option `default_dock_anchor` in `configuring-zed.md` Note: https://zed.dev/blog/new-panel-system Release Notes: - N/A --- docs/src/configuring-zed.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index c0aa4c513a..7837044a60 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -381,12 +381,6 @@ List of `string` values "cursor_shape": "hollow" ``` -## Default Dock Anchor - -- Description: The default anchor for new docks. -- Setting: `default_dock_anchor` -- Default: `bottom` - **Options** 1. 
Position the dock attached to the bottom of the workspace: `bottom` From 05d18321db59539b56520d25f2ee95850ad911fd Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 23 Sep 2024 12:53:57 +0300 Subject: [PATCH 84/96] Resolve completions properly (#18212) Related to https://github.com/rust-lang/rust-analyzer/pull/18167 * Declare more completion item fields in the client completion resolve capabilities * Do resolve completions even if their docs are present * Instead, do not resolve completions that could not be resolved when handling the remote client resolve requests * Do replace the old lsp completion data with the resolved one Release Notes: - Improved completion resolve mechanism --- crates/lsp/src/lsp.rs | 8 ++++++- crates/project/src/lsp_store.rs | 37 ++++++++++++++++++++++++--------- crates/proto/proto/zed.proto | 1 + 3 files changed, 35 insertions(+), 11 deletions(-) diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 21671cd0b1..c2a5951de7 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -615,8 +615,14 @@ impl LanguageServer { snippet_support: Some(true), resolve_support: Some(CompletionItemCapabilityResolveSupport { properties: vec![ - "documentation".to_string(), "additionalTextEdits".to_string(), + "command".to_string(), + "detail".to_string(), + "documentation".to_string(), + "filterText".to_string(), + "labelDetails".to_string(), + "tags".to_string(), + "textEdit".to_string(), ], }), insert_replace_support: Some(true), diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 6a3788c879..95ca842360 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1615,10 +1615,6 @@ impl LspStore { let (server_id, completion) = { let completions_guard = completions.read(); let completion = &completions_guard[completion_index]; - if completion.documentation.is_some() { - continue; - } - did_resolve = true; let server_id = completion.server_id; let completion = 
completion.lsp_completion.clone(); @@ -1643,10 +1639,6 @@ impl LspStore { let (server_id, completion) = { let completions_guard = completions.read(); let completion = &completions_guard[completion_index]; - if completion.documentation.is_some() { - continue; - } - let server_id = completion.server_id; let completion = completion.lsp_completion.clone(); @@ -1743,6 +1735,10 @@ impl LspStore { completion.lsp_completion.insert_text_format = completion_item.insert_text_format; } } + + let mut completions = completions.write(); + let completion = &mut completions[completion_index]; + completion.lsp_completion = completion_item; } #[allow(clippy::too_many_arguments)] @@ -1771,6 +1767,10 @@ impl LspStore { else { return; }; + let Some(lsp_completion) = serde_json::from_slice(&response.lsp_completion).log_err() + else { + return; + }; let documentation = if response.documentation.is_empty() { Documentation::Undocumented @@ -1787,6 +1787,7 @@ impl LspStore { let mut completions = completions.write(); let completion = &mut completions[completion_index]; completion.documentation = Some(documentation); + completion.lsp_completion = lsp_completion; let old_range = response .old_start @@ -4192,17 +4193,32 @@ impl LspStore { let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?; let completion = this - .read_with(&cx, |this, _| { + .read_with(&cx, |this, cx| { let id = LanguageServerId(envelope.payload.language_server_id as usize); let Some(server) = this.language_server_for_id(id) else { return Err(anyhow!("No language server {id}")); }; - Ok(server.request::(lsp_completion)) + Ok(cx.background_executor().spawn(async move { + let can_resolve = server + .capabilities() + .completion_provider + .as_ref() + .and_then(|options| options.resolve_provider) + .unwrap_or(false); + if can_resolve { + server + .request::(lsp_completion) + .await + } else { + anyhow::Ok(lsp_completion) + } + })) })?? 
.await?; let mut documentation_is_markdown = false; + let lsp_completion = serde_json::to_string(&completion)?.into_bytes(); let documentation = match completion.documentation { Some(lsp::Documentation::String(text)) => text, @@ -4244,6 +4260,7 @@ impl LspStore { old_start, old_end, new_text, + lsp_completion, }) } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index a886b21855..a18bbe8ecf 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -1219,6 +1219,7 @@ message ResolveCompletionDocumentationResponse { Anchor old_start = 3; Anchor old_end = 4; string new_text = 5; + bytes lsp_completion = 6; } message ResolveInlayHint { From 8a36278c9590664e881dda454ccfa7685eb5b761 Mon Sep 17 00:00:00 2001 From: moshyfawn Date: Mon, 23 Sep 2024 08:59:45 -0400 Subject: [PATCH 85/96] docs: Fix long code blocks overflow (#18208) Closes #18207 Release Notes: - N/A | Before | After | |--------|-------| | image | image | --- docs/theme/highlight.css | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/theme/highlight.css b/docs/theme/highlight.css index 9d8f39d903..9bd80f3516 100644 --- a/docs/theme/highlight.css +++ b/docs/theme/highlight.css @@ -12,6 +12,7 @@ .hljs { color: #24292e; background: #ffffff; + overflow-x: auto; } .hljs-doctag, From d784e720274b2a9ced94aa6fcc703f53db132163 Mon Sep 17 00:00:00 2001 From: Charlie Egan Date: Mon, 23 Sep 2024 14:38:54 +0100 Subject: [PATCH 86/96] docs: Add Rego language (#18217) Release Notes: - N/A --------- Signed-off-by: Charlie Egan Co-authored-by: Charlie Egan Co-authored-by: Marshall Bowers --- docs/src/SUMMARY.md | 1 + docs/src/languages.md | 1 + docs/src/languages/rego.md | 38 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 40 insertions(+) create mode 100644 docs/src/languages/rego.md diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 8bb8035c61..bb0c9d79f5 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -93,6 +93,7 @@ - 
[PureScript](./languages/purescript.md) - [Python](./languages/python.md) - [R](./languages/r.md) +- [Rego](./languages/rego.md) - [ReStructuredText](./languages/rst.md) - [Racket](./languages/racket.md) - [Roc](./languages/roc.md) diff --git a/docs/src/languages.md b/docs/src/languages.md index 7ec586f1f5..4bc6e7d3d7 100644 --- a/docs/src/languages.md +++ b/docs/src/languages.md @@ -45,6 +45,7 @@ Zed supports hundreds of programming languages and text formats. Some work out-o - [PureScript](./languages/purescript.md) - [Python](./languages/python.md) - [R](./languages/r.md) +- [Rego](./languages/rego.md) - [ReStructuredText](./languages/rst.md) - [Racket](./languages/racket.md) - [Roc](./languages/roc.md) diff --git a/docs/src/languages/rego.md b/docs/src/languages/rego.md new file mode 100644 index 0000000000..3709c6a1fe --- /dev/null +++ b/docs/src/languages/rego.md @@ -0,0 +1,38 @@ +# Rego + +Rego language support in Zed is provided by the community-maintained [Rego extension](https://github.com/StyraInc/zed-rego). + +- Tree Sitter: [FallenAngel97/tree-sitter-rego](https://github.com/FallenAngel97/tree-sitter-rego) +- Language Server: [StyraInc/regal](https://github.com/StyraInc/regal) + +## Installation + +The extension is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information. + +## Configuration + +The extension's behavior is configured in the `.regal/config.yaml` file.
The following is an example configuration which disables the `todo-comment` rule, customizes the `line-length` rule, and ignores test files for the `opa-fmt` rule: + +```yaml +rules: + style: + todo-comment: + # don't report on todo comments + level: ignore + line-length: + # custom rule configuration + max-line-length: 100 + # warn on too long lines, but don't fail + level: warning + opa-fmt: + # not needed as error is the default, but + # being explicit won't hurt + level: error + # files can be ignored for any individual rule + # in this example, test files are ignored + ignore: + files: + - "*_test.rego" +``` + +Read Regal's [configuration documentation](https://docs.styra.com/regal#configuration) for more information. From 2ff8dde925b75d62f030755843cd93c402a41022 Mon Sep 17 00:00:00 2001 From: jvmncs <7891333+jvmncs@users.noreply.github.com> Date: Mon, 23 Sep 2024 10:16:15 -0400 Subject: [PATCH 87/96] Use fenix toolchain in nix shell (#18227) In #17974 we explicitly depend on rustc/cargo for the nix devShell, however the fenix overlay that contains the latest stable versions was not being applied to that shell. This led to the shell inheriting whatever rustc/cargo was on nixos-unstable from nixpkgs, which sometimes lags behind. This change fixes that, and also restructures the flake to ensure that all outputs rely on the overlaid `pkgs`. 
Release Notes: - N/A --- flake.lock | 18 ++++++++-------- flake.nix | 58 +++++++++++++++++++++++++++------------------------ nix/shell.nix | 3 +-- 3 files changed, 41 insertions(+), 38 deletions(-) diff --git a/flake.lock b/flake.lock index a5b7a7a6ae..5666e73569 100644 --- a/flake.lock +++ b/flake.lock @@ -2,11 +2,11 @@ "nodes": { "crane": { "locked": { - "lastModified": 1725409566, - "narHash": "sha256-PrtLmqhM6UtJP7v7IGyzjBFhbG4eOAHT6LPYOFmYfbk=", + "lastModified": 1727060013, + "narHash": "sha256-/fC5YlJy4IoAW9GhkJiwyzk0K/gQd9Qi4rRcoweyG9E=", "owner": "ipetkov", "repo": "crane", - "rev": "7e4586bad4e3f8f97a9271def747cf58c4b68f3c", + "rev": "6b40cc876c929bfe1e3a24bf538ce3b5622646ba", "type": "github" }, "original": { @@ -23,11 +23,11 @@ "rust-analyzer-src": "rust-analyzer-src" }, "locked": { - "lastModified": 1726813972, - "narHash": "sha256-t6turZgoSAVgj7hn5mxzNlLOeVeZvymFo8+ymB52q34=", + "lastModified": 1727073227, + "narHash": "sha256-1kmkEQmFfGVuPBasqSZrNThqyMDV1SzTalQdRZxtDRs=", "owner": "nix-community", "repo": "fenix", - "rev": "251caeafc75b710282ee7e375800f75f4c8c5727", + "rev": "88cc292eb3c689073c784d6aecc0edbd47e12881", "type": "github" }, "original": { @@ -53,11 +53,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1726642912, - "narHash": "sha256-wiZzKGHRAhItEuoE599Wm3ic+Lg/NykuBvhb+awf7N8=", + "lastModified": 1726937504, + "narHash": "sha256-bvGoiQBvponpZh8ClUcmJ6QnsNKw0EMrCQJARK3bI1c=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "395c52d142ec1df377acd67db6d4a22950b02a98", + "rev": "9357f4f23713673f310988025d9dc261c20e70c6", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 7d1410ac7c..2ee86c4466 100644 --- a/flake.nix +++ b/flake.nix @@ -17,27 +17,34 @@ fenix, ... 
}: let - forAllSystems = function: - nixpkgs.lib.genAttrs [ - "x86_64-linux" - "aarch64-linux" - ] (system: - function (import nixpkgs { - inherit system; - overlays = [fenix.overlays.default]; - })); + systems = ["x86_64-linux" "aarch64-linux"]; + + overlays = { + fenix = fenix.overlays.default; + rust-toolchain = final: prev: { + rustToolchain = final.fenix.stable.toolchain; + }; + zed-editor = final: prev: { + zed-editor = final.callPackage ./nix/build.nix { + craneLib = (crane.mkLib final).overrideToolchain final.rustToolchain; + rustPlatform = final.makeRustPlatform { + inherit (final.rustToolchain) cargo rustc; + }; + }; + }; + }; + + mkPkgs = system: + import nixpkgs { + inherit system; + overlays = builtins.attrValues overlays; + }; + + forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f (mkPkgs system)); in { - packages = forAllSystems (pkgs: let - craneLib = (crane.mkLib pkgs).overrideToolchain (p: p.fenix.stable.toolchain); - rustPlatform = pkgs.makeRustPlatform { - inherit (pkgs.fenix.stable.toolchain) cargo rustc; - }; - nightlyBuild = pkgs.callPackage ./nix/build.nix { - inherit craneLib rustPlatform; - }; - in { - zed-editor = nightlyBuild; - default = nightlyBuild; + packages = forAllSystems (pkgs: { + zed-editor = pkgs.zed-editor; + default = pkgs.zed-editor; }); devShells = forAllSystems (pkgs: { @@ -46,13 +53,10 @@ formatter = forAllSystems (pkgs: pkgs.alejandra); - overlays.default = final: prev: { - zed-editor = final.callPackage ./nix/build.nix { - craneLib = (crane.mkLib final).overrideToolchain (p: p.fenix.stable.toolchain); - rustPlatform = final.makeRustPlatform { - inherit (final.fenix.stable.toolchain) cargo rustc; - }; + overlays = + overlays + // { + default = nixpkgs.lib.composeManyExtensions (builtins.attrValues overlays); }; - }; }; } diff --git a/nix/shell.nix b/nix/shell.nix index 476374b67e..e0b4018778 100644 --- a/nix/shell.nix +++ b/nix/shell.nix @@ -20,8 +20,7 @@ in wayland xorg.libxcb vulkan-loader - rustc - cargo + 
rustToolchain ]; in pkgs.mkShell.override {inherit stdenv;} { From 35a80f07e02054b281a946ead549d24499dcfcec Mon Sep 17 00:00:00 2001 From: Nathan Lovato <12694995+NathanLovato@users.noreply.github.com> Date: Mon, 23 Sep 2024 17:01:32 +0200 Subject: [PATCH 88/96] docs: Split vim mode documentation into two pages, edit for clarity (#17614) Closes #17215 Release Notes: - N/A --- This PR builds upon the vim mode documentation page and aims bring the following improvements: - Separate vim mode-specific configuration from introducing vim mode. - Reformat some lists of provided commands and keymaps from code blocks to sub-sections containing tables. - Flesh out the text a little bit to make it more explicit in some parts. - Generally format notes and a couple of other things closer to some other docs pages. Checking the diff doesn't give a good idea of the changes, so here are some before after images for quick examples of the kinds of changes brought by this PR. **Introducing the key differences of Zed's vim mode** Before ![2024-09-09_22-12](https://github.com/user-attachments/assets/447418cb-a6e6-4f9c-8d4b-6d941126979e) After ![2024-09-09_22-16](https://github.com/user-attachments/assets/be69f2d9-c3ae-4b34-978a-344130bee37c) --- **Zed-specific vim key bindings** Before ![2024-09-09_22-17](https://github.com/user-attachments/assets/88fdc512-a50b-487d-85d1-5988f15c2a6f) After ![2024-09-09_22-18](https://github.com/user-attachments/assets/3b77c2f6-0ffa-4afc-a86d-1210ac706c8c) --- docs/src/SUMMARY.md | 2 +- docs/src/vim.md | 609 ++++++++++++++++++++++++++------------------ 2 files changed, 367 insertions(+), 244 deletions(-) diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index bb0c9d79f5..f0e4784f89 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -17,7 +17,7 @@ - [Snippets](./snippets.md) - [Themes](./themes.md) -- [Vim](./vim.md) +- [Vim Mode](./vim.md) # Using Zed diff --git a/docs/src/vim.md b/docs/src/vim.md index 777534813f..8bfa6aa73f 100644 --- 
a/docs/src/vim.md +++ b/docs/src/vim.md @@ -1,14 +1,35 @@ # Vim Mode -Zed includes a Vim emulation layer known as "vim mode". On this page, you will learn how to turn Zed's vim mode on or off, what tools and commands are available, and how to customize keybindings. +Zed includes a Vim emulation layer known as "vim mode". On this page, you will learn how to turn Zed's vim mode on or off, what tools and commands Zed provides to help you navigate and edit your code, and generally how to make the most of vim mode in Zed. -## Philosophy +You'll learn how to: -Vim mode tries to offer a familiar experience to Vim users: it replicates the behavior of motions and commands precisely when it makes sense and uses Zed-specific functionality to provide an editing experience that "just works" without requiring configuration on your part. This includes support for semantic navigation, multiple cursors, or other features usually provided by plugins like surrounding text. +- Understand the core differences between Zed's vim mode and traditional Vim +- Enable or disable vim mode +- Make the most of Zed-specific features within vim mode +- Customize vim mode key bindings +- Configure vim mode settings + +Whether you're new to vim mode or an experienced Vim user looking to optimize your Zed experience, this guide will help you harness the full power of modal editing in Zed. + +## Zed's vim mode design + +Vim mode tries to offer a familiar experience to Vim users: it replicates the behavior of motions and commands precisely when it makes sense and uses Zed-specific functionality to provide an editing experience that "just works" without requiring configuration on your part. + +This includes support for semantic navigation, multiple cursors, or other features usually provided by plugins like surrounding text. So, Zed's vim mode does not replicate Vim one-to-one, but it meshes Vim's modal design with Zed's modern features to provide a more fluid experience. 
It's also configurable, so you can add your own key bindings or override the defaults. -> **Note:** The foundations of Zed's vim mode should already cover many use cases, and we're always looking to improve it. If you find missing features that you rely on in your workflow, please [file an issue](https://github.com/zed-industries/zed/issues). +### Core differences + +There are four types of features in vim mode that use Zed's core functionality, leading to some differences in behavior: + +1. **Motions**: vim mode uses Zed's semantic parsing to tune the behavior of motions per language. For example, in Rust, jumping to matching bracket with `%` works with the pipe character `|`. In JavaScript, `w` considers `$` to be a word character. +2. **Visual block selections**: vim mode uses Zed's multiple cursor to emulate visual block selections, making block selections a lot more flexible. For example, anything you insert after a block selection updates on every line in real-time, and you can add or remove cursors anytime. +3. **Macros**: vim mode uses Zed's recording system for vim macros. So, you can capture and replay more complex actions, like autocompletion. +4. **Search and replace**: vim mode uses Zed's search system, so, the syntax for regular expressions is slightly different compared to Vim. [Head to the Regex differences section](#regex-differences) for details. + +> **Note:** The foundations of Zed's vim mode should already cover many use cases, and we're always looking to improve it. If you find missing features that you rely on in your workflow, please [file an issue on GitHub](https://github.com/zed-industries/zed/issues). ## Enabling and disabling vim mode @@ -16,136 +37,351 @@ When you first open Zed, you'll see a checkbox on the welcome screen that allows If you missed this, you can toggle vim mode on or off anytime by opening the command palette and using the workspace command `toggle vim mode`. 
+> **Note**: This command toggles the following property in your user settings: +> +> ```json +> { +> "vim_mode": true +> } +> ``` + ## Zed-specific features Zed is built on a modern foundation that (among other things) uses tree-sitter and language servers to understand the content of the file you're editing and supports multiple cursors out of the box. Vim mode has several "core Zed" key bindings that will help you make the most of Zed's specific feature set. -``` -# Language server -g d Go to definition -g D Go to declaration -g y Go to type definition -g I Go to implementation +### Language server -c d Rename (change definition) -g A Go to All references to the current word +The following commands use the language server to help you navigate and refactor your code. -g s Find symbol in current file -g S Find symbol in entire project +| Command | Default Shortcut | +| ---------------------------------------- | ---------------- | +| Go to definition | `g d` | +| Go to declaration | `g D` | +| Go to type definition | `g y` | +| Go to implementation | `g I` | +| Rename (change definition) | `c d` | +| Go to All references to the current word | `g A` | +| Find symbol in current file | `g s` | +| Find symbol in entire project | `g S` | +| Go to next diagnostic | `g ]` or `] d` | +| Go to previous diagnostic | `g [` or `[ d` | +| Show inline error (hover) | `g h` | +| Open the code actions menu | `g .` | -g ] Go to next diagnostic -g [ Go to previous diagnostic -] d Go to next diagnostic -[ d Go to previous diagnostic -g h Show inline error (hover) -g . 
Open the code actions menu +### Git -# Git -] c Go to next git change -[ c Go to previous git change +| Command | Default Shortcut | +| ------------------------- | ---------------- | +| Go to next git change | `] c` | +| Go to previous git change | `[ c` | -# Treesitter -] x Select a smaller syntax node -[ x Select a larger syntax node +### Treesitter -# Multi cursor -g l Add a visual selection for the next copy of the current word -g L The same, but backwards -g > Skip latest word selection, and add next. -g < The same, but backwards -g a Add a visual selection for every copy of the current word +Treesitter is a powerful tool that Zed uses to understand the structure of your code. These commands help you navigate your code semantically. -# Pane management -g / Open a project-wide search -g Open the current search excerpt - Open the current search excerpt in a split - g d Go to definition in a split - g D Go to type definition in a split +| Command | Default Shortcut | +| ---------------------------- | ---------------- | +| Select a smaller syntax node | `] x` | +| Select a larger syntax node | `[ x` | -# Insert mode -ctrl-x ctrl-o Open the completion menu -ctrl-x ctrl-c Request GitHub Copilot suggestion (if configured) -ctrl-x ctrl-a Open the inline AI assistant (if configured) -ctrl-x ctrl-l Open the code actions menu -ctrl-x ctrl-z Hides all suggestions +### Multi cursor -# Ex commands -:E[xplore] Open the project panel -:C[ollab] Open the collaboration panel -:Ch[at] Open the chat panel -:A[I] Open the AI panel -:No[tif] Open the notifications panel -:fe[edback] Open the feedback window -:cl[ist] Open the diagnostics window -:te[rm] Open the terminal -:Ext[ensions] Open the extensions window +These commands help you manage multiple cursors in Zed. 
+ +| Command | Default Shortcut | +| ------------------------------------------------------------ | ---------------- | +| Add a cursor selecting the next copy of the current word | `g l` | +| Add a cursor selecting the previous copy of the current word | `g L` | +| Skip latest word selection, and add next | `g >` | +| Skip latest word selection, and add previous | `g <` | +| Add a visual selection for every copy of the current word | `g a` | + +### Pane management + +These commands open new panes or jump to specific panes. + +| Command | Default Shortcut | +| ------------------------------------------ | ------------------ | +| Open a project-wide search | `g /` | +| Open the current search excerpt | `g ` | +| Open the current search excerpt in a split | ` ` | +| Go to definition in a split | ` g d` | +| Go to type definition in a split | ` g D` | + +### In insert mode + +The following commands help you bring up Zed's completion menu, request a suggestion from GitHub Copilot, or open the inline AI assistant without leaving insert mode. + +| Command | Default Shortcut | +| ---------------------------------------------------------------------------- | ---------------- | +| Open the completion menu | `ctrl-x ctrl-o` | +| Request GitHub Copilot suggestion (requires GitHub Copilot to be configured) | `ctrl-x ctrl-c` | +| Open the inline AI assistant (requires a configured assistant) | `ctrl-x ctrl-a` | +| Open the code actions menu | `ctrl-x ctrl-l` | +| Hides all suggestions | `ctrl-x ctrl-z` | + +### Supported plugins + +Zed's vim mode includes some features that are usually provided by very popular plugins in the Vim ecosystem: + +- You can surround text objects with `ys` (yank surround), change surrounding with `cs`, and delete surrounding with `ds`. +- You can comment and uncomment selections with `gc` in visual mode and `gcc` in normal mode. 
+- The project panel supports many shortcuts modeled after the Vim plugin `netrw`: navigation with `hjkl`, open file with `o`, open file in a new tab with `t`, etc. +- You can add key bindings to your keymap to navigate "camelCase" names. [Head down to the Optional key bindings](#optional-key-bindings) section to learn how. + +## Command palette + +Vim mode allows you to open Zed's command palette with `:`. You can then type to access any usual Zed command. Additionally, vim mode adds aliases for popular Vim commands to ensure your muscle memory transfers to Zed. For example, you can write `:w` or `:write` to save the file. + +Below, you'll find tables listing the commands you can use in the command palette. We put optional characters in square brackets to indicate that you can omit them. + +> **Note**: We don't emulate the full power of Vim's command line yet. In particular, commands currently do not support arguments. Please [file issues on GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. + +### File and window management + +This table shows commands for managing windows, tabs, and panes. As commands don't support arguments currently, you cannot specify a filename when saving or creating a new file. 
+ +| Command | Description | +| -------------- | ---------------------------------------------------- | +| `:w[rite][!]` | Save the current file | +| `:wq[!]` | Save the file and close the buffer | +| `:q[uit][!]` | Close the buffer | +| `:wa[ll][!]` | Save all open files | +| `:wqa[ll][!]` | Save all open files and close all buffers | +| `:qa[ll][!]` | Close all buffers | +| `:[e]x[it][!]` | Close the buffer | +| `:up[date]` | Save the current file | +| `:cq` | Quit completely (close all running instances of Zed) | +| `:vs[plit]` | Split the pane vertically | +| `:sp[lit]` | Split the pane horizontally | +| `:new` | Create a new file in a horizontal split | +| `:vne[w]` | Create a new file in a vertical split | +| `:tabedit` | Create a new file in a new tab | +| `:tabnew` | Create a new file in a new tab | +| `:tabn[ext]` | Go to the next tab | +| `:tabp[rev]` | Go to previous tab | +| `:tabc[lose]` | Close the current tab | + +> **Note:** The `!` character is used to force the command to execute without saving changes or prompting before overwriting a file. + +### Ex commands + +These ex commands open Zed's various panels and windows. + +| Command | Default Shortcut | +| ---------------------------- | ---------------- | +| Open the project panel | `:E[xplore]` | +| Open the collaboration panel | `:C[ollab]` | +| Open the chat panel | `:Ch[at]` | +| Open the AI panel | `:A[I]` | +| Open the notifications panel | `:No[tif]` | +| Open the feedback window | `:fe[edback]` | +| Open the diagnostics window | `:cl[ist]` | +| Open the terminal | `:te[rm]` | +| Open the extensions window | `:Ext[ensions]` | + +### Navigating diagnostics + +These commands navigate diagnostics. 
+ +| Command | Description | +| ------------------------ | ------------------------------ | +| `:cn[ext]` or `:ln[ext]` | Go to the next diagnostic | +| `:cp[rev]` or `:lp[rev]` | Go to the previous diagnostics | +| `:cc` or `:ll` | Open the errors page | + +### Git + +These commands interact with the version control system git. + +| Command | Description | +| --------------- | ------------------------------------------------------- | +| `:dif[fupdate]` | View the diff under the cursor (`d o` in normal mode) | +| `:rev[ert]` | Revert the diff under the cursor (`d p` in normal mode) | + +### Jump + +These commands jump to specific positions in the file. + +| Command | Description | +| ------------------- | ----------------------------------- | +| `:` | Jump to a line number | +| `:$` | Jump to the end of the file | +| `:/foo` and `:?foo` | Jump to next/prev line matching foo | + +### Replacement + +This command replaces text. It emulates the substitute command in vim. The substitute command uses regular expressions, and Zed uses a slightly different syntax than vim. You can learn more about Zed's syntax below, [in the regex differences section](#regex-differences). Also, by default, Zed always replaces all occurrences of the search pattern in the current line. + +| Command | Description | +| -------------------- | --------------------------------- | +| `:[range]s/foo/bar/` | Replace instances of foo with bar | + +### Editing + +These commands help you edit text. + +| Command | Description | +| ----------------- | ------------------------------------------------------- | +| `:j[oin]` | Join the current line | +| `:d[elete][l][p]` | Delete the current line | +| `:s[ort] [i]` | Sort the current selection (with i, case-insensitively) | +| `:y[ank]` | Yank (copy) the current selection or line | + +### Command mnemonics + +As any Zed command is available, you may find that it's helpful to remember mnemonics that run the correct command. 
For example: + +- `:diffs` for "toggle all hunk diffs" +- `:cpp` for "copy path to file" +- `:crp` for "copy relative path" +- `:reveal` for "reveal in finder" +- `:zlog` for "open zed log" +- `:clank` for "cancel language server work" + +## Customizing key bindings + +In this section, we'll learn how to customize the key bindings of Zed's vim mode. You'll learn: + +- How to select the correct context for your new key bindings. +- Useful contexts for vim mode key bindings. +- Common key bindings to customize for extra productivity. + +### Selecting the correct context + +Zed's key bindings are evaluated only when the `"context"` property matches your location in the editor. For example, if you add key bindings to the `"Editor"` context, they will only work when you're editing a file. If you add key bindings to the `"Workspace"` context, they will work everywhere in Zed. Here's an example of a key binding that saves when you're editing a file: + +```json +{ + "context": "Editor", + "bindings": { + "ctrl-s": "file::Save" + } +} ``` -Vim mode uses Zed to define concepts like "brackets" (for the `%` key) and "words" (for motions like `w` and `e`). This does lead to some differences, but they are mostly positive. For example `%` considers `|` to be a bracket in languages like Rust; and `w` considers `$` to be a word-character in languages like JavaScript. +Contexts are nested, so when you're editing a file, the context is the `"Editor"` context, which is inside the `"Pane"` context, which is inside the `"Workspace"` context. That's why any key bindings you add to the `"Workspace"` context will work when you're editing a file. Here's an example: -Vim mode emulates visual block mode using Zed's multiple cursor support. This again leads to some differences, but is much more powerful. +```json +// This key binding will work when you're editing a file. It comes built into Zed by default as the workspace: save command. 
+{ + "context": "Workspace", + "bindings": { + "ctrl-s": "file::Save" + } +} +``` -Vim's macro support (`q` and `@`) is implemented using Zed's actions. This lets us support recording and replaying of autocompleted code, etc. Unlike Vim, Zed does not re-use the yank registers for recording macros, they are two separate namespaces. +Contexts are expressions. They support boolean operators like `&&` (and) and `||` (or). For example, you can use the context `"Editor && vim_mode == normal"` to create key bindings that only work when you're editing a file _and_ you're in vim's normal mode. -Finally, vim mode's search and replace functionality is backed by Zed's. This means that the pattern syntax is slightly different, see the section on [Regex differences](#regex-differences) for details. +Vim mode adds several contexts to the `"Editor"` context: -## Custom key bindings +| Operator | Description | +| -------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| VimControl | Indicates that vim keybindings should work. Currently an alias for `vim_mode == normal \|\| vim_mode == visual \|\| vim_mode == operator`, but the definition may change over time | +| vim_mode == normal | Normal mode | +| vim_mode == visual | Visual mode | +| vim_mode == insert | Insert mode | +| vim_mode == replace | Replace mode | +| vim_mode == waiting | Waiting for an arbitrary key (e.g., after typing `f` or `t`) | +| vim_mode == operator | Waiting for another binding to trigger (e.g., after typing `c` or `d`) | +| vim_operator | Set to `none` unless `vim_mode == operator`, in which case it is set to the current operator's default keybinding (e.g., after typing `d`, `vim_operator == d`) | -You can edit your personal key bindings with `:keymap`. -For vim-specific shortcuts, you may find the following template a good place to start. 
+> **Note**: Contexts are matched only on one level at a time. So it is possible to use the expression `"Editor && vim_mode == normal"`, but `"Workspace && vim_mode == normal"` will never match because we set the vim context at the `"Editor"` level. + +### Useful contexts for vim mode key bindings + +Here's a template with useful vim mode contexts to help you customize your vim mode key bindings. You can copy it and integrate it into your user keymap. ```json [ { "context": "VimControl && !menu", "bindings": { - // put key-bindings here if you want them to work in normal & visual mode + // Put key bindings here if you want them to work in normal & visual mode. } }, { "context": "vim_mode == normal && !menu", "bindings": { - // "shift-y": ["workspace::SendKeystrokes", "y $"] // use nvim's Y behavior + // "shift-y": ["workspace::SendKeystrokes", "y $"] // Use neovim's yank behavior: yank to end of line. } }, { "context": "vim_mode == insert", "bindings": { - // "j k": "vim::NormalBefore" // remap jk in insert mode to escape. + // "j k": "vim::NormalBefore" // In insert mode, make jk escape to normal mode. } }, { "context": "EmptyPane || SharedScreen", "bindings": { - // put key-bindings here (in addition to above) if you want them to - // work when no editor exists + // Put key bindings here (in addition to the context above) if you want them to + // work when no editor exists. // "space f": "file_finder::Toggle" } } ] ``` -If you would like to emulate vim's `map` (`nmap` etc.) commands you can bind to the [`workspace::SendKeystrokes`](./key-bindings.md#remapping-keys) action in the correct context. +> **Note**: If you would like to emulate Vim's `map` commands (`nmap`, etc.), you can use the action `workspace::SendKeystrokes` in the correct context. -Check out the [bindings that are enabled by default in vim mode](https://github.com/zed-industries/zed/blob/main/assets/keymaps/vim.json). 
+### Optional key bindings -### Contexts +By default, you can navigate between the different files open in the editor with shortcuts like `ctrl+w` followed by one of `hjkl` to move to the left, down, up, or right, respectively. -Zed's keyboard bindings are evaluated only when the `"context"` matches the location you are in on the screen. Locations are nested, so when you're editing, you're in the `"Workspace"` location, which is at the top, containing a `"Pane"` that contains an `"Editor"`. +But you cannot use the same shortcuts to move between all the editor docks (the terminal, project panel, assistant panel, ...). If you want to use the same shortcuts to navigate to the docks, you can add the following key bindings to your user keymap. -Contexts are matched only on one level at a time. So, it is possible to combine `Editor && vim_mode == normal`, but `Workspace && vim_mode == normal` will never match because we set the vim context at the `Editor` level. +```json +{ + "context": "Dock", + "bindings": { + "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], + "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], + "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], + "ctrl-w j": ["workspace::ActivatePaneInDirection", "Down"] + // ... or other keybindings + } +} +``` -Vim mode adds several contexts to the `Editor`: +Subword motion, which allows you to navigate and select individual words in camelCase or snake_case, is not enabled by default. To enable it, add these bindings to your keymap. -- `vim_mode` is similar to, but not identical to, the current mode. It starts as one of `normal`, `visual`, `insert` or `replace` (depending on your mode). If you are mid-way through typing a sequence, `vim_mode` will be either `waiting` if it's waiting for an arbitrary key (for example after typing `f` or `t`), or `operator` if it's waiting for another binding to trigger (for example after typing `c` or `d`). 
-- `vim_operator` is set to `none` unless `vim_mode == operator` in which case it is set to the current operator's default keybinding (for example after typing `d`, `vim_operator == d`). -- `"VimControl"` indicates that vim keybindings should work. It is currently an alias for `vim_mode == normal || vim_mode == visual || vim_mode == operator`, but the definition may change over time. +```json +[ + { + "context": "VimControl && !menu && vim_mode != operator", + "bindings": { + "w": "vim::NextSubwordStart", + "b": "vim::PreviousSubwordStart", + "e": "vim::NextSubwordEnd", + "g e": "vim::PreviousSubwordEnd" + } + } +] +``` + +Vim mode comes with shortcuts to surround the selection in normal mode (`ys`), but it doesn't have a shortcut to add surrounds in visual mode. By default, `shift-s` substitutes the selection (erases the text and enters insert mode). To use `shift-s` to add surrounds in visual mode, you can add the following object to your keymap. + +```json +{ + "context": "vim_mode == visual", + "bindings": { + "shift-s": [ + "vim::PushOperator", + { + "AddSurrounds": {} + } + ] + } +} +``` ### Restoring common text editing keybindings -If you're using vim mode on Linux or Windows, you may find it overrides keybindings you can't live without: Ctrl+v to copy, Ctrl+f to search, etc. You can restore them by copying this data into your keymap: +If you're using vim mode on Linux or Windows, you may find it overrides keybindings you can't live without: `ctrl+v` to copy, `ctrl+f` to search, etc. You can restore them by copying this data into your keymap: ```json { @@ -162,109 +398,23 @@ If you're using vim mode on Linux or Windows, you may find it overrides keybindi }, ``` -## Command palette +## Changing vim mode settings -Vim mode allows you to enable Zed’s command palette with `:`. This means that you can use vim's command palette to run any action that Zed supports. 
+You can change the following settings to modify vim mode's behavior: -Additionally, vim mode contains a number of aliases for popular Vim commands to ensure that muscle memory works. For example, `:w` will save the file. +| Property | Description | Default Value | +| ---------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- | +| use_system_clipboard | Determines how system clipboard is used:
  • "always": use the system clipboard for all operations when no other register is specified&#10;
  • "never": don't use the system clipboard unless the "+ or "* register is explicitly specified&#10;
  • "on_yank": use the system clipboard for yank operations when no other register is specified&#10;
| "always" | +| use_multiline_find | If `true`, `f` and `t` motions extend across multiple lines. | false | +| use_smartcase_find | If `true`, `f` and `t` motions are case-insensitive when the target letter is lowercase. | false | +| toggle_relative_line_numbers | If `true`, line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options. | false | +| custom_digraphs | An object that allows you to add custom digraphs. Read below for an example. | {} | -We do not (yet) emulate the full power of Vim’s command line, in particular, we do not support arguments to commands yet. Please [file issues on GitHub](https://github.com/zed-industries/zed) as you find things that are missing from the command palette. - -As mentioned above, one thing to be aware of is that the regex engine is slightly different from vim's in `:%s/a/b`. - -Currently supported Vim-specific commands: - -``` -# window management -:w[rite][!], :wq[!], :q[uit][!], :wa[ll][!], :wqa[ll][!], :qa[ll][!], :[e]x[it][!], :up[date] - to save/close tab(s) and pane(s) (no filename is supported yet) -:cq - to quit completely. -:vs[plit], :sp[lit] - to split vertically/horizontally (no filename is supported yet) -:new, :vne[w] - to create a new file in a new pane above or to the left -:tabedit, :tabnew - to create a new file in a new tab. 
-:tabn[ext], :tabp[rev] - to go to previous/next tabs -:tabc[lose] - to close the current tab - -# navigating diagnostics -:cn[ext], :cp[rev], :ln[ext], :lp[rev] - to go to the next/prev diagnostics -:cc, :ll - to open the errors page - -# handling git diff -:dif[fupdate] - to view the diff under the cursor ("d o" in normal mode) -:rev[ert] - to revert the diff under the cursor ("d p" in normal mode) - -# jump to position -: - to jump to a line number -:$ - to jump to the end of the file -:/foo and :?foo - to jump to next/prev line matching foo - -# replacement (/g is always assumed and Zed uses different regex syntax to vim) -:[range]s/foo/bar/ - to replace instances of foo with bar - -# editing -:j[oin] - to join the current line (no range is yet supported) -:d[elete][l][p] - to delete the current line (no range is yet supported) -:s[ort] [i] - to sort the current selection (with i, case-insensitively) -:y[ank] -``` - -As any Zed command is available, you may find that it's helpful to remember mnemonics that run the correct command. For example: - -``` -:diffs Toggle all Hunk [Diffs] -:cpp [C]o[p]y [P]ath to file -:crp [C]opy [r]elative [P]ath -:reveal [Reveal] in finder -:zlog Open [Z]ed Log -:clank [C]ancel [lan]guage server work[k] -``` - -## Settings - -Vim mode is not enabled by default. To enable vim mode, you need to add the following configuration to your settings file: - -```json -{ - "vim_mode": true -} -``` - -Alternatively, you can enable vim mode by running the `toggle vim mode` command from the command palette. - -Some vim settings are available to modify the default vim behavior: +Here's an example of adding a digraph for the zombie emoji. This allows you to type `ctrl-k f z` to insert a zombie emoji. You can add as many digraphs as you like. 
```json { "vim": { - // "always": use system clipboard when no register is specified - // "never": don't use system clipboard unless "+ or "* is specified - // "on_yank": use system clipboard for yank operations when no register is specified - "use_system_clipboard": "always", - // Let `f` and `t` motions extend across multiple lines - "use_multiline_find": true, - // Let `f` and `t` motions match case insensitively if the target is lowercase - "use_smartcase_find": true, - // Use relative line numbers in normal mode, absolute in insert mode - // c.f. https://github.com/jeffkreeftmeijer/vim-numbertoggle - "toggle_relative_line_numbers": true, - // Add custom digraphs (e.g. ctrl-k f z will insert a zombie emoji) "custom_digraphs": { "fz": "🧟‍♀️" } @@ -272,22 +422,52 @@ Some vim settings are available to modify the default vim behavior: } ``` -There are also a few Zed settings that you may also enjoy if you use vim mode: +Here's an example of these settings changed: ```json { - // disable cursor blink + "vim": { + "use_system_clipboard": "never", + "use_multiline_find": true, + "use_smartcase_find": true, + "toggle_relative_line_numbers": true, + "custom_digraphs": { + "fz": "🧟‍♀️" + } + } +} +``` + +## Useful core Zed settings for vim mode + +Here are a few general Zed settings that can help you fine-tune your Vim experience: + +| Property | Description | Default Value | +| ----------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------------- | +| cursor_blink | If `true`, the cursor blinks. | `true` | +| relative_line_numbers | If `true`, line numbers in the left gutter are relative to the cursor. | `true` | +| scrollbar | Object that controls the scrollbar display. Set to `{ "show": "never" }` to hide the scroll bar. 
| `{ "show": "always" }` | +| scroll_beyond_last_line | If set to `"one_page"`, allows scrolling up to one page beyond the last line. Set to `"off"` to prevent this behavior. | `"one_page"` | +| vertical_scroll_margin | The number of lines to keep above or below the cursor when scrolling. Set to `0` to allow the cursor to go up to the edges of the screen vertically. | `3` | +| gutter.line_numbers | Controls the display of line numbers in the gutter. Set the `"line_numbers"` property to `false` to hide line numbers. | `true` | +| command_aliases | Object that defines aliases for commands in the command palette. You can use it to define shortcut names for commands you use often. Read below for examples. | `{}` | + +Here's an example of these settings changed: + +```json +{ + // Disable cursor blink "cursor_blink": false, - // use relative line numbers + // Use relative line numbers "relative_line_numbers": true, - // hide the scroll bar + // Hide the scroll bar "scrollbar": { "show": "never" }, - // prevent the buffer from scrolling beyond the last line + // Prevent the buffer from scrolling beyond the last line "scroll_beyond_last_line": "off", - // allow cursor to reach edges of screen + // Allow the cursor to reach the edges of the screen "vertical_scroll_margin": 0, "gutter": { - // disable line numbers completely: + // Disable line numbers completely: "line_numbers": false }, "command_aliases": { @@ -298,74 +478,17 @@ There are also a few Zed settings that you may also enjoy if you use vim mode: } ``` -If you want to navigate between the editor and docks (terminal, project panel, AI assistant panel, etc...), just like you navigate between splits, you can use the following key bindings: - -```json -{ - "context": "Dock", - "bindings": { - "ctrl-w h": ["workspace::ActivatePaneInDirection", "Left"], - "ctrl-w l": ["workspace::ActivatePaneInDirection", "Right"], - "ctrl-w k": ["workspace::ActivatePaneInDirection", "Up"], - "ctrl-w j": 
["workspace::ActivatePaneInDirection", "Down"] - // ... or other keybindings - } -} -``` - -Subword motion is not enabled by default. To enable it, add these bindings to your keymap. - -```json -[ - { - "context": "VimControl && !menu && vim_mode != operator", - "bindings": { - "w": "vim::NextSubwordStart", - "b": "vim::PreviousSubwordStart", - "e": "vim::NextSubwordEnd", - "g e": "vim::PreviousSubwordEnd" - } - } -] -``` - -Surrounding the selection in visual mode is also not enabled by default (`shift-s` normally behaves like `c`). To enable it, add the following to your keymap. - -```json -{ - "context": "vim_mode == visual", - "bindings": { - "shift-s": [ - "vim::PushOperator", - { - "AddSurrounds": {} - } - ] - } -} -``` - -## Supported plugins - -Zed has nascent support for some Vim plugins: - -- From `vim-surround`, `ys`, `cs` and `ds` work. Though you cannot add new HTML tags yet. -- From `vim-commentary`, `gc` in visual mode and `gcc` in normal mode. Though you cannot operate on arbitrary objects yet. -- From `netrw`, most keybindings are supported in the project panel. -- From `vim-spider`/`CamelCaseMotion` you can use subword motions as described above. +The `command_aliases` property is a single object that maps keys or key sequences to vim mode commands. The example above defines multiple aliases: `W` for `w`, `Wq` for `wq`, and `Q` for `q`. ## Regex differences -Zed uses a different regular expression engine from Vim. This means that you will have to use a different syntax for some things. +Zed uses a different regular expression engine from Vim. This means that you will have to use a different syntax in some cases. Here are the most common differences: -Notably: +- **Capture groups**: Vim uses `\(` and `\)` to represent capture groups, in Zed these are `(` and `)`. On the flip side, in Vim, `(` and `)` represent literal parentheses, but in Zed these must be escaped to `\(` and `\)`. 
+- **Matches**: When replacing, Vim uses the backslash character followed by a number to represent a matched capture group. For example, `\1`. Zed uses the dollar sign instead. So, when in Vim you use `\0` to represent the entire match, in Zed the syntax is `$0` instead. Same for numbered capture groups: `\1` in Vim is `$1` in Zed. +- **Global option**: By default, in Vim, regex searches only match the first occurrence on a line, and you append `/g` at the end of your query to find all matches. In Zed, regex searches are global by default. +- **Case sensitivity**: Vim uses `/i` to indicate a case-insensitive search. In Zed you can either write `(?i)` at the start of the pattern or toggle case-sensitivity with the shortcut {#kb search::ToggleCaseSensitive}. -- Vim uses `\(` and `\)` to represent capture groups, in Zed these are `(` and `)`. -- On the flip side, `(` and `)` represent literal parentheses, but in Zed these must be escaped to `\(` and `\)`. -- When replacing, Vim uses `\0` to represent the entire match, in Zed this is `$0`, same for numbered capture groups `\1` -> `$1`. -- Vim uses `/g` to indicate "all matches on one line", in Zed this is implied -- Vim uses `/i` to indicate "case-insensitive", in Zed you can either use `(?i)` at the start of the pattern or toggle case-sensitivity with `cmd-option-c`. - -To help with the transition, the command palette will fix parentheses and replace groups for you when you run `:%s//`. So `%s:/\(a\)(b)/\1/` will be converted into a search for "(a)\(b\)" and a replacement of "$1". +> **Note**: To help with the transition, the command palette will fix parentheses and replace groups for you when you write a Vim-style substitute command, `:%s//`. So, Zed will convert `%s:/\(a\)(b)/\1/` into a search for "(a)\(b\)" and a replacement of "$1". For the full syntax supported by Zed's regex engine [see the regex crate documentation](https://docs.rs/regex/latest/regex/#syntax). 
From a36706aed6e7f582f731a4f33ef3b056dac25f36 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Sep 2024 09:11:58 -0600 Subject: [PATCH 89/96] Fix up/down project_id confusion (#18099) Release Notes: - ssh remoting: Fix LSP queries run over collab --- crates/project/src/lsp_store.rs | 137 +++++++++++-------- crates/project/src/project.rs | 19 +-- crates/project/src/worktree_store.rs | 103 +++++++++----- crates/remote_server/src/headless_project.rs | 2 +- 4 files changed, 161 insertions(+), 100 deletions(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 95ca842360..4506fcc6fe 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -50,7 +50,7 @@ use parking_lot::{Mutex, RwLock}; use postage::watch; use rand::prelude::*; -use rpc::AnyProtoClient; +use rpc::{proto::SSH_PROJECT_ID, AnyProtoClient}; use serde::Serialize; use settings::{Settings, SettingsLocation, SettingsStore}; use sha2::{Digest, Sha256}; @@ -132,6 +132,7 @@ impl LocalLspStore { pub struct RemoteLspStore { upstream_client: AnyProtoClient, + upstream_project_id: u64, } impl RemoteLspStore {} @@ -164,8 +165,7 @@ impl LspStoreMode { pub struct LspStore { mode: LspStoreMode, - downstream_client: Option, - project_id: u64, + downstream_client: Option<(AnyProtoClient, u64)>, nonce: u128, buffer_store: Model, worktree_store: Model, @@ -302,14 +302,16 @@ impl LspStore { } } - pub fn upstream_client(&self) -> Option { + pub fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { match &self.mode { LspStoreMode::Ssh(SshLspStore { upstream_client, .. - }) - | LspStoreMode::Remote(RemoteLspStore { - upstream_client, .. - }) => Some(upstream_client.clone()), + }) => Some((upstream_client.clone(), SSH_PROJECT_ID)), + LspStoreMode::Remote(RemoteLspStore { + upstream_client, + upstream_project_id, + .. 
+ }) => Some((upstream_client.clone(), *upstream_project_id)), LspStoreMode::Local(_) => None, } } @@ -374,7 +376,6 @@ impl LspStore { }), }), downstream_client: None, - project_id: 0, buffer_store, worktree_store, languages: languages.clone(), @@ -395,10 +396,11 @@ impl LspStore { &self, buffer: Model, client: AnyProtoClient, + upstream_project_id: u64, request: R, cx: &mut ModelContext<'_, LspStore>, ) -> Task::Response>> { - let message = request.to_proto(self.project_id, buffer.read(cx)); + let message = request.to_proto(upstream_project_id, buffer.read(cx)); cx.spawn(move |this, cx| async move { let response = client.request(message).await?; let this = this.upgrade().context("project dropped")?; @@ -413,7 +415,6 @@ impl LspStore { worktree_store: Model, languages: Arc, upstream_client: AnyProtoClient, - project_id: u64, cx: &mut ModelContext, ) -> Self { cx.subscribe(&buffer_store, Self::on_buffer_store_event) @@ -429,7 +430,6 @@ impl LspStore { current_lsp_settings: Default::default(), }), downstream_client: None, - project_id, buffer_store, worktree_store, languages: languages.clone(), @@ -461,9 +461,11 @@ impl LspStore { .detach(); Self { - mode: LspStoreMode::Remote(RemoteLspStore { upstream_client }), + mode: LspStoreMode::Remote(RemoteLspStore { + upstream_client, + upstream_project_id: project_id, + }), downstream_client: None, - project_id, buffer_store, worktree_store, languages: languages.clone(), @@ -768,13 +770,13 @@ impl LspStore { } pub(crate) fn send_diagnostic_summaries(&self, worktree: &mut Worktree) { - if let Some(client) = self.downstream_client.clone() { + if let Some((client, downstream_project_id)) = self.downstream_client.clone() { if let Some(summaries) = self.diagnostic_summaries.get(&worktree.id()) { for (path, summaries) in summaries { for (&server_id, summary) in summaries { client .send(proto::UpdateDiagnosticSummary { - project_id: self.project_id, + project_id: downstream_project_id, worktree_id: worktree.id().to_proto(), 
summary: Some(summary.to_proto(server_id, path)), }) @@ -798,8 +800,14 @@ impl LspStore { { let buffer = buffer_handle.read(cx); - if let Some(upstream_client) = self.upstream_client() { - return self.send_lsp_proto_request(buffer_handle, upstream_client, request, cx); + if let Some((upstream_client, upstream_project_id)) = self.upstream_client() { + return self.send_lsp_proto_request( + buffer_handle, + upstream_client, + upstream_project_id, + request, + cx, + ); } let language_server = match server { @@ -1077,9 +1085,9 @@ impl LspStore { push_to_history: bool, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let request = proto::ApplyCodeAction { - project_id: self.project_id, + project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), action: Some(Self::serialize_code_action(&action)), }; @@ -1163,9 +1171,9 @@ impl LspStore { server_id: LanguageServerId, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let request = proto::ResolveInlayHint { - project_id: self.project_id, + project_id, buffer_id: buffer_handle.read(cx).remote_id().into(), language_server_id: server_id.0 as u64, hint: Some(InlayHints::project_to_proto_hint(hint.clone())), @@ -1274,9 +1282,9 @@ impl LspStore { trigger: String, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = proto::OnTypeFormatting { - project_id: self.project_id, + project_id, buffer_id: buffer.read(cx).remote_id().into(), position: Some(serialize_anchor(&position)), trigger, @@ -1424,11 +1432,11 @@ impl LspStore { range: Range, cx: &mut ModelContext, ) -> Task> { - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = 
self.upstream_client() { let request_task = upstream_client.request(proto::MultiLspQuery { buffer_id: buffer_handle.read(cx).remote_id().into(), version: serialize_version(&buffer_handle.read(cx).version()), - project_id: self.project_id, + project_id, strategy: Some(proto::multi_lsp_query::Strategy::All( proto::AllLanguageServers {}, )), @@ -1437,7 +1445,7 @@ impl LspStore { range: range.clone(), kinds: None, } - .to_proto(self.project_id, buffer_handle.read(cx)), + .to_proto(project_id, buffer_handle.read(cx)), )), }); let buffer = buffer_handle.clone(); @@ -1504,10 +1512,11 @@ impl LspStore { ) -> Task>> { let language_registry = self.languages.clone(); - if let Some(upstream_client) = self.upstream_client() { + if let Some((upstream_client, project_id)) = self.upstream_client() { let task = self.send_lsp_proto_request( buffer.clone(), upstream_client, + project_id, GetCompletions { position, context }, cx, ); @@ -1603,14 +1612,13 @@ impl LspStore { ) -> Task> { let client = self.upstream_client(); let language_registry = self.languages.clone(); - let project_id = self.project_id; let buffer_id = buffer.read(cx).remote_id(); let buffer_snapshot = buffer.read(cx).snapshot(); cx.spawn(move |this, cx| async move { let mut did_resolve = false; - if let Some(client) = client { + if let Some((client, project_id)) = client { for completion_index in completion_indices { let (server_id, completion) = { let completions_guard = completions.read(); @@ -1811,8 +1819,7 @@ impl LspStore { let buffer = buffer_handle.read(cx); let buffer_id = buffer.remote_id(); - if let Some(client) = self.upstream_client() { - let project_id = self.project_id; + if let Some((client, project_id)) = self.upstream_client() { cx.spawn(move |_, mut cx| async move { let response = client .request(proto::ApplyCompletionAdditionalEdits { @@ -1927,9 +1934,9 @@ impl LspStore { let buffer_id = buffer.remote_id().into(); let lsp_request = InlayHints { range }; - if let Some(client) = 
self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = proto::InlayHints { - project_id: self.project_id, + project_id, buffer_id, start: Some(serialize_anchor(&range_start)), end: Some(serialize_anchor(&range_end)), @@ -1977,16 +1984,16 @@ impl LspStore { ) -> Task> { let position = position.to_point_utf16(buffer.read(cx)); - if let Some(client) = self.upstream_client() { + if let Some((client, upstream_project_id)) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), - project_id: self.project_id, + project_id: upstream_project_id, strategy: Some(proto::multi_lsp_query::Strategy::All( proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetSignatureHelp( - GetSignatureHelp { position }.to_proto(self.project_id, buffer.read(cx)), + GetSignatureHelp { position }.to_proto(upstream_project_id, buffer.read(cx)), )), }); let buffer = buffer.clone(); @@ -2049,16 +2056,16 @@ impl LspStore { position: PointUtf16, cx: &mut ModelContext, ) -> Task> { - if let Some(client) = self.upstream_client() { + if let Some((client, upstream_project_id)) = self.upstream_client() { let request_task = client.request(proto::MultiLspQuery { buffer_id: buffer.read(cx).remote_id().into(), version: serialize_version(&buffer.read(cx).version()), - project_id: self.project_id, + project_id: upstream_project_id, strategy: Some(proto::multi_lsp_query::Strategy::All( proto::AllLanguageServers {}, )), request: Some(proto::multi_lsp_query::Request::GetHover( - GetHover { position }.to_proto(self.project_id, buffer.read(cx)), + GetHover { position }.to_proto(upstream_project_id, buffer.read(cx)), )), }); let buffer = buffer.clone(); @@ -2123,9 +2130,9 @@ impl LspStore { pub fn symbols(&self, query: &str, cx: &mut ModelContext) -> Task>> { let language_registry = self.languages.clone(); - if let 
Some(upstream_client) = self.upstream_client().as_ref() { + if let Some((upstream_client, project_id)) = self.upstream_client().as_ref() { let request = upstream_client.request(proto::GetProjectSymbols { - project_id: self.project_id, + project_id: *project_id, query: query.to_string(), }); cx.foreground_executor().spawn(async move { @@ -2598,8 +2605,7 @@ impl LspStore { downstream_client: AnyProtoClient, _: &mut ModelContext, ) { - self.project_id = project_id; - self.downstream_client = Some(downstream_client.clone()); + self.downstream_client = Some((downstream_client.clone(), project_id)); for (server_id, status) in &self.language_server_statuses { downstream_client @@ -2857,10 +2863,10 @@ impl LspStore { } if !old_summary.is_empty() || !new_summary.is_empty() { - if let Some(downstream_client) = &self.downstream_client { + if let Some((downstream_client, project_id)) = &self.downstream_client { downstream_client .send(proto::UpdateDiagnosticSummary { - project_id: self.project_id, + project_id: *project_id, worktree_id: worktree_id.to_proto(), summary: Some(proto::DiagnosticSummary { path: worktree_path.to_string_lossy().to_string(), @@ -2881,9 +2887,9 @@ impl LspStore { symbol: &Symbol, cx: &mut ModelContext, ) -> Task>> { - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = client.request(proto::OpenBufferForSymbol { - project_id: self.project_id, + project_id, symbol: Some(Self::serialize_symbol(symbol)), }); cx.spawn(move |this, mut cx| async move { @@ -3184,6 +3190,17 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncAppContext, ) -> Result { + let response_from_ssh = this.update(&mut cx, |this, _| { + let ssh = this.as_ssh()?; + let mut payload = envelope.payload.clone(); + payload.project_id = SSH_PROJECT_ID; + + Some(ssh.upstream_client.request(payload)) + })?; + if let Some(response_from_ssh) = response_from_ssh { + return response_from_ssh.await; + } + let sender_id = 
envelope.original_sender_id().unwrap_or_default(); let buffer_id = BufferId::new(envelope.payload.buffer_id)?; let version = deserialize_version(&envelope.payload.version); @@ -4779,10 +4796,11 @@ impl LspStore { // TODO: We should use `adapter` here instead of reaching through the `CachedLspAdapter`. let lsp_adapter = adapter.adapter.clone(); - let project_id = self.project_id; + let Some((upstream_client, project_id)) = self.upstream_client() else { + return; + }; let worktree_id = worktree.read(cx).id().to_proto(); - let upstream_client = ssh.upstream_client.clone(); - let name = adapter.name(); + let name = adapter.name().to_string(); let Some(available_language) = self.languages.available_language_for_name(&language) else { log::error!("failed to find available language {language}"); @@ -5165,12 +5183,11 @@ impl LspStore { } }); - let project_id = self.project_id; for (worktree_id, summaries) in self.diagnostic_summaries.iter_mut() { summaries.retain(|path, summaries_by_server_id| { if summaries_by_server_id.remove(&server_id).is_some() { - if let Some(downstream_client) = self.downstream_client.clone() { - downstream_client + if let Some((client, project_id)) = self.downstream_client.clone() { + client .send(proto::UpdateDiagnosticSummary { project_id, worktree_id: worktree_id.to_proto(), @@ -5236,9 +5253,9 @@ impl LspStore { buffers: impl IntoIterator>, cx: &mut ModelContext, ) { - if let Some(client) = self.upstream_client() { + if let Some((client, project_id)) = self.upstream_client() { let request = client.request(proto::RestartLanguageServers { - project_id: self.project_id, + project_id, buffer_ids: buffers .into_iter() .map(|b| b.read(cx).remote_id().to_proto()) @@ -5694,9 +5711,9 @@ impl LspStore { async move { this.update(&mut cx, |this, cx| { cx.emit(LspStoreEvent::RefreshInlayHints); - this.downstream_client.as_ref().map(|client| { + this.downstream_client.as_ref().map(|(client, project_id)| { client.send(proto::RefreshInlayHints { - project_id: 
this.project_id, + project_id: *project_id, }) }) })? @@ -6073,9 +6090,9 @@ impl LspStore { cx.emit(LspStoreEvent::LanguageServerAdded(server_id)); - if let Some(downstream_client) = self.downstream_client.as_ref() { + if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() { downstream_client.send(proto::StartLanguageServer { - project_id: self.project_id, + project_id: *project_id, server: Some(proto::LanguageServer { id: server_id.0 as u64, name: language_server.name().to_string(), diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 78584cbae0..0c54a16187 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -625,7 +625,7 @@ impl Project { let snippets = SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); - let worktree_store = cx.new_model(|_| WorktreeStore::new(None, false, fs.clone())); + let worktree_store = cx.new_model(|_| WorktreeStore::local(false, fs.clone())); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -722,7 +722,7 @@ impl Project { SnippetProvider::new(fs.clone(), BTreeSet::from_iter([global_snippets_dir]), cx); let worktree_store = - cx.new_model(|_| WorktreeStore::new(Some(ssh.clone().into()), false, fs.clone())); + cx.new_model(|_| WorktreeStore::remote(false, ssh.clone().into(), 0, None)); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); @@ -744,7 +744,6 @@ impl Project { worktree_store.clone(), languages.clone(), ssh.clone().into(), - 0, cx, ) }); @@ -874,11 +873,15 @@ impl Project { let role = response.payload.role(); let worktree_store = cx.new_model(|_| { - let mut store = WorktreeStore::new(Some(client.clone().into()), true, fs.clone()); - if let Some(dev_server_project_id) = response.payload.dev_server_project_id { - store.set_dev_server_project_id(DevServerProjectId(dev_server_project_id)); - } - store + WorktreeStore::remote( + true, + client.clone().into(), + 
response.payload.project_id, + response + .payload + .dev_server_project_id + .map(DevServerProjectId), + ) })?; let buffer_store = cx.new_model(|cx| BufferStore::new(worktree_store.clone(), Some(remote_id), cx))?; diff --git a/crates/project/src/worktree_store.rs b/crates/project/src/worktree_store.rs index 5c3b2a00a9..9f25572fc7 100644 --- a/crates/project/src/worktree_store.rs +++ b/crates/project/src/worktree_store.rs @@ -36,19 +36,27 @@ struct MatchingEntry { respond: oneshot::Sender, } +enum WorktreeStoreState { + Local { + fs: Arc, + }, + Remote { + dev_server_project_id: Option, + upstream_client: AnyProtoClient, + upstream_project_id: u64, + }, +} + pub struct WorktreeStore { next_entry_id: Arc, - upstream_client: Option, - downstream_client: Option, - remote_id: u64, - dev_server_project_id: Option, + downstream_client: Option<(AnyProtoClient, u64)>, retain_worktrees: bool, worktrees: Vec, worktrees_reordered: bool, #[allow(clippy::type_complexity)] loading_worktrees: HashMap, Shared, Arc>>>>, - fs: Arc, + state: WorktreeStoreState, } pub enum WorktreeStoreEvent { @@ -69,27 +77,37 @@ impl WorktreeStore { client.add_model_request_handler(Self::handle_expand_project_entry); } - pub fn new( - upstream_client: Option, - retain_worktrees: bool, - fs: Arc, - ) -> Self { + pub fn local(retain_worktrees: bool, fs: Arc) -> Self { Self { next_entry_id: Default::default(), loading_worktrees: Default::default(), - dev_server_project_id: None, downstream_client: None, worktrees: Vec::new(), worktrees_reordered: false, retain_worktrees, - remote_id: 0, - upstream_client, - fs, + state: WorktreeStoreState::Local { fs }, } } - pub fn set_dev_server_project_id(&mut self, id: DevServerProjectId) { - self.dev_server_project_id = Some(id); + pub fn remote( + retain_worktrees: bool, + upstream_client: AnyProtoClient, + upstream_project_id: u64, + dev_server_project_id: Option, + ) -> Self { + Self { + next_entry_id: Default::default(), + loading_worktrees: Default::default(), 
+ downstream_client: None, + worktrees: Vec::new(), + worktrees_reordered: false, + retain_worktrees, + state: WorktreeStoreState::Remote { + upstream_client, + upstream_project_id, + dev_server_project_id, + }, + } } /// Iterates through all worktrees, including ones that don't appear in the project panel @@ -159,14 +177,28 @@ impl WorktreeStore { ) -> Task>> { let path: Arc = abs_path.as_ref().into(); if !self.loading_worktrees.contains_key(&path) { - let task = if let Some(client) = self.upstream_client.clone() { - if let Some(dev_server_project_id) = self.dev_server_project_id { - self.create_dev_server_worktree(client, dev_server_project_id, abs_path, cx) - } else { - self.create_ssh_worktree(client, abs_path, visible, cx) + let task = match &self.state { + WorktreeStoreState::Remote { + upstream_client, + dev_server_project_id, + .. + } => { + if let Some(dev_server_project_id) = dev_server_project_id { + self.create_dev_server_worktree( + upstream_client.clone(), + *dev_server_project_id, + abs_path, + cx, + ) + } else if upstream_client.is_via_collab() { + Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab")))) + } else { + self.create_ssh_worktree(upstream_client.clone(), abs_path, visible, cx) + } + } + WorktreeStoreState::Local { fs } => { + self.create_local_worktree(fs.clone(), abs_path, visible, cx) } - } else { - self.create_local_worktree(abs_path, visible, cx) }; self.loading_worktrees.insert(path.clone(), task.shared()); @@ -236,11 +268,11 @@ impl WorktreeStore { fn create_local_worktree( &mut self, + fs: Arc, abs_path: impl AsRef, visible: bool, cx: &mut ModelContext, ) -> Task, Arc>> { - let fs = self.fs.clone(); let next_entry_id = self.next_entry_id.clone(); let path: Arc = abs_path.as_ref().into(); @@ -374,6 +406,17 @@ impl WorktreeStore { self.worktrees_reordered = worktrees_reordered; } + fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> { + match &self.state { + WorktreeStoreState::Remote { + upstream_client, + 
upstream_project_id, + .. + } => Some((upstream_client.clone(), *upstream_project_id)), + WorktreeStoreState::Local { .. } => None, + } + } + pub fn set_worktrees_from_proto( &mut self, worktrees: Vec, @@ -389,8 +432,8 @@ impl WorktreeStore { }) .collect::>(); - let client = self - .upstream_client + let (client, project_id) = self + .upstream_client() .clone() .ok_or_else(|| anyhow!("invalid project"))?; @@ -408,7 +451,7 @@ impl WorktreeStore { self.worktrees.push(handle); } else { self.add( - &Worktree::remote(self.remote_id, replica_id, worktree, client.clone(), cx), + &Worktree::remote(project_id, replica_id, worktree, client.clone(), cx), cx, ); } @@ -477,10 +520,9 @@ impl WorktreeStore { } pub fn send_project_updates(&mut self, cx: &mut ModelContext) { - let Some(downstream_client) = self.downstream_client.clone() else { + let Some((downstream_client, project_id)) = self.downstream_client.clone() else { return; }; - let project_id = self.remote_id; let update = proto::UpdateProject { project_id, @@ -549,8 +591,7 @@ impl WorktreeStore { cx: &mut ModelContext, ) { self.retain_worktrees = true; - self.remote_id = remote_id; - self.downstream_client = Some(downsteam_client); + self.downstream_client = Some((downsteam_client, remote_id)); // When shared, retain all worktrees for worktree_handle in self.worktrees.iter_mut() { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 9d5c26d6c7..0d644a64a6 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -45,7 +45,7 @@ impl HeadlessProject { let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone())); let worktree_store = cx.new_model(|cx| { - let mut store = WorktreeStore::new(None, true, fs.clone()); + let mut store = WorktreeStore::local(true, fs.clone()); store.shared(SSH_PROJECT_ID, session.clone().into(), cx); store }); From bc751d6c1994634cea98bb855ec2981d3a976d8c Mon Sep 
17 00:00:00 2001 From: Boris Verkhovskiy Date: Mon, 23 Sep 2024 10:03:55 -0600 Subject: [PATCH 90/96] Don't highlight Python import names as type (#17984) Works on #14892 Follow up to #17473 --- crates/languages/src/python/highlights.scm | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index df6b60466c..3255677bed 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -2,24 +2,6 @@ (attribute attribute: (identifier) @property) (type (identifier) @type) -; Module imports - -(import_statement - (dotted_name (identifier) @type)) - -(import_statement - (aliased_import - name: (dotted_name (identifier) @type) - alias: (identifier) @type)) - -(import_from_statement - (dotted_name (identifier) @type)) - -(import_from_statement - (aliased_import - name: (dotted_name (identifier) @type) - alias: (identifier) @type)) - ; Function calls (decorator) @function From 3c95a64a23c96303b864335ec55c3ec93ca0e414 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:11:26 -0400 Subject: [PATCH 91/96] Add a rather-conservative stale issue action in dry-run mode (#18233) Zed is becoming more popular and our issue tracker is only growing larger and larger. I realize that a stale issue action can be controversial, but the way we currently manage issues hasn't scaled well and it will only get worse. We need some crowd-sourced system. Let's ask those who have opened issues if their issues are still valid. This is rather conservative and only targets bugs and crashes. I'll run it in debug mode, report the results, and enable it if it feels right. We can always turn this off if users end up really not liking it. 
My original rules were: ```txt If an issue is old enough (12 months or older) AND if there are no recent comments from the team (last dev comment is older than 6 months) AND it has less than X upvotes (5) AND it does not have an open PR linked to it AND is a "defect" or "panic / crash" AND does not have a "ignore top-ranking issues" label AND was not opened by a org member AND is open AND is issue (not a pull request) THEN close the issue with a kind message. ``` But only some of these were actually supported in the configuration. Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 28 ++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 .github/workflows/close_stale_issues.yml diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml new file mode 100644 index 0000000000..240403169c --- /dev/null +++ b/.github/workflows/close_stale_issues.yml @@ -0,0 +1,28 @@ +name: "Close Stale Issues" +on: + schedule: + - cron: "0 1 * * *" + workflow_dispatch: + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + stale-issue-message: > + Hi there! 👋 + + We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in a week. Feel free to open a new issue if you're seeing this message after the issue has been closed. + + Thanks for your help! + close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" 
+ days-before-stale: 365 + days-before-close: 7 + only-issue-labels: "defect,panic / crash" + operations-per-run: 100 + ascending: true + enable-statistics: true + debug-only: true + stale-issue-label: "stale" From 20826336d9c2815da327ead99006b28dc8800082 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:15:33 -0400 Subject: [PATCH 92/96] update stale issue configuration to use `any-of-issue-labels` (#18236) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 240403169c..1cac6450e8 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -20,7 +20,7 @@ jobs: close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" days-before-stale: 365 days-before-close: 7 - only-issue-labels: "defect,panic / crash" + any-of-issue-labels: "defect,panic / crash" operations-per-run: 100 ascending: true enable-statistics: true From 65bb989c61e90271e544566b7999feaa9e8ff105 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 23 Sep 2024 12:16:51 -0400 Subject: [PATCH 93/96] gpui: Update doc comment for `SharedString::new_static` (#18234) This PR updates the doc comment for `SharedString::new_static`. 
Release Notes: - N/A --- crates/gpui/src/shared_string.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/shared_string.rs b/crates/gpui/src/shared_string.rs index f5aef6adf8..e1fd4f1a5c 100644 --- a/crates/gpui/src/shared_string.rs +++ b/crates/gpui/src/shared_string.rs @@ -10,9 +10,9 @@ use util::arc_cow::ArcCow; pub struct SharedString(ArcCow<'static, str>); impl SharedString { - /// creates a static SharedString - pub const fn new_static(s: &'static str) -> Self { - Self(ArcCow::Borrowed(s)) + /// Creates a static [`SharedString`] from a `&'static str`. + pub const fn new_static(str: &'static str) -> Self { + Self(ArcCow::Borrowed(str)) } } From 11953bbc16c12c61363f6e15d023c6ff9488114a Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:24:49 -0400 Subject: [PATCH 94/96] Disable debug mode for stale issue action (#18237) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index 1cac6450e8..be4f6f4af0 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -24,5 +24,4 @@ jobs: operations-per-run: 100 ascending: true enable-statistics: true - debug-only: true stale-issue-label: "stale" From 1efe87029bffc2b23784247db3a851dc11ba9ae8 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 23 Sep 2024 12:32:31 -0400 Subject: [PATCH 95/96] Update stale issues configuration to use 180 days (#18238) Release Notes: - N/A --- .github/workflows/close_stale_issues.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/close_stale_issues.yml b/.github/workflows/close_stale_issues.yml index be4f6f4af0..afc28ec180 100644 --- a/.github/workflows/close_stale_issues.yml +++ b/.github/workflows/close_stale_issues.yml @@ -18,7 +18,7 @@ jobs: Thanks for your help! 
close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!" - days-before-stale: 365 + days-before-stale: 180 days-before-close: 7 any-of-issue-labels: "defect,panic / crash" operations-per-run: 100 From 7051bc00c2fe8d7407480a805e950cb73343bb45 Mon Sep 17 00:00:00 2001 From: Antonio Scandurra Date: Mon, 23 Sep 2024 11:40:34 -0600 Subject: [PATCH 96/96] Add "Fix with Assistant" code action on lines with diagnostics (#18163) Release Notes: - Added a new "Fix with Assistant" action on code with errors or warnings. --------- Co-authored-by: Nathan --- Cargo.lock | 1 + crates/assistant/Cargo.toml | 1 + crates/assistant/src/inline_assistant.rs | 195 ++++++++++++++++-- crates/assistant/src/workflow.rs | 1 + .../remote_editing_collaboration_tests.rs | 1 + crates/editor/src/editor.rs | 169 +++++++++++---- crates/gpui/src/executor.rs | 6 +- crates/multi_buffer/src/multi_buffer.rs | 67 ++++++ crates/project/src/lsp_store.rs | 34 +-- crates/project/src/project.rs | 2 +- crates/project/src/project_tests.rs | 3 +- crates/search/src/project_search.rs | 2 +- crates/workspace/src/workspace.rs | 8 +- 13 files changed, 418 insertions(+), 72 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c0f6751b89..e345736295 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -404,6 +404,7 @@ dependencies = [ "language_model", "languages", "log", + "lsp", "markdown", "menu", "multi_buffer", diff --git a/crates/assistant/Cargo.toml b/crates/assistant/Cargo.toml index 9f715d8224..9e61eee18a 100644 --- a/crates/assistant/Cargo.toml +++ b/crates/assistant/Cargo.toml @@ -51,6 +51,7 @@ indoc.workspace = true language.workspace = true language_model.workspace = true log.workspace = true +lsp.workspace = true markdown.workspace = true menu.workspace = true multi_buffer.workspace = true diff --git a/crates/assistant/src/inline_assistant.rs b/crates/assistant/src/inline_assistant.rs index f2428c3a2e..9c117e6665 100644 --- 
a/crates/assistant/src/inline_assistant.rs +++ b/crates/assistant/src/inline_assistant.rs @@ -12,8 +12,9 @@ use editor::{ BlockContext, BlockDisposition, BlockProperties, BlockStyle, CustomBlockId, RenderBlock, ToDisplayPoint, }, - Anchor, AnchorRangeExt, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, - ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint, + Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorElement, EditorEvent, EditorMode, + EditorStyle, ExcerptId, ExcerptRange, GutterDimensions, MultiBuffer, MultiBufferSnapshot, + ToOffset as _, ToPoint, }; use feature_flags::{FeatureFlagAppExt as _, ZedPro}; use fs::Fs; @@ -35,6 +36,7 @@ use language_model::{ }; use multi_buffer::MultiBufferRow; use parking_lot::Mutex; +use project::{CodeAction, ProjectTransaction}; use rope::Rope; use settings::{Settings, SettingsStore}; use smol::future::FutureExt; @@ -49,10 +51,11 @@ use std::{ time::{Duration, Instant}, }; use terminal_view::terminal_panel::TerminalPanel; +use text::{OffsetRangeExt, ToPoint as _}; use theme::ThemeSettings; use ui::{prelude::*, CheckboxWithLabel, IconButtonShape, Popover, Tooltip}; use util::{RangeExt, ResultExt}; -use workspace::{notifications::NotificationId, Toast, Workspace}; +use workspace::{notifications::NotificationId, ItemHandle, Toast, Workspace}; pub fn init( fs: Arc, @@ -129,8 +132,10 @@ impl InlineAssistant { } pub fn register_workspace(&mut self, workspace: &View, cx: &mut WindowContext) { - cx.subscribe(workspace, |_, event, cx| { - Self::update_global(cx, |this, cx| this.handle_workspace_event(event, cx)); + cx.subscribe(workspace, |workspace, event, cx| { + Self::update_global(cx, |this, cx| { + this.handle_workspace_event(workspace, event, cx) + }); }) .detach(); @@ -150,19 +155,49 @@ impl InlineAssistant { .detach(); } - fn handle_workspace_event(&mut self, event: &workspace::Event, cx: &mut WindowContext) { - // When the user manually saves an editor, automatically 
accepts all finished transformations. - if let workspace::Event::UserSavedItem { item, .. } = event { - if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) { - if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { - for assist_id in editor_assists.assist_ids.clone() { - let assist = &self.assists[&assist_id]; - if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { - self.finish_assist(assist_id, false, cx) + fn handle_workspace_event( + &mut self, + workspace: View, + event: &workspace::Event, + cx: &mut WindowContext, + ) { + match event { + workspace::Event::UserSavedItem { item, .. } => { + // When the user manually saves an editor, automatically accepts all finished transformations. + if let Some(editor) = item.upgrade().and_then(|item| item.act_as::(cx)) { + if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) { + for assist_id in editor_assists.assist_ids.clone() { + let assist = &self.assists[&assist_id]; + if let CodegenStatus::Done = assist.codegen.read(cx).status(cx) { + self.finish_assist(assist_id, false, cx) + } } } } } + workspace::Event::ItemAdded { item } => { + self.register_workspace_item(&workspace, item.as_ref(), cx); + } + _ => (), + } + } + + fn register_workspace_item( + &mut self, + workspace: &View, + item: &dyn ItemHandle, + cx: &mut WindowContext, + ) { + if let Some(editor) = item.act_as::(cx) { + editor.update(cx, |editor, cx| { + editor.push_code_action_provider( + Arc::new(AssistantCodeActionProvider { + editor: cx.view().downgrade(), + workspace: workspace.downgrade(), + }), + cx, + ); + }); } } @@ -332,6 +367,7 @@ impl InlineAssistant { mut range: Range, initial_prompt: String, initial_transaction_id: Option, + focus: bool, workspace: Option>, assistant_panel: Option<&View>, cx: &mut WindowContext, @@ -404,6 +440,11 @@ impl InlineAssistant { assist_group.assist_ids.push(assist_id); editor_assists.assist_ids.push(assist_id); 
self.assist_groups.insert(assist_group_id, assist_group); + + if focus { + self.focus_assist(assist_id, cx); + } + assist_id } @@ -3289,6 +3330,132 @@ where } } +struct AssistantCodeActionProvider { + editor: WeakView, + workspace: WeakView, +} + +impl CodeActionProvider for AssistantCodeActionProvider { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>> { + let snapshot = buffer.read(cx).snapshot(); + let mut range = range.to_point(&snapshot); + + // Expand the range to line boundaries. + range.start.column = 0; + range.end.column = snapshot.line_len(range.end.row); + + let mut has_diagnostics = false; + for diagnostic in snapshot.diagnostics_in_range::<_, Point>(range.clone(), false) { + range.start = cmp::min(range.start, diagnostic.range.start); + range.end = cmp::max(range.end, diagnostic.range.end); + has_diagnostics = true; + } + if has_diagnostics { + if let Some(symbols_containing_start) = snapshot.symbols_containing(range.start, None) { + if let Some(symbol) = symbols_containing_start.last() { + range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); + } + } + + if let Some(symbols_containing_end) = snapshot.symbols_containing(range.end, None) { + if let Some(symbol) = symbols_containing_end.last() { + range.start = cmp::min(range.start, symbol.range.start.to_point(&snapshot)); + range.end = cmp::max(range.end, symbol.range.end.to_point(&snapshot)); + } + } + + Task::ready(Ok(vec![CodeAction { + server_id: language::LanguageServerId(0), + range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end), + lsp_action: lsp::CodeAction { + title: "Fix with Assistant".into(), + ..Default::default() + }, + }])) + } else { + Task::ready(Ok(Vec::new())) + } + } + + fn apply_code_action( + &self, + buffer: Model, + action: CodeAction, + excerpt_id: ExcerptId, + _push_to_history: bool, + cx: &mut 
WindowContext, + ) -> Task> { + let editor = self.editor.clone(); + let workspace = self.workspace.clone(); + cx.spawn(|mut cx| async move { + let editor = editor.upgrade().context("editor was released")?; + let range = editor + .update(&mut cx, |editor, cx| { + editor.buffer().update(cx, |multibuffer, cx| { + let buffer = buffer.read(cx); + let multibuffer_snapshot = multibuffer.read(cx); + + let old_context_range = + multibuffer_snapshot.context_range_for_excerpt(excerpt_id)?; + let mut new_context_range = old_context_range.clone(); + if action + .range + .start + .cmp(&old_context_range.start, buffer) + .is_lt() + { + new_context_range.start = action.range.start; + } + if action.range.end.cmp(&old_context_range.end, buffer).is_gt() { + new_context_range.end = action.range.end; + } + drop(multibuffer_snapshot); + + if new_context_range != old_context_range { + multibuffer.resize_excerpt(excerpt_id, new_context_range, cx); + } + + let multibuffer_snapshot = multibuffer.read(cx); + Some( + multibuffer_snapshot + .anchor_in_excerpt(excerpt_id, action.range.start)? + ..multibuffer_snapshot + .anchor_in_excerpt(excerpt_id, action.range.end)?, + ) + }) + })? 
+ .context("invalid range")?; + let assistant_panel = workspace.update(&mut cx, |workspace, cx| { + workspace + .panel::(cx) + .context("assistant panel was released") + })??; + + cx.update_global(|assistant: &mut InlineAssistant, cx| { + let assist_id = assistant.suggest_assist( + &editor, + range, + "Fix Diagnostics".into(), + None, + true, + Some(workspace), + Some(&assistant_panel), + cx, + ); + assistant.start_assist(assist_id, cx); + })?; + + Ok(ProjectTransaction::default()) + }) + } +} + fn prefixes(text: &str) -> impl Iterator { (0..text.len() - 1).map(|ix| &text[..ix + 1]) } diff --git a/crates/assistant/src/workflow.rs b/crates/assistant/src/workflow.rs index 75c65ed0a7..8a770e21aa 100644 --- a/crates/assistant/src/workflow.rs +++ b/crates/assistant/src/workflow.rs @@ -187,6 +187,7 @@ impl WorkflowSuggestion { suggestion_range, initial_prompt, initial_transaction_id, + false, Some(workspace.clone()), Some(assistant_panel), cx, diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index cdcf69cf7e..a81166bb00 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -53,6 +53,7 @@ async fn test_sharing_an_ssh_remote_project( let (project_a, worktree_id) = client_a .build_ssh_project("/code/project1", client_ssh, cx_a) .await; + executor.run_until_parked(); // User A shares the remote project. 
let active_call_a = cx_a.read(ActiveCall::global); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index b1a3d95a0d..cbc272d995 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -68,7 +68,7 @@ use element::LineWithInvisibles; pub use element::{ CursorLayout, EditorElement, HighlightedRange, HighlightedRangeLine, PointForPosition, }; -use futures::FutureExt; +use futures::{future, FutureExt}; use fuzzy::{StringMatch, StringMatchCandidate}; use git::blame::GitBlame; use git::diff_hunk_to_display; @@ -569,8 +569,8 @@ pub struct Editor { find_all_references_task_sources: Vec, next_completion_id: CompletionId, completion_documentation_pre_resolve_debounce: DebouncedDelay, - available_code_actions: Option<(Location, Arc<[CodeAction]>)>, - code_actions_task: Option>, + available_code_actions: Option<(Location, Arc<[AvailableCodeAction]>)>, + code_actions_task: Option>>, document_highlights_task: Option>, linked_editing_range_task: Option>>, linked_edit_ranges: linked_editing_ranges::LinkedEditingRanges, @@ -590,6 +590,7 @@ pub struct Editor { gutter_hovered: bool, hovered_link_state: Option, inline_completion_provider: Option, + code_action_providers: Vec>, active_inline_completion: Option, // enable_inline_completions is a switch that Vim can use to disable // inline completions based on its mode. 
@@ -1360,10 +1361,16 @@ impl CompletionsMenu { } } +struct AvailableCodeAction { + excerpt_id: ExcerptId, + action: CodeAction, + provider: Arc, +} + #[derive(Clone)] struct CodeActionContents { tasks: Option>, - actions: Option>, + actions: Option>, } impl CodeActionContents { @@ -1395,9 +1402,11 @@ impl CodeActionContents { .map(|(kind, task)| CodeActionsItem::Task(kind.clone(), task.clone())) }) .chain(self.actions.iter().flat_map(|actions| { - actions - .iter() - .map(|action| CodeActionsItem::CodeAction(action.clone())) + actions.iter().map(|available| CodeActionsItem::CodeAction { + excerpt_id: available.excerpt_id, + action: available.action.clone(), + provider: available.provider.clone(), + }) })) } fn get(&self, index: usize) -> Option { @@ -1410,10 +1419,13 @@ impl CodeActionContents { .cloned() .map(|(kind, task)| CodeActionsItem::Task(kind, task)) } else { - actions - .get(index - tasks.templates.len()) - .cloned() - .map(CodeActionsItem::CodeAction) + actions.get(index - tasks.templates.len()).map(|available| { + CodeActionsItem::CodeAction { + excerpt_id: available.excerpt_id, + action: available.action.clone(), + provider: available.provider.clone(), + } + }) } } (Some(tasks), None) => tasks @@ -1421,7 +1433,15 @@ impl CodeActionContents { .get(index) .cloned() .map(|(kind, task)| CodeActionsItem::Task(kind, task)), - (None, Some(actions)) => actions.get(index).cloned().map(CodeActionsItem::CodeAction), + (None, Some(actions)) => { + actions + .get(index) + .map(|available| CodeActionsItem::CodeAction { + excerpt_id: available.excerpt_id, + action: available.action.clone(), + provider: available.provider.clone(), + }) + } (None, None) => None, } } @@ -1431,7 +1451,11 @@ impl CodeActionContents { #[derive(Clone)] enum CodeActionsItem { Task(TaskSourceKind, ResolvedTask), - CodeAction(CodeAction), + CodeAction { + excerpt_id: ExcerptId, + action: CodeAction, + provider: Arc, + }, } impl CodeActionsItem { @@ -1442,14 +1466,14 @@ impl CodeActionsItem { 
Some(task) } fn as_code_action(&self) -> Option<&CodeAction> { - let Self::CodeAction(action) = self else { + let Self::CodeAction { action, .. } = self else { return None; }; Some(action) } fn label(&self) -> String { match self { - Self::CodeAction(action) => action.lsp_action.title.clone(), + Self::CodeAction { action, .. } => action.lsp_action.title.clone(), Self::Task(_, task) => task.resolved_label.clone(), } } @@ -1588,7 +1612,9 @@ impl CodeActionsMenu { .enumerate() .max_by_key(|(_, action)| match action { CodeActionsItem::Task(_, task) => task.resolved_label.chars().count(), - CodeActionsItem::CodeAction(action) => action.lsp_action.title.chars().count(), + CodeActionsItem::CodeAction { action, .. } => { + action.lsp_action.title.chars().count() + } }) .map(|(ix, _)| ix), ) @@ -1864,6 +1890,11 @@ impl Editor { None }; + let mut code_action_providers = Vec::new(); + if let Some(project) = project.clone() { + code_action_providers.push(Arc::new(project) as Arc<_>); + } + let mut this = Self { focus_handle, show_cursor_when_unfocused: false, @@ -1915,6 +1946,7 @@ impl Editor { next_completion_id: 0, completion_documentation_pre_resolve_debounce: DebouncedDelay::new(), next_inlay_id: 0, + code_action_providers, available_code_actions: Default::default(), code_actions_task: Default::default(), document_highlights_task: Default::default(), @@ -4553,7 +4585,7 @@ impl Editor { let action = action.clone(); cx.spawn(|editor, mut cx| async move { while let Some(prev_task) = task { - prev_task.await; + prev_task.await.log_err(); task = editor.update(&mut cx, |this, _| this.code_actions_task.take())?; } @@ -4727,17 +4759,16 @@ impl Editor { Some(Task::ready(Ok(()))) }) } - CodeActionsItem::CodeAction(action) => { - let apply_code_actions = workspace - .read(cx) - .project() - .clone() - .update(cx, |project, cx| { - project.apply_code_action(buffer, action, true, cx) - }); + CodeActionsItem::CodeAction { + excerpt_id, + action, + provider, + } => { + let 
apply_code_action = + provider.apply_code_action(buffer, action, excerpt_id, true, cx); let workspace = workspace.downgrade(); Some(cx.spawn(|editor, cx| async move { - let project_transaction = apply_code_actions.await?; + let project_transaction = apply_code_action.await?; Self::open_project_transaction( &editor, workspace, @@ -4835,8 +4866,16 @@ impl Editor { Ok(()) } + pub fn push_code_action_provider( + &mut self, + provider: Arc, + cx: &mut ViewContext, + ) { + self.code_action_providers.push(provider); + self.refresh_code_actions(cx); + } + fn refresh_code_actions(&mut self, cx: &mut ViewContext) -> Option<()> { - let project = self.project.clone()?; let buffer = self.buffer.read(cx); let newest_selection = self.selections.newest_anchor().clone(); let (start_buffer, start) = buffer.text_anchor_for_position(newest_selection.start, cx)?; @@ -4850,13 +4889,30 @@ impl Editor { .timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT) .await; - let actions = if let Ok(code_actions) = project.update(&mut cx, |project, cx| { - project.code_actions(&start_buffer, start..end, cx) - }) { - code_actions.await - } else { - Vec::new() - }; + let (providers, tasks) = this.update(&mut cx, |this, cx| { + let providers = this.code_action_providers.clone(); + let tasks = this + .code_action_providers + .iter() + .map(|provider| provider.code_actions(&start_buffer, start..end, cx)) + .collect::>(); + (providers, tasks) + })?; + + let mut actions = Vec::new(); + for (provider, provider_actions) in + providers.into_iter().zip(future::join_all(tasks).await) + { + if let Some(provider_actions) = provider_actions.log_err() { + actions.extend(provider_actions.into_iter().map(|action| { + AvailableCodeAction { + excerpt_id: newest_selection.start.excerpt_id, + action, + provider: provider.clone(), + } + })); + } + } this.update(&mut cx, |this, cx| { this.available_code_actions = if actions.is_empty() { @@ -4872,7 +4928,6 @@ impl Editor { }; cx.notify(); }) - .log_err(); })); None } @@ -9685,7 +9740,7 @@ 
impl Editor { }) .context("location tasks preparation")?; - let locations = futures::future::join_all(location_tasks) + let locations = future::join_all(location_tasks) .await .into_iter() .filter_map(|location| location.transpose()) @@ -12574,6 +12629,48 @@ pub trait CompletionProvider { } } +pub trait CodeActionProvider { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>>; + + fn apply_code_action( + &self, + buffer_handle: Model, + action: CodeAction, + excerpt_id: ExcerptId, + push_to_history: bool, + cx: &mut WindowContext, + ) -> Task>; +} + +impl CodeActionProvider for Model { + fn code_actions( + &self, + buffer: &Model, + range: Range, + cx: &mut WindowContext, + ) -> Task>> { + self.update(cx, |project, cx| project.code_actions(buffer, range, cx)) + } + + fn apply_code_action( + &self, + buffer_handle: Model, + action: CodeAction, + _excerpt_id: ExcerptId, + push_to_history: bool, + cx: &mut WindowContext, + ) -> Task> { + self.update(cx, |project, cx| { + project.apply_code_action(buffer_handle, action, push_to_history, cx) + }) + } +} + fn snippet_completions( project: &Project, buffer: &Model, diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index b909e63271..3035892d7a 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -407,7 +407,11 @@ impl BackgroundExecutor { /// How many CPUs are available to the dispatcher. pub fn num_cpus(&self) -> usize { - num_cpus::get() + #[cfg(any(test, feature = "test-support"))] + return 4; + + #[cfg(not(any(test, feature = "test-support")))] + return num_cpus::get(); } /// Whether we're on the main thread. 
diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index d406f9bfaf..0df196bb98 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -1810,6 +1810,69 @@ impl MultiBuffer { self.as_singleton().unwrap().read(cx).is_parsing() } + pub fn resize_excerpt( + &mut self, + id: ExcerptId, + range: Range, + cx: &mut ModelContext, + ) { + self.sync(cx); + + let snapshot = self.snapshot(cx); + let locator = snapshot.excerpt_locator_for_id(id); + let mut new_excerpts = SumTree::default(); + let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(&()); + let mut edits = Vec::>::new(); + + let prefix = cursor.slice(&Some(locator), Bias::Left, &()); + new_excerpts.append(prefix, &()); + + let mut excerpt = cursor.item().unwrap().clone(); + let old_text_len = excerpt.text_summary.len; + + excerpt.range.context.start = range.start; + excerpt.range.context.end = range.end; + excerpt.max_buffer_row = range.end.to_point(&excerpt.buffer).row; + + excerpt.text_summary = excerpt + .buffer + .text_summary_for_range(excerpt.range.context.clone()); + + let new_start_offset = new_excerpts.summary().text.len; + let old_start_offset = cursor.start().1; + let edit = Edit { + old: old_start_offset..old_start_offset + old_text_len, + new: new_start_offset..new_start_offset + excerpt.text_summary.len, + }; + + if let Some(last_edit) = edits.last_mut() { + if last_edit.old.end == edit.old.start { + last_edit.old.end = edit.old.end; + last_edit.new.end = edit.new.end; + } else { + edits.push(edit); + } + } else { + edits.push(edit); + } + + new_excerpts.push(excerpt, &()); + + cursor.next(&()); + + new_excerpts.append(cursor.suffix(&()), &()); + + drop(cursor); + self.snapshot.borrow_mut().excerpts = new_excerpts; + + self.subscriptions.publish_mut(edits); + cx.emit(Event::Edited { + singleton_buffer_edited: false, + }); + cx.emit(Event::ExcerptsExpanded { ids: vec![id] }); + cx.notify(); + } + 
pub fn expand_excerpts( &mut self, ids: impl IntoIterator, @@ -3139,6 +3202,10 @@ impl MultiBufferSnapshot { None } + pub fn context_range_for_excerpt(&self, excerpt_id: ExcerptId) -> Option> { + Some(self.excerpt(excerpt_id)?.range.context.clone()) + } + pub fn can_resolve(&self, anchor: &Anchor) -> bool { if anchor.excerpt_id == ExcerptId::min() || anchor.excerpt_id == ExcerptId::max() { true diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 4506fcc6fe..b2920bc791 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -1431,7 +1431,7 @@ impl LspStore { buffer_handle: &Model, range: Range, cx: &mut ModelContext, - ) -> Task> { + ) -> Task>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request_task = upstream_client.request(proto::MultiLspQuery { buffer_id: buffer_handle.read(cx).remote_id().into(), @@ -1451,14 +1451,11 @@ impl LspStore { let buffer = buffer_handle.clone(); cx.spawn(|weak_project, cx| async move { let Some(project) = weak_project.upgrade() else { - return Vec::new(); + return Ok(Vec::new()); }; - join_all( - request_task - .await - .log_err() - .map(|response| response.responses) - .unwrap_or_default() + let responses = request_task.await?.responses; + let actions = join_all( + responses .into_iter() .filter_map(|lsp_response| match lsp_response.response? { proto::lsp_response::Response::GetCodeActionsResponse(response) => { @@ -1470,7 +1467,7 @@ impl LspStore { } }) .map(|code_actions_response| { - let response = GetCodeActions { + GetCodeActions { range: range.clone(), kinds: None, } @@ -1479,14 +1476,17 @@ impl LspStore { project.clone(), buffer.clone(), cx.clone(), - ); - async move { response.await.log_err().unwrap_or_default() } + ) }), ) - .await - .into_iter() - .flatten() - .collect() + .await; + + Ok(actions + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .collect()) }) } else { let all_actions_task = self.request_multiple_lsp_locally( @@ -1498,7 +1498,9 @@ impl LspStore { }, cx, ); - cx.spawn(|_, _| async move { all_actions_task.await.into_iter().flatten().collect() }) + cx.spawn( + |_, _| async move { Ok(all_actions_task.await.into_iter().flatten().collect()) }, + ) } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 0c54a16187..b1347c6d06 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -3247,7 +3247,7 @@ impl Project { buffer_handle: &Model, range: Range, cx: &mut ModelContext, - ) -> Task> { + ) -> Task>> { let buffer = buffer_handle.read(cx); let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); self.lsp_store.update(cx, |lsp_store, cx| { diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index d0d67f0cda..a7d2e6766c 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -2708,7 +2708,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) { .next() .await; - let action = actions.await[0].clone(); + let action = actions.await.unwrap()[0].clone(); let apply = project.update(cx, |project, cx| { project.apply_code_action(buffer.clone(), action, true, cx) }); @@ -5046,6 +5046,7 @@ async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) { vec!["TailwindServer code action", "TypeScriptServer code action"], code_actions_task .await + .unwrap() .into_iter() .map(|code_action| code_action.lsp_action.title) .sorted() diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index fac3c55bf4..d5b719a657 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -2745,7 +2745,7 @@ pub mod tests { search_view .results_editor .update(cx, |editor, cx| editor.display_text(cx)), - "\n\n\nconst ONE: usize = 
1;\n\n\n\n\nconst TWO: usize = one::ONE + one::ONE;\n", + "\n\n\nconst TWO: usize = one::ONE + one::ONE;\n\n\n\n\nconst ONE: usize = 1;\n", "New search in directory should have a filter that matches a certain directory" ); }) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 1fbeab38a2..92bfc8c5c5 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -675,7 +675,9 @@ impl DelayedDebouncedEditAction { pub enum Event { PaneAdded(View), PaneRemoved, - ItemAdded, + ItemAdded { + item: Box, + }, ItemRemoved, ActiveItemChanged, UserSavedItem { @@ -2984,7 +2986,9 @@ impl Workspace { match event { pane::Event::AddItem { item } => { item.added_to_pane(self, pane, cx); - cx.emit(Event::ItemAdded); + cx.emit(Event::ItemAdded { + item: item.boxed_clone(), + }); } pane::Event::Split(direction) => { self.split_and_clone(pane, *direction, cx);