From 27a6bacab898325e1f4f6afb411885cbcd7a0536 Mon Sep 17 00:00:00 2001
From: Joseph Lyons
Date: Wed, 19 Apr 2023 08:31:47 -0400
Subject: [PATCH 01/26] Save panics as structured data

---
 crates/zed/src/main.rs | 99 ++++++++++++++++++++++++++----------------
 1 file changed, 62 insertions(+), 37 deletions(-)

diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs
index aec012db0b..6364abba8a 100644
--- a/crates/zed/src/main.rs
+++ b/crates/zed/src/main.rs
@@ -21,7 +21,7 @@ use log::LevelFilter;
 use node_runtime::NodeRuntime;
 use parking_lot::Mutex;
 use project::Fs;
-use serde_json::json;
+use serde::{Deserialize, Serialize};
 use settings::{
     self, settings_file::SettingsFile, KeymapFileContent, Settings, SettingsFileContent,
     WorkingDirectory,
@@ -317,6 +317,30 @@ fn init_logger() {
     }
 }
 
+#[derive(Serialize, Deserialize)]
+struct LocationData {
+    file: String,
+    line: u32,
+}
+
+#[derive(Serialize, Deserialize)]
+struct Panic {
+    thread: String,
+    payload: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    location_data: Option<LocationData>,
+    backtrace: Vec<String>,
+    // TODO
+    // stripped_backtrace: String,
+}
+
+#[derive(Serialize)]
+struct PanicRequest {
+    panic: Panic,
+    version: String,
+    token: String,
+}
+
 fn init_panic_hook(app_version: String) {
     let is_pty = stdout_is_a_pty();
     panic::set_hook(Box::new(move |info| {
@@ -333,39 +357,38 @@ fn init_panic_hook(app_version: String) {
             },
         };
 
-        let message = match info.location() {
-            Some(location) => {
-                format!(
-                    "thread '{}' panicked at '{}'\n{}:{}\n{:?}",
-                    thread,
-                    payload,
-                    location.file(),
-                    location.line(),
-                    backtrace
-                )
-            }
-            None => format!(
-                "thread '{}' panicked at '{}'\n{:?}",
-                thread, payload, backtrace
-            ),
+        let panic_data = Panic {
+            thread: thread.into(),
+            payload: payload.into(),
+            location_data: info.location().map(|location| LocationData {
+                file: location.file().into(),
+                line: location.line(),
+            }),
+            backtrace: format!("{:?}", backtrace)
+                .split("\n")
+                .map(|a| a.to_string())
+                .collect(),
+            // modified_backtrace: None,
         };
 
-        if is_pty {
-            eprintln!("{}", message);
-            return;
-        }
+        if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
+            if is_pty {
+                eprintln!("{}", panic_data_json);
+                return;
+            }
 
-        let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
-        let panic_file_path =
-            paths::LOGS_DIR.join(format!("zed-{}-{}.panic", app_version, timestamp));
-        let panic_file = std::fs::OpenOptions::new()
-            .append(true)
-            .create(true)
-            .open(&panic_file_path)
-            .log_err();
-        if let Some(mut panic_file) = panic_file {
-            write!(&mut panic_file, "{}", message).log_err();
-            panic_file.flush().log_err();
+            let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
+            let panic_file_path =
+                paths::LOGS_DIR.join(format!("zed-{}-{}.panic", app_version, timestamp));
+            let panic_file = std::fs::OpenOptions::new()
+                .append(true)
+                .create(true)
+                .open(&panic_file_path)
+                .log_err();
+            if let Some(mut panic_file) = panic_file {
+                write!(&mut panic_file, "{}", panic_data_json).log_err();
+                panic_file.flush().log_err();
+            }
         }
     }));
 }
@@ -402,15 +425,17 @@ fn upload_previous_panics(http: Arc<dyn HttpClient>, cx: &mut AppContext) {
        };

        if diagnostics_telemetry {
-            let text = smol::fs::read_to_string(&child_path)
+            let panic_data_text = smol::fs::read_to_string(&child_path)
                .await
                .context("error reading panic file")?;
-            let body = serde_json::to_string(&json!({
-                "text": text,
-                "version": version,
-                "token": ZED_SECRET_CLIENT_TOKEN,
-            }))
+
+            let body =
serde_json::to_string(&PanicRequest { + panic: serde_json::from_str(&panic_data_text)?, + version: version.to_string(), + token: ZED_SECRET_CLIENT_TOKEN.into(), + }) .unwrap(); + let request = Request::post(&panic_report_url) .redirect_policy(isahc::config::RedirectPolicy::Follow) .header("Content-Type", "application/json") From 0326a45a91ad8ee44453abe912a3015e98d047be Mon Sep 17 00:00:00 2001 From: Joseph Lyons Date: Wed, 19 Apr 2023 14:21:53 -0400 Subject: [PATCH 02/26] Give closure parameter a name --- crates/zed/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 6364abba8a..40a36cf2db 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -366,7 +366,7 @@ fn init_panic_hook(app_version: String) { }), backtrace: format!("{:?}", backtrace) .split("\n") - .map(|a| a.to_string()) + .map(|line| line.to_string()) .collect(), // modified_backtrace: None, }; From 5c8b41dd5460a39f0b0461cb05be3662177c34ee Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 20 Apr 2023 08:33:45 -0700 Subject: [PATCH 03/26] Remove stable guard for copilot --- crates/copilot/src/copilot.rs | 9 --------- 1 file changed, 9 deletions(-) diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index 1967c3cd14..09ee894340 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -35,15 +35,6 @@ actions!( ); pub fn init(http: Arc, node_runtime: Arc, cx: &mut AppContext) { - // Disable Copilot for stable releases. - if *cx.global::() == ReleaseChannel::Stable { - cx.update_global::(|filter, _cx| { - filter.filtered_namespaces.insert(COPILOT_NAMESPACE); - filter.filtered_namespaces.insert(COPILOT_AUTH_NAMESPACE); - }); - return; - } - let copilot = cx.add_model({ let node_runtime = node_runtime.clone(); move |cx| Copilot::start(http, node_runtime, cx) From f5bbb41cc2e1e36199a95ca8d51d580393bc4593 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 20 Apr 2023 08:34:50 -0700 Subject: [PATCH 04/26] Remove import --- crates/copilot/src/copilot.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index 09ee894340..8229aaa36a 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -21,8 +21,7 @@ use std::{ sync::Arc, }; use util::{ - channel::ReleaseChannel, fs::remove_matching, github::latest_github_release, http::HttpClient, - paths, ResultExt, + fs::remove_matching, github::latest_github_release, http::HttpClient, paths, ResultExt, }; const COPILOT_AUTH_NAMESPACE: &'static str = "copilot_auth"; From ad8162fc9cf0109b2ebca8420ffc4201e606195d Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 20 Apr 2023 08:36:29 -0700 Subject: [PATCH 05/26] Make sign_in init conditional --- crates/copilot/src/sign_in.rs | 74 +++++++++++++++++------------------ 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/crates/copilot/src/sign_in.rs b/crates/copilot/src/sign_in.rs index dc09ddf3f2..3b949217bc 100644 --- a/crates/copilot/src/sign_in.rs +++ b/crates/copilot/src/sign_in.rs @@ -23,51 +23,51 @@ struct OpenGithub; const COPILOT_SIGN_UP_URL: &'static str = "https://github.com/features/copilot"; pub fn init(cx: &mut AppContext) { - let copilot = Copilot::global(cx).unwrap(); + if let Some(copilot) = Copilot::global(cx) { + let mut code_verification: Option> = None; + cx.observe(&copilot, move |copilot, cx| { + let status = copilot.read(cx).status(); - let mut code_verification: 
Option> = None; - cx.observe(&copilot, move |copilot, cx| { - let status = copilot.read(cx).status(); - - match &status { - crate::Status::SigningIn { prompt } => { - if let Some(code_verification_handle) = code_verification.as_mut() { - if cx.has_window(code_verification_handle.window_id()) { - code_verification_handle.update(cx, |code_verification_view, cx| { - code_verification_view.set_status(status, cx) - }); - cx.activate_window(code_verification_handle.window_id()); - } else { + match &status { + crate::Status::SigningIn { prompt } => { + if let Some(code_verification_handle) = code_verification.as_mut() { + if cx.has_window(code_verification_handle.window_id()) { + code_verification_handle.update(cx, |code_verification_view, cx| { + code_verification_view.set_status(status, cx) + }); + cx.activate_window(code_verification_handle.window_id()); + } else { + create_copilot_auth_window(cx, &status, &mut code_verification); + } + } else if let Some(_prompt) = prompt { create_copilot_auth_window(cx, &status, &mut code_verification); } - } else if let Some(_prompt) = prompt { - create_copilot_auth_window(cx, &status, &mut code_verification); } - } - Status::Authorized | Status::Unauthorized => { - if let Some(code_verification) = code_verification.as_ref() { - code_verification.update(cx, |code_verification, cx| { - code_verification.set_status(status, cx) - }); + Status::Authorized | Status::Unauthorized => { + if let Some(code_verification) = code_verification.as_ref() { + code_verification.update(cx, |code_verification, cx| { + code_verification.set_status(status, cx) + }); - cx.platform().activate(true); - cx.activate_window(code_verification.window_id()); + cx.platform().activate(true); + cx.activate_window(code_verification.window_id()); + } + } + _ => { + if let Some(code_verification) = code_verification.take() { + cx.remove_window(code_verification.window_id()); + } } } - _ => { - if let Some(code_verification) = code_verification.take() { - cx.remove_window(code_verification.window_id()); - } - } - } - }) - .detach(); + }) + .detach(); - cx.add_action( - |code_verification: &mut CopilotCodeVerification, _: &ClickedConnect, _| { - code_verification.connect_clicked = true; - }, - ); + cx.add_action( + |code_verification: &mut CopilotCodeVerification, _: &ClickedConnect, _| { + code_verification.connect_clicked = true; + }, + ); + } } fn create_copilot_auth_window( From c1daf0fc36f5b95d76e6bee4c7b499983fe6a1cd Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 20 Apr 2023 08:54:44 -0700 Subject: [PATCH 06/26] Fix format --- crates/editor/src/editor_tests.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index ce293ed064..646a8f33d6 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4371,7 +4371,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext) cx.set_state( &[ "one ", // - "twoˇ", // + "twoˇ", // "three ", // "four", // ] @@ -4446,7 +4446,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext) &[ "one", // "", // - "twoˇ", // + "twoˇ", // "", // "three", // "four", // @@ -4461,7 +4461,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext) cx.assert_editor_state( &[ "one ", // - "twoˇ", // + "twoˇ", // "three ", // "four", // ] From ba7233f2654e1196dde78d578317a965b4aa2a01 Mon Sep 17 00:00:00 2001 From: Julia Date: Fri, 7 Apr 2023 11:46:05 -0400 
Subject: [PATCH 07/26] Incomplete refactor to allow for multiple adapters per language --- crates/editor/src/multi_buffer.rs | 9 + crates/language/src/buffer.rs | 1 + crates/language/src/language.rs | 163 ++-- crates/language/src/proto.rs | 2 + crates/project/src/lsp_command.rs | 32 +- crates/project/src/project.rs | 988 ++++++++++++++----------- crates/project/src/project_tests.rs | 5 +- crates/rpc/proto/zed.proto | 7 +- crates/zed/src/languages.rs | 84 +-- crates/zed/src/languages/typescript.rs | 104 ++- 10 files changed, 818 insertions(+), 577 deletions(-) diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 824c108e46..f3a3c9b00f 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2764,6 +2764,15 @@ impl MultiBufferSnapshot { .and_then(|(buffer, offset)| buffer.language_scope_at(offset)) } + pub fn language_indent_size_at( + &self, + position: T, + cx: &AppContext, + ) -> Option { + let (buffer_snapshot, offset) = self.point_to_buffer_offset(position)?; + Some(buffer_snapshot.language_indent_size_at(offset, cx)) + } + pub fn is_dirty(&self) -> bool { self.is_dirty } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 7325ca9af5..d24b9f7033 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -156,6 +156,7 @@ pub struct Completion { #[derive(Clone, Debug)] pub struct CodeAction { + pub server_id: usize, pub range: Range, pub lsp_action: lsp::CodeAction, } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 81aa1de7bd..7e65a73ffc 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -414,7 +414,7 @@ pub struct BracketPair { pub struct Language { pub(crate) config: LanguageConfig, pub(crate) grammar: Option>, - pub(crate) adapter: Option>, + pub(crate) adapters: Vec>, #[cfg(any(test, feature = "test-support"))] fake_adapter: Option<( @@ -492,7 +492,7 @@ struct AvailableLanguage { path: &'static str, config: LanguageConfig, grammar: tree_sitter::Language, - lsp_adapter: Option>, + lsp_adapters: Vec>, get_queries: fn(&str) -> LanguageQueries, } @@ -513,6 +513,7 @@ pub struct LanguageRegistry { } struct LanguageRegistryState { + next_language_server_id: usize, languages: Vec>, available_languages: Vec, next_available_language_id: AvailableLanguageId, @@ -522,11 +523,17 @@ struct LanguageRegistryState { version: usize, } +pub struct PendingLanguageServer { + pub server_id: usize, + pub task: Task>, +} + impl LanguageRegistry { pub fn new(login_shell_env_loaded: Task<()>) -> Self { let (lsp_binary_statuses_tx, lsp_binary_statuses_rx) = async_broadcast::broadcast(16); Self { state: RwLock::new(LanguageRegistryState { + next_language_server_id: 0, languages: vec![PLAIN_TEXT.clone()], available_languages: Default::default(), next_available_language_id: 0, @@ -558,7 +565,7 @@ impl LanguageRegistry { path: &'static str, config: LanguageConfig, grammar: tree_sitter::Language, - lsp_adapter: Option>, + lsp_adapters: Vec>, get_queries: fn(&str) -> LanguageQueries, ) { let state = &mut *self.state.write(); @@ -567,7 +574,7 @@ impl LanguageRegistry { path, config, grammar, - lsp_adapter, + lsp_adapters, get_queries, }); } @@ -590,12 +597,13 @@ impl LanguageRegistry { state .available_languages .iter() - .filter_map(|l| l.lsp_adapter.clone()) + .flat_map(|l| l.lsp_adapters.clone()) .chain( state .languages .iter() - .filter_map(|l| l.adapter.as_ref().map(|a| a.adapter.clone())), + .flat_map(|language| 
&language.adapters) + .map(|adapter| adapter.adapter.clone()), ) .collect::>() }; @@ -721,7 +729,7 @@ impl LanguageRegistry { let queries = (language.get_queries)(&language.path); let language = Language::new(language.config, Some(language.grammar)) - .with_lsp_adapter(language.lsp_adapter) + .with_lsp_adapters(language.lsp_adapters) .await; let name = language.name(); match language.with_queries(queries) { @@ -774,18 +782,16 @@ impl LanguageRegistry { self.state.read().languages.iter().cloned().collect() } - pub fn start_language_server( + pub fn start_language_servers( self: &Arc, - server_id: usize, language: Arc, root_path: Arc, http_client: Arc, cx: &mut AppContext, - ) -> Option>> { + ) -> Vec { #[cfg(any(test, feature = "test-support"))] if language.fake_adapter.is_some() { - let language = language; - return Some(cx.spawn(|cx| async move { + let task = cx.spawn(|cx| async move { let (servers_tx, fake_adapter) = language.fake_adapter.as_ref().unwrap(); let (server, mut fake_server) = lsp::LanguageServer::fake( fake_adapter.name.to_string(), @@ -810,53 +816,71 @@ impl LanguageRegistry { }) .detach(); Ok(server) - })); + }); + return vec![PendingLanguageServer { server_id: 0, task }]; } let download_dir = self .language_server_download_dir .clone() .ok_or_else(|| anyhow!("language server download directory has not been assigned")) - .log_err()?; + .log_err(); + let download_dir = match download_dir { + Some(download_dir) => download_dir, + None => return Vec::new(), + }; - let this = self.clone(); - let adapter = language.adapter.clone()?; - let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone(); - let login_shell_env_loaded = self.login_shell_env_loaded.clone(); + let mut results = Vec::new(); - Some(cx.spawn(|cx| async move { - login_shell_env_loaded.await; + for adapter in &language.adapters { + let this = self.clone(); + let language = language.clone(); + let http_client = http_client.clone(); + let download_dir = download_dir.clone(); + let root_path = root_path.clone(); + let adapter = adapter.clone(); + let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone(); + let login_shell_env_loaded = self.login_shell_env_loaded.clone(); + let server_id = post_inc(&mut self.state.write().next_language_server_id); - let mut lock = this.lsp_binary_paths.lock(); - let entry = lock - .entry(adapter.name.clone()) - .or_insert_with(|| { - get_binary( - adapter.clone(), - language.clone(), - http_client, - download_dir, - lsp_binary_statuses, - ) - .map_err(Arc::new) - .boxed() - .shared() - }) - .clone(); - drop(lock); - let binary = entry.clone().map_err(|e| anyhow!(e)).await?; + let task = cx.spawn(|cx| async move { + login_shell_env_loaded.await; - let server = lsp::LanguageServer::new( - server_id, - &binary.path, - &binary.arguments, - &root_path, - adapter.code_action_kinds(), - cx, - )?; + let mut lock = this.lsp_binary_paths.lock(); + let entry = lock + .entry(adapter.name.clone()) + .or_insert_with(|| { + get_binary( + adapter.clone(), + language.clone(), + http_client, + download_dir, + lsp_binary_statuses, + ) + .map_err(Arc::new) + .boxed() + .shared() + }) + .clone(); + drop(lock); + let binary = entry.clone().map_err(|e| anyhow!(e)).await?; - Ok(server) - })) + let server = lsp::LanguageServer::new( + server_id, + &binary.path, + &binary.arguments, + &root_path, + adapter.code_action_kinds(), + cx, + )?; + + Ok(server) + }); + + results.push(PendingLanguageServer { server_id, task }); + } + + results } pub fn language_server_binary_statuses( @@ -974,15 +998,15 @@ impl 
Language { highlight_map: Default::default(), }) }), - adapter: None, + adapters: Vec::new(), #[cfg(any(test, feature = "test-support"))] fake_adapter: None, } } - pub fn lsp_adapter(&self) -> Option> { - self.adapter.clone() + pub fn lsp_adapters(&self) -> &[Arc] { + &self.adapters } pub fn id(&self) -> Option { @@ -1209,9 +1233,9 @@ impl Language { Arc::get_mut(self.grammar.as_mut().unwrap()).unwrap() } - pub async fn with_lsp_adapter(mut self, lsp_adapter: Option>) -> Self { - if let Some(adapter) = lsp_adapter { - self.adapter = Some(CachedLspAdapter::new(adapter).await); + pub async fn with_lsp_adapters(mut self, lsp_adapters: Vec>) -> Self { + for adapter in lsp_adapters { + self.adapters.push(CachedLspAdapter::new(adapter).await); } self } @@ -1224,7 +1248,7 @@ impl Language { let (servers_tx, servers_rx) = mpsc::unbounded(); self.fake_adapter = Some((servers_tx, fake_lsp_adapter.clone())); let adapter = CachedLspAdapter::new(Arc::new(fake_lsp_adapter)).await; - self.adapter = Some(adapter); + self.adapters = vec![adapter]; servers_rx } @@ -1233,28 +1257,31 @@ impl Language { } pub async fn disk_based_diagnostic_sources(&self) -> &[String] { - match self.adapter.as_ref() { + match self.adapters.first().as_ref() { Some(adapter) => &adapter.disk_based_diagnostic_sources, None => &[], } } pub async fn disk_based_diagnostics_progress_token(&self) -> Option<&str> { - if let Some(adapter) = self.adapter.as_ref() { - adapter.disk_based_diagnostics_progress_token.as_deref() - } else { - None + for adapter in &self.adapters { + let token = adapter.disk_based_diagnostics_progress_token.as_deref(); + if token.is_some() { + return token; + } } + + None } pub async fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams) { - if let Some(processor) = self.adapter.as_ref() { - processor.process_diagnostics(diagnostics).await; + for adapter in &self.adapters { + adapter.process_diagnostics(diagnostics).await; } } pub async fn process_completion(self: &Arc, completion: &mut lsp::CompletionItem) { - if let Some(adapter) = self.adapter.as_ref() { + for adapter in &self.adapters { adapter.process_completion(completion).await; } } @@ -1263,7 +1290,8 @@ impl Language { self: &Arc, completion: &lsp::CompletionItem, ) -> Option { - self.adapter + self.adapters + .first() .as_ref()? .label_for_completion(completion, self) .await @@ -1274,7 +1302,8 @@ impl Language { name: &str, kind: lsp::SymbolKind, ) -> Option { - self.adapter + self.adapters + .first() .as_ref()? 
.label_for_symbol(name, kind, self) .await @@ -1595,7 +1624,7 @@ mod tests { ..Default::default() }, tree_sitter_json::language(), - None, + vec![], |_| Default::default(), ); languages.register( @@ -1606,7 +1635,7 @@ mod tests { ..Default::default() }, tree_sitter_rust::language(), - None, + vec![], |_| Default::default(), ); assert_eq!( diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 1f6ecd0a90..fb50f2a743 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -462,6 +462,7 @@ pub async fn deserialize_completion( pub fn serialize_code_action(action: &CodeAction) -> proto::CodeAction { proto::CodeAction { + server_id: action.server_id as u64, start: Some(serialize_anchor(&action.range.start)), end: Some(serialize_anchor(&action.range.end)), lsp_action: serde_json::to_vec(&action.lsp_action).unwrap(), @@ -479,6 +480,7 @@ pub fn deserialize_code_action(action: proto::CodeAction) -> Result .ok_or_else(|| anyhow!("invalid end"))?; let lsp_action = serde_json::from_slice(&action.lsp_action)?; Ok(CodeAction { + server_id: action.server_id as usize, range: start..end, lsp_action, }) diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index fb69df8766..96e44d6f84 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -33,21 +33,25 @@ pub(crate) trait LspCommand: 'static + Sized { language_server: &Arc, cx: &AppContext, ) -> ::Params; + async fn response_from_lsp( self, message: ::Result, project: ModelHandle, buffer: ModelHandle, + server_id: usize, cx: AsyncAppContext, ) -> Result; fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest; + async fn from_proto( message: Self::ProtoRequest, project: ModelHandle, buffer: ModelHandle, cx: AsyncAppContext, ) -> Result; + fn response_to_proto( response: Self::Response, project: &mut Project, @@ -55,6 +59,7 @@ pub(crate) trait LspCommand: 'static + Sized { buffer_version: &clock::Global, cx: &mut AppContext, ) -> ::Response; + async fn response_from_proto( self, message: ::Response, @@ -62,6 +67,7 @@ pub(crate) trait LspCommand: 'static + Sized { buffer: ModelHandle, cx: AsyncAppContext, ) -> Result; + fn buffer_id_from_proto(message: &Self::ProtoRequest) -> u64; } @@ -137,6 +143,7 @@ impl LspCommand for PrepareRename { message: Option, _: ModelHandle, buffer: ModelHandle, + _: usize, cx: AsyncAppContext, ) -> Result>> { buffer.read_with(&cx, |buffer, _| { @@ -263,10 +270,12 @@ impl LspCommand for PerformRename { message: Option, project: ModelHandle, buffer: ModelHandle, + server_id: usize, mut cx: AsyncAppContext, ) -> Result { if let Some(edit) = message { - let (lsp_adapter, lsp_server) = language_server_for_buffer(&project, &buffer, &mut cx)?; + let (lsp_adapter, lsp_server) = + language_server_for_buffer(&project, &buffer, server_id, &mut cx)?; Project::deserialize_workspace_edit( project, edit, @@ -380,9 +389,10 @@ impl LspCommand for GetDefinition { message: Option, project: ModelHandle, buffer: ModelHandle, + server_id: usize, cx: AsyncAppContext, ) -> Result> { - location_links_from_lsp(message, project, buffer, cx).await + location_links_from_lsp(message, project, buffer, server_id, cx).await } fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDefinition { @@ -472,9 +482,10 @@ impl LspCommand for GetTypeDefinition { message: Option, project: ModelHandle, buffer: ModelHandle, + server_id: usize, cx: AsyncAppContext, ) -> Result> { - location_links_from_lsp(message, project, buffer, 
cx).await + location_links_from_lsp(message, project, buffer, server_id, cx).await } fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetTypeDefinition { @@ -537,12 +548,13 @@ impl LspCommand for GetTypeDefinition { fn language_server_for_buffer( project: &ModelHandle, buffer: &ModelHandle, + server_id: usize, cx: &mut AsyncAppContext, ) -> Result<(Arc, Arc)> { project .read_with(cx, |project, cx| { project - .language_server_for_buffer(buffer.read(cx), cx) + .language_server_for_buffer(buffer.read(cx), server_id, cx) .map(|(adapter, server)| (adapter.clone(), server.clone())) }) .ok_or_else(|| anyhow!("no language server found for buffer")) @@ -614,6 +626,7 @@ async fn location_links_from_lsp( message: Option, project: ModelHandle, buffer: ModelHandle, + server_id: usize, mut cx: AsyncAppContext, ) -> Result> { let message = match message { @@ -642,7 +655,8 @@ async fn location_links_from_lsp( } } - let (lsp_adapter, language_server) = language_server_for_buffer(&project, &buffer, &mut cx)?; + let (lsp_adapter, language_server) = + language_server_for_buffer(&project, &buffer, server_id, &mut cx)?; let mut definitions = Vec::new(); for (origin_range, target_uri, target_range) in unresolved_links { let target_buffer_handle = project @@ -756,11 +770,12 @@ impl LspCommand for GetReferences { locations: Option>, project: ModelHandle, buffer: ModelHandle, + server_id: usize, mut cx: AsyncAppContext, ) -> Result> { let mut references = Vec::new(); let (lsp_adapter, language_server) = - language_server_for_buffer(&project, &buffer, &mut cx)?; + language_server_for_buffer(&project, &buffer, server_id, &mut cx)?; if let Some(locations) = locations { for lsp_location in locations { @@ -917,6 +932,7 @@ impl LspCommand for GetDocumentHighlights { lsp_highlights: Option>, _: ModelHandle, buffer: ModelHandle, + _: usize, cx: AsyncAppContext, ) -> Result> { buffer.read_with(&cx, |buffer, _| { @@ -1062,6 +1078,7 @@ impl LspCommand for GetHover { message: Option, _: ModelHandle, buffer: ModelHandle, + _: usize, cx: AsyncAppContext, ) -> Result { Ok(message.and_then(|hover| { @@ -1283,6 +1300,7 @@ impl LspCommand for GetCompletions { completions: Option, _: ModelHandle, buffer: ModelHandle, + _: usize, cx: AsyncAppContext, ) -> Result> { let completions = if let Some(completions) = completions { @@ -1502,6 +1520,7 @@ impl LspCommand for GetCodeActions { actions: Option, _: ModelHandle, _: ModelHandle, + server_id: usize, _: AsyncAppContext, ) -> Result> { Ok(actions @@ -1510,6 +1529,7 @@ impl LspCommand for GetCodeActions { .filter_map(|entry| { if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry { Some(CodeAction { + server_id, range: self.range.clone(), lsp_action, }) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index d5b7ac3f3f..36cd76fe3d 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -31,8 +31,8 @@ use language::{ range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt, Operation, Patch, - PointUtf16, RopeFingerprint, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, - Unclipped, + PendingLanguageServer, PointUtf16, RopeFingerprint, TextBufferSnapshot, ToOffset, ToPointUtf16, + Transaction, Unclipped, }; use lsp::{ DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, @@ -99,7 
+99,6 @@ pub struct Project { language_server_ids: HashMap<(WorktreeId, LanguageServerName), usize>, language_server_statuses: BTreeMap, last_workspace_edits_by_language_server: HashMap, - next_language_server_id: usize, client: Arc, next_entry_id: Arc, join_project_response_message_id: u32, @@ -124,7 +123,7 @@ pub struct Project { /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it. /// Used for re-issuing buffer requests when peers temporarily disconnect incomplete_remote_buffers: HashMap>>, - buffer_snapshots: HashMap>, + buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots buffers_being_formatted: HashSet, nonce: u128, _maintain_buffer_languages: Task<()>, @@ -133,6 +132,11 @@ pub struct Project { copilot_enabled: bool, } +struct LspBufferSnapshot { + version: i32, + snapshot: TextBufferSnapshot, +} + enum BufferMessage { Operation { buffer_id: u64, @@ -469,7 +473,6 @@ impl Project { language_server_statuses: Default::default(), last_workspace_edits_by_language_server: Default::default(), buffers_being_formatted: Default::default(), - next_language_server_id: 0, nonce: StdRng::from_entropy().gen(), terminals: Terminals { local_handles: Vec::new(), @@ -554,7 +557,6 @@ impl Project { }) .collect(), last_workspace_edits_by_language_server: Default::default(), - next_language_server_id: 0, opened_buffers: Default::default(), buffers_being_formatted: Default::default(), buffer_snapshots: Default::default(), @@ -645,7 +647,7 @@ impl Project { let mut language_servers_to_stop = Vec::new(); for language in self.languages.to_vec() { - if let Some(lsp_adapter) = language.lsp_adapter() { + for lsp_adapter in language.lsp_adapters() { if !settings.enable_language_server(Some(&language.name())) { let lsp_name = &lsp_adapter.name; for (worktree_id, started_lsp_name) in self.language_server_ids.keys() { @@ -665,7 +667,7 @@ impl Project { // Start all the newly-enabled language servers. 
for (worktree_id, worktree_path, language) in language_servers_to_start { - self.start_language_server(worktree_id, worktree_path, language, cx); + self.start_language_servers(worktree_id, worktree_path, language, cx); } if !self.copilot_enabled && Copilot::global(cx).is_some() { @@ -1550,7 +1552,7 @@ impl Project { cx.spawn(|this, mut cx| async move { if let Some(old_path) = old_path { this.update(&mut cx, |this, cx| { - this.unregister_buffer_from_language_server(&buffer, old_path, cx); + this.unregister_buffer_from_language_servers(&buffer, old_path, cx); }); } let (worktree, path) = worktree_task.await?; @@ -1564,7 +1566,7 @@ impl Project { .await?; this.update(&mut cx, |this, cx| { this.detect_language_for_buffer(&buffer, cx); - this.register_buffer_with_language_server(&buffer, cx); + this.register_buffer_with_language_servers(&buffer, cx); }); Ok(()) }) @@ -1628,14 +1630,15 @@ impl Project { .detach(); self.detect_language_for_buffer(buffer, cx); - self.register_buffer_with_language_server(buffer, cx); + self.register_buffer_with_language_servers(buffer, cx); self.register_buffer_with_copilot(buffer, cx); cx.observe_release(buffer, |this, buffer, cx| { if let Some(file) = File::from_dyn(buffer.file()) { if file.is_local() { let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); - if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) { + for server in this.language_servers_for_buffer(buffer, cx) { server + .1 .notify::( lsp::DidCloseTextDocumentParams { text_document: lsp::TextDocumentIdentifier::new(uri), @@ -1652,46 +1655,50 @@ impl Project { Ok(()) } - fn register_buffer_with_language_server( + fn register_buffer_with_language_servers( &mut self, buffer_handle: &ModelHandle, cx: &mut ModelContext, ) { let buffer = buffer_handle.read(cx); let buffer_id = buffer.remote_id(); + if let Some(file) = File::from_dyn(buffer.file()) { - if file.is_local() { - let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); - let initial_snapshot = buffer.text_snapshot(); + if !file.is_local() { + return; + } - let mut language_server = None; - let mut language_id = None; - if let Some(language) = buffer.language() { - let worktree_id = file.worktree_id(cx); - if let Some(adapter) = language.lsp_adapter() { - language_id = adapter.language_ids.get(language.name().as_ref()).cloned(); - language_server = self - .language_server_ids - .get(&(worktree_id, adapter.name.clone())) - .and_then(|id| self.language_servers.get(id)) - .and_then(|server_state| { - if let LanguageServerState::Running { server, .. 
} = server_state { - Some(server.clone()) - } else { - None - } - }); - } + let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); + let initial_snapshot = buffer.text_snapshot(); + + if let Some(local_worktree) = file.worktree.read(cx).as_local() { + if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) { + self.update_buffer_diagnostics(buffer_handle, diagnostics, None, cx) + .log_err(); } + } - if let Some(local_worktree) = file.worktree.read(cx).as_local() { - if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) { - self.update_buffer_diagnostics(buffer_handle, diagnostics, None, cx) - .log_err(); - } - } + if let Some(language) = buffer.language() { + let worktree_id = file.worktree_id(cx); + + for adapter in language.lsp_adapters() { + let language_id = adapter.language_ids.get(language.name().as_ref()).cloned(); + let server = self + .language_server_ids + .get(&(worktree_id, adapter.name.clone())) + .and_then(|id| self.language_servers.get(id)) + .and_then(|server_state| { + if let LanguageServerState::Running { server, .. } = server_state { + Some(server.clone()) + } else { + None + } + }); + let server = match server { + Some(server) => server, + None => continue, + }; - if let Some(server) = language_server { server .notify::( lsp::DidOpenTextDocumentParams { @@ -1704,6 +1711,7 @@ impl Project { }, ) .log_err(); + buffer_handle.update(cx, |buffer, cx| { buffer.set_completion_triggers( server @@ -1713,16 +1721,23 @@ impl Project { .and_then(|provider| provider.trigger_characters.clone()) .unwrap_or_default(), cx, - ) + ); }); + + let snapshot = LspBufferSnapshot { + version: 0, + snapshot: initial_snapshot, + }; self.buffer_snapshots - .insert(buffer_id, vec![(0, initial_snapshot)]); + .entry(buffer_id) + .or_default() + .insert(server.server_id(), vec![snapshot]); } } } } - fn unregister_buffer_from_language_server( + fn unregister_buffer_from_language_servers( &mut self, buffer: &ModelHandle, old_path: PathBuf, @@ -1731,7 +1746,7 @@ impl Project { buffer.update(cx, |buffer, cx| { buffer.update_diagnostics(Default::default(), cx); self.buffer_snapshots.remove(&buffer.remote_id()); - if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) { + for (_, language_server) in self.language_servers_for_buffer(buffer, cx) { language_server .notify::( lsp::DidCloseTextDocumentParams { @@ -1833,52 +1848,62 @@ impl Project { }) .ok(); } + BufferEvent::Edited { .. 
} => { - let language_server = self - .language_server_for_buffer(buffer.read(cx), cx) - .map(|(_, server)| server.clone())?; let buffer = buffer.read(cx); let file = File::from_dyn(buffer.file())?; let abs_path = file.as_local()?.abs_path(cx); let uri = lsp::Url::from_file_path(abs_path).unwrap(); - let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?; - let (version, prev_snapshot) = buffer_snapshots.last()?; let next_snapshot = buffer.text_snapshot(); - let next_version = version + 1; - let content_changes = buffer - .edits_since::<(PointUtf16, usize)>(prev_snapshot.version()) - .map(|edit| { - let edit_start = edit.new.start.0; - let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0); - let new_text = next_snapshot - .text_for_range(edit.new.start.1..edit.new.end.1) - .collect(); - lsp::TextDocumentContentChangeEvent { - range: Some(lsp::Range::new( - point_to_lsp(edit_start), - point_to_lsp(edit_end), - )), - range_length: None, - text: new_text, - } - }) - .collect(); + for (_, language_server) in self.language_servers_for_buffer(buffer, cx) { + let language_server = language_server.clone(); - buffer_snapshots.push((next_version, next_snapshot)); + let buffer_snapshots = self + .buffer_snapshots + .get_mut(&buffer.remote_id()) + .and_then(|m| m.get_mut(&language_server.server_id()))?; + let previous_snapshot = buffer_snapshots.last()?; + let next_version = previous_snapshot.version + 1; - language_server - .notify::( - lsp::DidChangeTextDocumentParams { - text_document: lsp::VersionedTextDocumentIdentifier::new( - uri, - next_version, - ), - content_changes, - }, - ) - .log_err(); + let content_changes = buffer + .edits_since::<(PointUtf16, usize)>(previous_snapshot.snapshot.version()) + .map(|edit| { + let edit_start = edit.new.start.0; + let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0); + let new_text = next_snapshot + .text_for_range(edit.new.start.1..edit.new.end.1) + .collect(); + lsp::TextDocumentContentChangeEvent { + range: Some(lsp::Range::new( + point_to_lsp(edit_start), + point_to_lsp(edit_end), + )), + range_length: None, + text: new_text, + } + }) + .collect(); + + buffer_snapshots.push(LspBufferSnapshot { + version: next_version, + snapshot: next_snapshot, + }); + + language_server + .notify::( + lsp::DidChangeTextDocumentParams { + text_document: lsp::VersionedTextDocumentIdentifier::new( + uri, + next_version, + ), + content_changes, + }, + ) + .log_err(); + } } + BufferEvent::Saved => { let file = File::from_dyn(buffer.read(cx).file())?; let worktree_id = file.worktree_id(cx); @@ -1898,13 +1923,17 @@ impl Project { .log_err(); } - let language_server_id = self.language_server_id_for_buffer(buffer.read(cx), cx)?; - if let Some(LanguageServerState::Running { - adapter, - simulate_disk_based_diagnostics_completion, - .. - }) = self.language_servers.get_mut(&language_server_id) - { + let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx); + for language_server_id in language_server_ids { + let LanguageServerState::Running { + adapter, + simulate_disk_based_diagnostics_completion, + .. + } = match self.language_servers.get_mut(&language_server_id) { + Some(state) => state, + None => continue, + }; + // After saving a buffer using a language server that doesn't provide // a disk-based progress token, kick off a timer that will reset every // time the buffer is saved. 
If the timer eventually fires, simulate @@ -1933,6 +1962,7 @@ impl Project { } } } + _ => {} } @@ -1987,7 +2017,7 @@ impl Project { for buffer in plain_text_buffers { project.detect_language_for_buffer(&buffer, cx); - project.register_buffer_with_language_server(&buffer, cx); + project.register_buffer_with_language_servers(&buffer, cx); } for buffer in buffers_with_unknown_injections { @@ -2071,12 +2101,12 @@ impl Project { if let Some(worktree) = file.worktree.read(cx).as_local() { let worktree_id = worktree.id(); let worktree_abs_path = worktree.abs_path().clone(); - self.start_language_server(worktree_id, worktree_abs_path, new_language, cx); + self.start_language_servers(worktree_id, worktree_abs_path, new_language, cx); } } } - fn start_language_server( + fn start_language_servers( &mut self, worktree_id: WorktreeId, worktree_path: Arc, @@ -2090,313 +2120,333 @@ impl Project { return; } - let adapter = if let Some(adapter) = language.lsp_adapter() { - adapter - } else { - return; - }; - let key = (worktree_id, adapter.name.clone()); + let adapters = language.lsp_adapters(); + let language_servers = self.languages.start_language_servers( + language.clone(), + worktree_path, + self.client.http_client(), + cx, + ); + debug_assert_eq!(adapters.len(), language_servers.len()); - let mut initialization_options = adapter.initialization_options.clone(); + for (adapter, pending_server) in adapters.into_iter().zip(language_servers.into_iter()) { + let key = (worktree_id, adapter.name.clone()); + let lsp = &cx.global::().lsp.get(&adapter.name.0); + let override_options = lsp.map(|s| s.initialization_options.clone()).flatten(); - let lsp = &cx.global::().lsp.get(&adapter.name.0); - let override_options = lsp.map(|s| s.initialization_options.clone()).flatten(); - match (&mut initialization_options, override_options) { - (Some(initialization_options), Some(override_options)) => { - merge_json_value_into(override_options, initialization_options); + let mut initialization_options = adapter.initialization_options.clone(); + match (&mut initialization_options, override_options) { + (Some(initialization_options), Some(override_options)) => { + merge_json_value_into(override_options, initialization_options); + } + (None, override_options) => initialization_options = override_options, + _ => {} } - (None, override_options) => initialization_options = override_options, - _ => {} + + self.language_server_ids + .entry(key.clone()) + .or_insert_with(|| { + self.setup_language_adapter( + worktree_path, + initialization_options, + pending_server, + adapter, + &language, + key, + cx, + ) + }); } + } - self.language_server_ids - .entry(key.clone()) - .or_insert_with(|| { - let languages = self.languages.clone(); - let server_id = post_inc(&mut self.next_language_server_id); - let language_server = self.languages.start_language_server( - server_id, - language.clone(), - worktree_path, - self.client.http_client(), - cx, - ); - self.language_servers.insert( - server_id, - LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move { - let workspace_config = - cx.update(|cx| languages.workspace_configuration(cx)).await; - let language_server = language_server?.await.log_err()?; - let language_server = language_server - .initialize(initialization_options) - .await - .log_err()?; - let this = this.upgrade(&cx)?; + fn setup_language_adapter( + &mut self, + worktree_path: Arc, + initialization_options: Option, + pending_server: PendingLanguageServer, + adapter: &Arc, + language: &Arc, + key: (WorktreeId, 
LanguageServerName), + cx: &mut ModelContext, + ) -> usize { + let server_id = pending_server.server_id; + let languages = self.languages.clone(); - language_server - .on_notification::({ - let this = this.downgrade(); - let adapter = adapter.clone(); - move |mut params, cx| { - let this = this; - let adapter = adapter.clone(); - cx.spawn(|mut cx| async move { - adapter.process_diagnostics(&mut params).await; - if let Some(this) = this.upgrade(&cx) { - this.update(&mut cx, |this, cx| { - this.update_diagnostics( - server_id, - params, - &adapter.disk_based_diagnostic_sources, - cx, - ) - .log_err(); - }); + self.language_servers.insert( + server_id, + LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move { + let workspace_config = cx.update(|cx| languages.workspace_configuration(cx)).await; + let language_server = pending_server.task.await.log_err()?; + let language_server = language_server + .initialize(initialization_options) + .await + .log_err()?; + let this = this.upgrade(&cx)?; + + language_server + .on_notification::({ + let this = this.downgrade(); + let adapter = adapter.clone(); + move |mut params, cx| { + let this = this; + let adapter = adapter.clone(); + cx.spawn(|mut cx| async move { + adapter.process_diagnostics(&mut params).await; + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, cx| { + this.update_diagnostics( + server_id, + params, + &adapter.disk_based_diagnostic_sources, + cx, + ) + .log_err(); + }); + } + }) + .detach(); + } + }) + .detach(); + + language_server + .on_request::({ + let languages = languages.clone(); + move |params, mut cx| { + let languages = languages.clone(); + async move { + let workspace_config = + cx.update(|cx| languages.workspace_configuration(cx)).await; + Ok(params + .items + .into_iter() + .map(|item| { + if let Some(section) = &item.section { + workspace_config + .get(section) + .cloned() + .unwrap_or(serde_json::Value::Null) + } else { + workspace_config.clone() } }) - .detach(); - } - }) - .detach(); + .collect()) + } + } + }) + .detach(); - language_server - .on_request::({ - let languages = languages.clone(); - move |params, mut cx| { - let languages = languages.clone(); - async move { - let workspace_config = cx - .update(|cx| languages.workspace_configuration(cx)) - .await; - Ok(params - .items - .into_iter() - .map(|item| { - if let Some(section) = &item.section { - workspace_config - .get(section) - .cloned() - .unwrap_or(serde_json::Value::Null) - } else { - workspace_config.clone() - } - }) - .collect()) - } - } - }) - .detach(); - - // Even though we don't have handling for these requests, respond to them to - // avoid stalling any language server like `gopls` which waits for a response - // to these requests when initializing. 
- language_server - .on_request::({ - let this = this.downgrade(); - move |params, mut cx| async move { - if let Some(this) = this.upgrade(&cx) { - this.update(&mut cx, |this, _| { - if let Some(status) = - this.language_server_statuses.get_mut(&server_id) - { - if let lsp::NumberOrString::String(token) = - params.token - { - status.progress_tokens.insert(token); - } - } - }); - } - Ok(()) - } - }) - .detach(); - language_server - .on_request::({ - let this = this.downgrade(); - move |params, mut cx| async move { - let this = this - .upgrade(&cx) - .ok_or_else(|| anyhow!("project dropped"))?; - for reg in params.registrations { - if reg.method == "workspace/didChangeWatchedFiles" { - if let Some(options) = reg.register_options { - let options = serde_json::from_value(options)?; - this.update(&mut cx, |this, cx| { - this.on_lsp_did_change_watched_files( - server_id, options, cx, - ); - }); - } + // Even though we don't have handling for these requests, respond to them to + // avoid stalling any language server like `gopls` which waits for a response + // to these requests when initializing. + language_server + .on_request::({ + let this = this.downgrade(); + move |params, mut cx| async move { + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, _| { + if let Some(status) = + this.language_server_statuses.get_mut(&server_id) + { + if let lsp::NumberOrString::String(token) = params.token { + status.progress_tokens.insert(token); } } - Ok(()) - } - }) - .detach(); - - language_server - .on_request::({ - let this = this.downgrade(); - let adapter = adapter.clone(); - let language_server = language_server.clone(); - move |params, cx| { - Self::on_lsp_workspace_edit( - this, - params, - server_id, - adapter.clone(), - language_server.clone(), - cx, - ) - } - }) - .detach(); - - let disk_based_diagnostics_progress_token = - adapter.disk_based_diagnostics_progress_token.clone(); - - language_server - .on_notification::({ - let this = this.downgrade(); - move |params, mut cx| { - if let Some(this) = this.upgrade(&cx) { + }); + } + Ok(()) + } + }) + .detach(); + language_server + .on_request::({ + let this = this.downgrade(); + move |params, mut cx| async move { + let this = this + .upgrade(&cx) + .ok_or_else(|| anyhow!("project dropped"))?; + for reg in params.registrations { + if reg.method == "workspace/didChangeWatchedFiles" { + if let Some(options) = reg.register_options { + let options = serde_json::from_value(options)?; this.update(&mut cx, |this, cx| { - this.on_lsp_progress( - params, - server_id, - disk_based_diagnostics_progress_token.clone(), - cx, + this.on_lsp_did_change_watched_files( + server_id, options, cx, ); }); } } - }) - .detach(); + } + Ok(()) + } + }) + .detach(); - language_server - .notify::( - lsp::DidChangeConfigurationParams { - settings: workspace_config, - }, + language_server + .on_request::({ + let this = this.downgrade(); + let adapter = adapter.clone(); + let language_server = language_server.clone(); + move |params, cx| { + Self::on_lsp_workspace_edit( + this, + params, + server_id, + adapter.clone(), + language_server.clone(), + cx, ) - .ok(); + } + }) + .detach(); - this.update(&mut cx, |this, cx| { - // If the language server for this key doesn't match the server id, don't store the - // server. 
Which will cause it to be dropped, killing the process - if this - .language_server_ids - .get(&key) - .map(|id| id != &server_id) - .unwrap_or(false) - { - return None; + let disk_based_diagnostics_progress_token = + adapter.disk_based_diagnostics_progress_token.clone(); + + language_server + .on_notification::({ + let this = this.downgrade(); + move |params, mut cx| { + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, cx| { + this.on_lsp_progress( + params, + server_id, + disk_based_diagnostics_progress_token.clone(), + cx, + ); + }); } + } + }) + .detach(); - // Update language_servers collection with Running variant of LanguageServerState - // indicating that the server is up and running and ready - this.language_servers.insert( - server_id, - LanguageServerState::Running { - adapter: adapter.clone(), - language, - watched_paths: Default::default(), - server: language_server.clone(), - simulate_disk_based_diagnostics_completion: None, - }, - ); - this.language_server_statuses.insert( - server_id, - LanguageServerStatus { + language_server + .notify::( + lsp::DidChangeConfigurationParams { + settings: workspace_config, + }, + ) + .ok(); + + this.update(&mut cx, |this, cx| { + // If the language server for this key doesn't match the server id, don't store the + // server. Which will cause it to be dropped, killing the process + if this + .language_server_ids + .get(&key) + .map(|id| id != &server_id) + .unwrap_or(false) + { + return None; + } + + // Update language_servers collection with Running variant of LanguageServerState + // indicating that the server is up and running and ready + this.language_servers.insert( + server_id, + LanguageServerState::Running { + adapter: adapter.clone(), + language: language.clone(), + watched_paths: Default::default(), + server: language_server.clone(), + simulate_disk_based_diagnostics_completion: None, + }, + ); + this.language_server_statuses.insert( + server_id, + LanguageServerStatus { + name: language_server.name().to_string(), + pending_work: Default::default(), + has_pending_diagnostic_updates: false, + progress_tokens: Default::default(), + }, + ); + + if let Some(project_id) = this.remote_id() { + this.client + .send(proto::StartLanguageServer { + project_id, + server: Some(proto::LanguageServer { + id: server_id as u64, name: language_server.name().to_string(), - pending_work: Default::default(), - has_pending_diagnostic_updates: false, - progress_tokens: Default::default(), - }, - ); + }), + }) + .log_err(); + } - if let Some(project_id) = this.remote_id() { - this.client - .send(proto::StartLanguageServer { - project_id, - server: Some(proto::LanguageServer { - id: server_id as u64, - name: language_server.name().to_string(), - }), - }) - .log_err(); + // Tell the language server about every open buffer in the worktree that matches the language. + for buffer in this.opened_buffers.values() { + if let Some(buffer_handle) = buffer.upgrade(cx) { + let buffer = buffer_handle.read(cx); + let file = match File::from_dyn(buffer.file()) { + Some(file) => file, + None => continue, + }; + let language = match buffer.language() { + Some(language) => language, + None => continue, + }; + + if file.worktree.read(cx).id() != key.0 + || !language.lsp_adapters().iter().any(|a| a.name == key.1) + { + continue; } - // Tell the language server about every open buffer in the worktree that matches the language. 
- for buffer in this.opened_buffers.values() { - if let Some(buffer_handle) = buffer.upgrade(cx) { - let buffer = buffer_handle.read(cx); - let file = if let Some(file) = File::from_dyn(buffer.file()) { - file - } else { - continue; - }; - let language = if let Some(language) = buffer.language() { - language - } else { - continue; - }; - if file.worktree.read(cx).id() != key.0 - || language.lsp_adapter().map(|a| a.name.clone()) - != Some(key.1.clone()) - { - continue; - } + let file = file.as_local()?; + let versions = this + .buffer_snapshots + .entry(buffer.remote_id()) + .or_default() + .entry(server_id) + .or_insert_with(|| { + vec![LspBufferSnapshot { + version: 0, + snapshot: buffer.text_snapshot(), + }] + }); - let file = file.as_local()?; - let versions = this - .buffer_snapshots - .entry(buffer.remote_id()) - .or_insert_with(|| vec![(0, buffer.text_snapshot())]); - - let (version, initial_snapshot) = versions.last().unwrap(); - let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); - language_server - .notify::( - lsp::DidOpenTextDocumentParams { - text_document: lsp::TextDocumentItem::new( - uri, - adapter - .language_ids - .get(language.name().as_ref()) - .cloned() - .unwrap_or_default(), - *version, - initial_snapshot.text(), - ), - }, - ) - .log_err()?; - buffer_handle.update(cx, |buffer, cx| { - buffer.set_completion_triggers( - language_server - .capabilities() - .completion_provider - .as_ref() - .and_then(|provider| { - provider.trigger_characters.clone() - }) + let snapshot = versions.last().unwrap(); + let version = snapshot.version; + let initial_snapshot = snapshot.snapshot; + let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); + language_server + .notify::( + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + uri, + adapter + .language_ids + .get(language.name().as_ref()) + .cloned() .unwrap_or_default(), - cx, - ) - }); - } - } + version, + initial_snapshot.text(), + ), + }, + ) + .log_err()?; + buffer_handle.update(cx, |buffer, cx| { + buffer.set_completion_triggers( + language_server + .capabilities() + .completion_provider + .as_ref() + .and_then(|provider| provider.trigger_characters.clone()) + .unwrap_or_default(), + cx, + ) + }); + } + } - cx.notify(); - Some(language_server) - }) - })), - ); - - server_id - }); + cx.notify(); + Some(language_server) + }) + })), + ); + server_id } // Returns a list of all of the worktrees which no longer have a language server and the root path @@ -2476,52 +2526,64 @@ impl Project { }) .collect(); for (worktree_id, worktree_abs_path, language) in language_server_lookup_info { - self.restart_language_server(worktree_id, worktree_abs_path, language, cx); + self.restart_language_servers(worktree_id, worktree_abs_path, language, cx); } None } - fn restart_language_server( + fn restart_language_servers( &mut self, worktree_id: WorktreeId, fallback_path: Arc, language: Arc, cx: &mut ModelContext, ) { - let adapter = if let Some(adapter) = language.lsp_adapter() { - adapter - } else { + let mut stops = Vec::new(); + for adapter in language.lsp_adapters() { + stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx)); + } + + if stops.is_empty() { return; - }; + } + let mut stops = stops.into_iter(); - let server_name = adapter.name.clone(); - let stop = self.stop_language_server(worktree_id, server_name.clone(), cx); cx.spawn_weak(|this, mut cx| async move { - let (original_root_path, orphaned_worktrees) = stop.await; - if let Some(this) = this.upgrade(&cx) { - 
this.update(&mut cx, |this, cx| { - // Attempt to restart using original server path. Fallback to passed in - // path if we could not retrieve the root path - let root_path = original_root_path - .map(|path_buf| Arc::from(path_buf.as_path())) - .unwrap_or(fallback_path); + let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await; + for stop in stops { + let (_, worktrees) = stop.await; + orphaned_worktrees.extend_from_slice(&worktrees); + } - this.start_language_server(worktree_id, root_path, language, cx); + let this = match this.upgrade(&cx) { + Some(this) => this, + None => return, + }; - // Lookup new server id and set it for each of the orphaned worktrees + this.update(&mut cx, |this, cx| { + // Attempt to restart using original server path. Fallback to passed in + // path if we could not retrieve the root path + let root_path = original_root_path + .map(|path_buf| Arc::from(path_buf.as_path())) + .unwrap_or(fallback_path); + + this.start_language_servers(worktree_id, root_path, language, cx); + + // Lookup new server ids and set them for each of the orphaned worktrees + for adapter in language.lsp_adapters() { if let Some(new_server_id) = this .language_server_ids - .get(&(worktree_id, server_name.clone())) + .get(&(worktree_id, adapter.name.clone())) .cloned() { for orphaned_worktree in orphaned_worktrees { this.language_server_ids - .insert((orphaned_worktree, server_name.clone()), new_server_id); + .insert((orphaned_worktree, adapter.name.clone()), new_server_id); } } - }); - } + } + }); }) .detach(); } @@ -3074,7 +3136,7 @@ impl Project { let file = File::from_dyn(buffer.file())?; let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx)); let server = self - .language_server_for_buffer(buffer, cx) + .primary_language_servers_for_buffer(buffer, cx) .map(|s| s.1.clone()); Some((buffer_handle, buffer_abs_path, server)) }) @@ -3323,7 +3385,7 @@ impl Project { if let Some(lsp_edits) = lsp_edits { this.update(cx, |this, cx| { - this.edits_from_lsp(buffer, lsp_edits, None, cx) + this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx) }) .await } else { @@ -3654,7 +3716,7 @@ impl Project { let buffer_id = buffer.remote_id(); if self.is_local() { - let lang_server = match self.language_server_for_buffer(buffer, cx) { + let lang_server = match self.primary_language_servers_for_buffer(buffer, cx) { Some((_, server)) => server.clone(), _ => return Task::ready(Ok(Default::default())), }; @@ -3667,7 +3729,13 @@ impl Project { if let Some(edits) = resolved_completion.additional_text_edits { let edits = this .update(&mut cx, |this, cx| { - this.edits_from_lsp(&buffer_handle, edits, None, cx) + this.edits_from_lsp( + &buffer_handle, + edits, + lang_server.server_id(), + None, + cx, + ) }) .await?; @@ -3757,12 +3825,13 @@ impl Project { ) -> Task> { if self.is_local() { let buffer = buffer_handle.read(cx); - let (lsp_adapter, lang_server) = - if let Some((adapter, server)) = self.language_server_for_buffer(buffer, cx) { - (adapter.clone(), server.clone()) - } else { - return Task::ready(Ok(Default::default())); - }; + let (lsp_adapter, lang_server) = if let Some((adapter, server)) = + self.language_server_for_buffer(buffer, action.server_id, cx) + { + (adapter.clone(), server.clone()) + } else { + return Task::ready(Ok(Default::default())); + }; let range = action.range.to_point_utf16(buffer); cx.spawn(|this, mut cx| async move { @@ -3896,6 +3965,7 @@ impl Project { .await?; } } + lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => { let 
source_abs_path = op .old_uri @@ -3912,6 +3982,7 @@ impl Project { ) .await?; } + lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => { let abs_path = op .uri @@ -3924,6 +3995,7 @@ impl Project { fs.remove_file(&abs_path, options).await?; } } + lsp::DocumentChangeOperation::Edit(op) => { let buffer_to_edit = this .update(cx, |this, cx| { @@ -3945,6 +4017,7 @@ impl Project { this.edits_from_lsp( &buffer_to_edit, edits, + language_server.server_id(), op.text_document.version, cx, ) @@ -4214,6 +4287,7 @@ impl Project { } } + // TODO: Wire this up to allow selecting a server? fn request_lsp( &self, buffer_handle: ModelHandle, @@ -4227,7 +4301,7 @@ impl Project { if self.is_local() { let file = File::from_dyn(buffer.file()).and_then(File::as_local); if let Some((file, language_server)) = file.zip( - self.language_server_for_buffer(buffer, cx) + self.primary_language_servers_for_buffer(buffer, cx) .map(|(_, server)| server.clone()), ) { let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx); @@ -4241,7 +4315,13 @@ impl Project { .await .context("lsp request failed")?; request - .response_from_lsp(response, this, buffer_handle, cx) + .response_from_lsp( + response, + this, + buffer_handle, + language_server.server_id(), + cx, + ) .await }); } @@ -4491,9 +4571,9 @@ impl Project { } for (buffer, old_path) in renamed_buffers { - self.unregister_buffer_from_language_server(&buffer, old_path, cx); + self.unregister_buffer_from_language_servers(&buffer, old_path, cx); self.detect_language_for_buffer(&buffer, cx); - self.register_buffer_with_language_server(&buffer, cx); + self.register_buffer_with_language_servers(&buffer, cx); } } @@ -6048,10 +6128,11 @@ impl Project { &mut self, buffer: &ModelHandle, lsp_edits: impl 'static + Send + IntoIterator, + server_id: usize, version: Option, cx: &mut ModelContext, ) -> Task, String)>>> { - let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx); + let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx); cx.background().spawn(async move { let snapshot = snapshot?; let mut lsp_edits = lsp_edits @@ -6150,6 +6231,7 @@ impl Project { fn buffer_snapshot_for_lsp_version( &mut self, buffer: &ModelHandle, + server_id: usize, version: Option, cx: &AppContext, ) -> Result { @@ -6160,51 +6242,85 @@ impl Project { let snapshots = self .buffer_snapshots .get_mut(&buffer_id) - .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?; - let found_snapshot = snapshots - .binary_search_by_key(&version, |e| e.0) - .map(|ix| snapshots[ix].1.clone()) - .map_err(|_| { - anyhow!( - "snapshot not found for buffer {} at version {}", - buffer_id, - version - ) + .and_then(|m| m.get_mut(&server_id)) + .ok_or_else(|| { + anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}") })?; - snapshots.retain(|(snapshot_version, _)| { - snapshot_version + OLD_VERSIONS_TO_RETAIN >= version - }); + + let found_snapshot = snapshots + .binary_search_by_key(&version, |e| e.version) + .map(|ix| snapshots[ix].snapshot.clone()) + .map_err(|_| { + anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}") + })?; + + snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version); Ok(found_snapshot) } else { Ok((buffer.read(cx)).text_snapshot()) } } - fn language_server_for_buffer( + fn running_language_servers_for_buffer( + &self, + buffer: &Buffer, + cx: &AppContext, + ) -> impl Iterator, &Arc)> { + 
self.language_server_ids_for_buffer(buffer, cx) + .into_iter() + .filter_map(|server_id| { + let server = self.language_servers.get(&server_id)?; + if let LanguageServerState::Running { + adapter, server, .. + } = server + { + Some((adapter, server)) + } else { + None + } + }) + } + + fn language_servers_for_buffer( + &self, + buffer: &Buffer, + cx: &AppContext, + ) -> Vec<(&Arc, &Arc)> { + self.running_language_servers_for_buffer(buffer, cx) + .collect() + } + + fn primary_language_servers_for_buffer( &self, buffer: &Buffer, cx: &AppContext, ) -> Option<(&Arc, &Arc)> { - let server_id = self.language_server_id_for_buffer(buffer, cx)?; - let server = self.language_servers.get(&server_id)?; - if let LanguageServerState::Running { - adapter, server, .. - } = server - { - Some((adapter, server)) - } else { - None - } + self.running_language_servers_for_buffer(buffer, cx).next() } - fn language_server_id_for_buffer(&self, buffer: &Buffer, cx: &AppContext) -> Option { + fn language_server_for_buffer( + &self, + buffer: &Buffer, + server_id: usize, + cx: &AppContext, + ) -> Option<(&Arc, &Arc)> { + self.running_language_servers_for_buffer(buffer, cx) + .find(|(_, s)| s.server_id() == server_id) + } + + fn language_server_ids_for_buffer(&self, buffer: &Buffer, cx: &AppContext) -> Vec { if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) { - let name = language.lsp_adapter()?.name.clone(); let worktree_id = file.worktree_id(cx); - let key = (worktree_id, name); - self.language_server_ids.get(&key).copied() + language + .lsp_adapters() + .iter() + .flat_map(|adapter| { + let key = (worktree_id, adapter.name.clone()); + self.language_server_ids.get(&key).copied() + }) + .collect() } else { - None + Vec::new() } } } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index b4bcba24db..09c3326739 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1573,6 +1573,7 @@ async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) { new_text: "".into(), }, ], + 0, Some(lsp_document_version), cx, ) @@ -1667,6 +1668,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestApp new_text: "".into(), }, ], + 0, None, cx, ) @@ -1770,6 +1772,7 @@ async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) { .unindent(), }, ], + 0, None, cx, ) @@ -2258,7 +2261,7 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) { ..Default::default() }, tree_sitter_rust::language(), - None, + vec![], |_| Default::default(), ); diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index ff7a882f1a..72b66f3d78 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -684,9 +684,10 @@ message SearchProjectResponse { } message CodeAction { - Anchor start = 1; - Anchor end = 2; - bytes lsp_action = 3; + uint64 server_id = 1; + Anchor start = 2; + Anchor end = 3; + bytes lsp_action = 4; } message ProjectTransaction { diff --git a/crates/zed/src/languages.rs b/crates/zed/src/languages.rs index 12e6c1b1f2..db5d5913cb 100644 --- a/crates/zed/src/languages.rs +++ b/crates/zed/src/languages.rs @@ -37,121 +37,107 @@ pub fn init( themes: Arc, node_runtime: Arc, ) { - for (name, grammar, lsp_adapter) in [ + fn adapter_arc(adapter: impl LspAdapter) -> Arc { + Arc::new(adapter) + } + + let languages_list = [ ( "c", tree_sitter_c::language(), - Some(Arc::new(c::CLspAdapter) as Arc), + vec![adapter_arc(c::CLspAdapter)], ), ( "cpp", 
tree_sitter_cpp::language(), - Some(Arc::new(c::CLspAdapter)), - ), - ( - "css", - tree_sitter_css::language(), - None, // + vec![adapter_arc(c::CLspAdapter)], ), + ("css", tree_sitter_css::language(), vec![]), ( "elixir", tree_sitter_elixir::language(), - Some(Arc::new(elixir::ElixirLspAdapter)), + vec![adapter_arc(elixir::ElixirLspAdapter)], ), ( "go", tree_sitter_go::language(), - Some(Arc::new(go::GoLspAdapter)), + vec![adapter_arc(go::GoLspAdapter)], ), ( "json", tree_sitter_json::language(), - Some(Arc::new(json::JsonLspAdapter::new( + vec![adapter_arc(json::JsonLspAdapter::new( node_runtime.clone(), languages.clone(), themes.clone(), - ))), - ), - ( - "markdown", - tree_sitter_markdown::language(), - None, // + ))], ), + ("markdown", tree_sitter_markdown::language(), vec![]), ( "python", tree_sitter_python::language(), - Some(Arc::new(python::PythonLspAdapter::new( + vec![adapter_arc(python::PythonLspAdapter::new( node_runtime.clone(), - ))), + ))], ), ( "rust", tree_sitter_rust::language(), - Some(Arc::new(rust::RustLspAdapter)), - ), - ( - "toml", - tree_sitter_toml::language(), - None, // + vec![adapter_arc(rust::RustLspAdapter)], ), + ("toml", tree_sitter_toml::language(), vec![]), ( "tsx", tree_sitter_typescript::language_tsx(), - Some(Arc::new(typescript::TypeScriptLspAdapter::new( + vec![adapter_arc(typescript::TypeScriptLspAdapter::new( node_runtime.clone(), - ))), + ))], ), ( "typescript", tree_sitter_typescript::language_typescript(), - Some(Arc::new(typescript::TypeScriptLspAdapter::new( + vec![adapter_arc(typescript::TypeScriptLspAdapter::new( node_runtime.clone(), - ))), + ))], ), ( "javascript", tree_sitter_typescript::language_tsx(), - Some(Arc::new(typescript::TypeScriptLspAdapter::new( + vec![adapter_arc(typescript::TypeScriptLspAdapter::new( node_runtime.clone(), - ))), + ))], ), ( "html", tree_sitter_html::language(), - Some(Arc::new(html::HtmlLspAdapter::new(node_runtime.clone()))), + vec![adapter_arc(html::HtmlLspAdapter::new(node_runtime.clone()))], ), ( "ruby", tree_sitter_ruby::language(), - Some(Arc::new(ruby::RubyLanguageServer)), + vec![adapter_arc(ruby::RubyLanguageServer)], ), ( "erb", tree_sitter_embedded_template::language(), - Some(Arc::new(ruby::RubyLanguageServer)), - ), - ( - "scheme", - tree_sitter_scheme::language(), - None, // - ), - ( - "racket", - tree_sitter_racket::language(), - None, // + vec![adapter_arc(ruby::RubyLanguageServer)], ), + ("scheme", tree_sitter_scheme::language(), vec![]), + ("racket", tree_sitter_racket::language(), vec![]), ( "lua", tree_sitter_lua::language(), - Some(Arc::new(lua::LuaLspAdapter)), + vec![adapter_arc(lua::LuaLspAdapter)], ), ( "yaml", tree_sitter_yaml::language(), - Some(Arc::new(yaml::YamlLspAdapter::new(node_runtime.clone()))), + vec![adapter_arc(yaml::YamlLspAdapter::new(node_runtime.clone()))], ), - ] { - languages.register(name, load_config(name), grammar, lsp_adapter, load_queries); + ]; + + for (name, grammar, lsp_adapters) in languages_list { + languages.register(name, load_config(name), grammar, lsp_adapters, load_queries); } } @@ -163,7 +149,7 @@ pub async fn language( ) -> Arc { Arc::new( Language::new(load_config(name), Some(grammar)) - .with_lsp_adapter(lsp_adapter) + .with_lsp_adapters(lsp_adapter) .await .with_queries(load_queries(name)) .unwrap(), diff --git a/crates/zed/src/languages/typescript.rs b/crates/zed/src/languages/typescript.rs index d6097d9b06..3121bfe81f 100644 --- a/crates/zed/src/languages/typescript.rs +++ b/crates/zed/src/languages/typescript.rs @@ -37,7 +37,7 @@ impl 
TypeScriptLspAdapter { } } -struct Versions { +struct TypeScriptVersions { typescript_version: String, server_version: String, } @@ -52,7 +52,8 @@ impl LspAdapter for TypeScriptLspAdapter { &self, _: Arc, ) -> Result> { - Ok(Box::new(Versions { + dbg!(); + Ok(Box::new(TypeScriptVersions { typescript_version: self.node.npm_package_latest_version("typescript").await?, server_version: self .node @@ -67,7 +68,8 @@ impl LspAdapter for TypeScriptLspAdapter { _: Arc, container_dir: PathBuf, ) -> Result { - let versions = versions.downcast::().unwrap(); + dbg!(); + let versions = versions.downcast::().unwrap(); let server_path = container_dir.join(Self::NEW_SERVER_PATH); if fs::metadata(&server_path).await.is_err() { @@ -92,18 +94,10 @@ impl LspAdapter for TypeScriptLspAdapter { } async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + dbg!(); (|| async move { - let mut last_version_dir = None; - let mut entries = fs::read_dir(&container_dir).await?; - while let Some(entry) = entries.next().await { - let entry = entry?; - if entry.file_type().await?.is_dir() { - last_version_dir = Some(entry.path()); - } - } - let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?; - let old_server_path = last_version_dir.join(Self::OLD_SERVER_PATH); - let new_server_path = last_version_dir.join(Self::NEW_SERVER_PATH); + let old_server_path = container_dir.join(Self::OLD_SERVER_PATH); + let new_server_path = container_dir.join(Self::NEW_SERVER_PATH); if new_server_path.exists() { Ok(LanguageServerBinary { path: self.node.binary_path().await?, @@ -117,7 +111,7 @@ impl LspAdapter for TypeScriptLspAdapter { } else { Err(anyhow!( "missing executable in directory {:?}", - last_version_dir + container_dir )) } })() @@ -170,6 +164,86 @@ impl LspAdapter for TypeScriptLspAdapter { } } +pub struct EsLintLspAdapter { + node: Arc, +} + +impl EsLintLspAdapter { + const SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.mjs"; + + pub fn new(node: Arc) -> Self { + EsLintLspAdapter { node } + } +} + +#[async_trait] +impl LspAdapter for EsLintLspAdapter { + async fn name(&self) -> LanguageServerName { + LanguageServerName("eslint".into()) + } + + async fn fetch_latest_server_version( + &self, + _: Arc, + ) -> Result> { + Ok(Box::new( + self.node.npm_package_latest_version("eslint").await?, + )) + } + + async fn fetch_server_binary( + &self, + versions: Box, + _: Arc, + container_dir: PathBuf, + ) -> Result { + let version = versions.downcast::().unwrap(); + let server_path = container_dir.join(Self::SERVER_PATH); + + if fs::metadata(&server_path).await.is_err() { + self.node + .npm_install_packages([("eslint", version.as_str())], &container_dir) + .await?; + } + + Ok(LanguageServerBinary { + path: self.node.binary_path().await?, + arguments: server_binary_arguments(&server_path), + }) + } + + async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + (|| async move { + let server_path = container_dir.join(Self::SERVER_PATH); + if server_path.exists() { + Ok(LanguageServerBinary { + path: self.node.binary_path().await?, + arguments: server_binary_arguments(&server_path), + }) + } else { + Err(anyhow!( + "missing executable in directory {:?}", + container_dir + )) + } + })() + .await + .log_err() + } + + async fn label_for_completion( + &self, + item: &lsp::CompletionItem, + language: &Arc, + ) -> Option { + None + } + + async fn initialization_options(&self) -> Option { + None + } +} + #[cfg(test)] mod tests { use gpui::TestAppContext; From 
6e68ff5a50088e56bc05277c516dc413a29419f3 Mon Sep 17 00:00:00 2001 From: Julia Date: Mon, 17 Apr 2023 16:55:10 -0400 Subject: [PATCH 08/26] =?UTF-8?q?Get=20it=20to=20build=20with=20multiple?= =?UTF-8?q?=20adapters=20per=20language!=20=F0=9F=8E=89?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Max Brunsfeld --- crates/project/src/project.rs | 128 ++++++++++++------------- crates/project/src/project_tests.rs | 1 + crates/project/src/worktree.rs | 19 ++-- crates/zed/src/languages.rs | 23 +++-- crates/zed/src/languages/typescript.rs | 18 ++-- 5 files changed, 100 insertions(+), 89 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 36cd76fe3d..5b571c0c0c 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1641,7 +1641,7 @@ impl Project { .1 .notify::( lsp::DidCloseTextDocumentParams { - text_document: lsp::TextDocumentIdentifier::new(uri), + text_document: lsp::TextDocumentIdentifier::new(uri.clone()), }, ) .log_err(); @@ -1670,17 +1670,17 @@ impl Project { let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); let initial_snapshot = buffer.text_snapshot(); + let language = buffer.language().cloned(); + let worktree_id = file.worktree_id(cx); if let Some(local_worktree) = file.worktree.read(cx).as_local() { - if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) { - self.update_buffer_diagnostics(buffer_handle, diagnostics, None, cx) + for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) { + self.update_buffer_diagnostics(buffer_handle, diagnostics, server_id, None, cx) .log_err(); } } - if let Some(language) = buffer.language() { - let worktree_id = file.worktree_id(cx); - + if let Some(language) = language { for adapter in language.lsp_adapters() { let language_id = adapter.language_ids.get(language.name().as_ref()).cloned(); let server = self @@ -1703,7 +1703,7 @@ impl Project { .notify::( lsp::DidOpenTextDocumentParams { text_document: lsp::TextDocumentItem::new( - uri, + uri.clone(), language_id.unwrap_or_default(), 0, initial_snapshot.text(), @@ -1726,7 +1726,7 @@ impl Project { let snapshot = LspBufferSnapshot { version: 0, - snapshot: initial_snapshot, + snapshot: initial_snapshot.clone(), }; self.buffer_snapshots .entry(buffer_id) @@ -1746,13 +1746,12 @@ impl Project { buffer.update(cx, |buffer, cx| { buffer.update_diagnostics(Default::default(), cx); self.buffer_snapshots.remove(&buffer.remote_id()); + let file_url = lsp::Url::from_file_path(old_path).unwrap(); for (_, language_server) in self.language_servers_for_buffer(buffer, cx) { language_server .notify::( lsp::DidCloseTextDocumentParams { - text_document: lsp::TextDocumentIdentifier::new( - lsp::Url::from_file_path(old_path).unwrap(), - ), + text_document: lsp::TextDocumentIdentifier::new(file_url.clone()), }, ) .log_err(); @@ -1856,7 +1855,12 @@ impl Project { let uri = lsp::Url::from_file_path(abs_path).unwrap(); let next_snapshot = buffer.text_snapshot(); - for (_, language_server) in self.language_servers_for_buffer(buffer, cx) { + let language_servers: Vec<_> = self + .language_servers_iter_for_buffer(buffer, cx) + .map(|i| i.1.clone()) + .collect(); + + for language_server in language_servers { let language_server = language_server.clone(); let buffer_snapshots = self @@ -1887,14 +1891,14 @@ impl Project { buffer_snapshots.push(LspBufferSnapshot { version: next_version, - snapshot: next_snapshot, + snapshot: next_snapshot.clone(), 
}); language_server .notify::( lsp::DidChangeTextDocumentParams { text_document: lsp::VersionedTextDocumentIdentifier::new( - uri, + uri.clone(), next_version, ), content_changes, @@ -1925,26 +1929,24 @@ impl Project { let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx); for language_server_id in language_server_ids { - let LanguageServerState::Running { + if let Some(LanguageServerState::Running { adapter, simulate_disk_based_diagnostics_completion, .. - } = match self.language_servers.get_mut(&language_server_id) { - Some(state) => state, - None => continue, - }; + }) = self.language_servers.get_mut(&language_server_id) + { + // After saving a buffer using a language server that doesn't provide + // a disk-based progress token, kick off a timer that will reset every + // time the buffer is saved. If the timer eventually fires, simulate + // disk-based diagnostics being finished so that other pieces of UI + // (e.g., project diagnostics view, diagnostic status bar) can update. + // We don't emit an event right away because the language server might take + // some time to publish diagnostics. + if adapter.disk_based_diagnostics_progress_token.is_none() { + const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration = + Duration::from_secs(1); - // After saving a buffer using a language server that doesn't provide - // a disk-based progress token, kick off a timer that will reset every - // time the buffer is saved. If the timer eventually fires, simulate - // disk-based diagnostics being finished so that other pieces of UI - // (e.g., project diagnostics view, diagnostic status bar) can update. - // We don't emit an event right away because the language server might take - // some time to publish diagnostics. - if adapter.disk_based_diagnostics_progress_token.is_none() { - const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration = Duration::from_secs(1); - - let task = cx.spawn_weak(|this, mut cx| async move { + let task = cx.spawn_weak(|this, mut cx| async move { cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await; if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx | { @@ -1958,7 +1960,8 @@ impl Project { }); } }); - *simulate_disk_based_diagnostics_completion = Some(task); + *simulate_disk_based_diagnostics_completion = Some(task); + } } } } @@ -2123,7 +2126,7 @@ impl Project { let adapters = language.lsp_adapters(); let language_servers = self.languages.start_language_servers( language.clone(), - worktree_path, + worktree_path.clone(), self.client.http_client(), cx, ); @@ -2143,19 +2146,18 @@ impl Project { _ => {} } - self.language_server_ids - .entry(key.clone()) - .or_insert_with(|| { - self.setup_language_adapter( - worktree_path, - initialization_options, - pending_server, - adapter, - &language, - key, - cx, - ) - }); + if !self.language_server_ids.contains_key(&key) { + let adapter = self.setup_language_adapter( + worktree_path.clone(), + initialization_options, + pending_server, + adapter.clone(), + language.clone(), + key.clone(), + cx, + ); + self.language_server_ids.insert(key.clone(), adapter); + } } } @@ -2164,8 +2166,8 @@ impl Project { worktree_path: Arc, initialization_options: Option, pending_server: PendingLanguageServer, - adapter: &Arc, - language: &Arc, + adapter: Arc, + language: Arc, key: (WorktreeId, LanguageServerName), cx: &mut ModelContext, ) -> usize { @@ -2409,7 +2411,7 @@ impl Project { let snapshot = versions.last().unwrap(); let version = snapshot.version; - let initial_snapshot = snapshot.snapshot; + let 
initial_snapshot = &snapshot.snapshot; let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); language_server .notify::( @@ -2532,6 +2534,7 @@ impl Project { None } + // TODO This will break in the case where the adapter's root paths and worktrees are not equal fn restart_language_servers( &mut self, worktree_id: WorktreeId, @@ -2568,7 +2571,7 @@ impl Project { .map(|path_buf| Arc::from(path_buf.as_path())) .unwrap_or(fallback_path); - this.start_language_servers(worktree_id, root_path, language, cx); + this.start_language_servers(worktree_id, root_path, language.clone(), cx); // Lookup new server ids and set them for each of the orphaned worktrees for adapter in language.lsp_adapters() { @@ -2577,7 +2580,7 @@ impl Project { .get(&(worktree_id, adapter.name.clone())) .cloned() { - for orphaned_worktree in orphaned_worktrees { + for &orphaned_worktree in &orphaned_worktrees { this.language_server_ids .insert((orphaned_worktree, adapter.name.clone()), new_server_id); } @@ -2948,7 +2951,7 @@ impl Project { pub fn update_diagnostic_entries( &mut self, - language_server_id: usize, + server_id: usize, abs_path: PathBuf, version: Option, diagnostics: Vec>>, @@ -2964,23 +2967,18 @@ impl Project { }; if let Some(buffer) = self.get_open_buffer(&project_path, cx) { - self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?; + self.update_buffer_diagnostics(&buffer, diagnostics.clone(), server_id, version, cx)?; } let updated = worktree.update(cx, |worktree, cx| { worktree .as_local_mut() .ok_or_else(|| anyhow!("not a local worktree"))? - .update_diagnostics( - language_server_id, - project_path.path.clone(), - diagnostics, - cx, - ) + .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx) })?; if updated { cx.emit(Event::DiagnosticsUpdated { - language_server_id, + language_server_id: server_id, path: project_path, }); } @@ -2991,6 +2989,7 @@ impl Project { &mut self, buffer: &ModelHandle, mut diagnostics: Vec>>, + server_id: usize, version: Option, cx: &mut ModelContext, ) -> Result<()> { @@ -3002,7 +3001,7 @@ impl Project { .then_with(|| a.message.cmp(&b.message)) } - let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?; + let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?; diagnostics.sort_unstable_by(|a, b| { Ordering::Equal @@ -6261,7 +6260,7 @@ impl Project { } } - fn running_language_servers_for_buffer( + pub fn language_servers_iter_for_buffer( &self, buffer: &Buffer, cx: &AppContext, @@ -6286,8 +6285,7 @@ impl Project { buffer: &Buffer, cx: &AppContext, ) -> Vec<(&Arc, &Arc)> { - self.running_language_servers_for_buffer(buffer, cx) - .collect() + self.language_servers_iter_for_buffer(buffer, cx).collect() } fn primary_language_servers_for_buffer( @@ -6295,7 +6293,7 @@ impl Project { buffer: &Buffer, cx: &AppContext, ) -> Option<(&Arc, &Arc)> { - self.running_language_servers_for_buffer(buffer, cx).next() + self.language_servers_iter_for_buffer(buffer, cx).next() } fn language_server_for_buffer( @@ -6304,7 +6302,7 @@ impl Project { server_id: usize, cx: &AppContext, ) -> Option<(&Arc, &Arc)> { - self.running_language_servers_for_buffer(buffer, cx) + self.language_servers_iter_for_buffer(buffer, cx) .find(|(_, s)| s.server_id() == server_id) } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 09c3326739..08f1768766 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1420,6 +1420,7 @@ async fn 
test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) { }, }, ], + 0, None, cx, ) diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index d0cf2faa7e..0e3c4d9ce1 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -67,7 +67,7 @@ pub struct LocalWorktree { is_scanning: (watch::Sender, watch::Receiver), _background_scanner_task: Task<()>, share: Option, - diagnostics: HashMap, Vec>>>, + diagnostics: HashMap, Vec<(usize, Vec>>)>>, diagnostic_summaries: TreeMap, client: Arc, fs: Arc, @@ -514,13 +514,13 @@ impl LocalWorktree { pub fn diagnostics_for_path( &self, path: &Path, - ) -> Option>>> { - self.diagnostics.get(path).cloned() + ) -> Vec<(usize, Vec>>)> { + self.diagnostics.get(path).cloned().unwrap_or_default() } pub fn update_diagnostics( &mut self, - language_server_id: usize, + server_id: usize, worktree_path: Arc, diagnostics: Vec>>, _: &mut ModelContext, @@ -530,11 +530,16 @@ impl LocalWorktree { .diagnostic_summaries .remove(&PathKey(worktree_path.clone())) .unwrap_or_default(); - let new_summary = DiagnosticSummary::new(language_server_id, &diagnostics); + let new_summary = DiagnosticSummary::new(server_id, &diagnostics); if !new_summary.is_empty() { self.diagnostic_summaries .insert(PathKey(worktree_path.clone()), new_summary); - self.diagnostics.insert(worktree_path.clone(), diagnostics); + let diagnostics_by_server_id = + self.diagnostics.entry(worktree_path.clone()).or_default(); + let ix = match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { + Ok(ix) | Err(ix) => ix, + }; + diagnostics_by_server_id[ix] = (server_id, diagnostics); } let updated = !old_summary.is_empty() || !new_summary.is_empty(); @@ -546,7 +551,7 @@ impl LocalWorktree { worktree_id: self.id().to_proto(), summary: Some(proto::DiagnosticSummary { path: worktree_path.to_string_lossy().to_string(), - language_server_id: language_server_id as u64, + language_server_id: server_id as u64, error_count: new_summary.error_count as u32, warning_count: new_summary.warning_count as u32, }), diff --git a/crates/zed/src/languages.rs b/crates/zed/src/languages.rs index db5d5913cb..4dc54f7a9b 100644 --- a/crates/zed/src/languages.rs +++ b/crates/zed/src/languages.rs @@ -89,23 +89,26 @@ pub fn init( ( "tsx", tree_sitter_typescript::language_tsx(), - vec![adapter_arc(typescript::TypeScriptLspAdapter::new( - node_runtime.clone(), - ))], + vec![ + adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), + adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), + ], ), ( "typescript", tree_sitter_typescript::language_typescript(), - vec![adapter_arc(typescript::TypeScriptLspAdapter::new( - node_runtime.clone(), - ))], + vec![ + adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), + adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), + ], ), ( "javascript", tree_sitter_typescript::language_tsx(), - vec![adapter_arc(typescript::TypeScriptLspAdapter::new( - node_runtime.clone(), - ))], + vec![ + adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), + adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), + ], ), ( "html", @@ -149,7 +152,7 @@ pub async fn language( ) -> Arc { Arc::new( Language::new(load_config(name), Some(grammar)) - .with_lsp_adapters(lsp_adapter) + .with_lsp_adapters(lsp_adapter.into_iter().collect()) .await .with_queries(load_queries(name)) .unwrap(), diff --git a/crates/zed/src/languages/typescript.rs 
b/crates/zed/src/languages/typescript.rs index 3121bfe81f..26fb3831e3 100644 --- a/crates/zed/src/languages/typescript.rs +++ b/crates/zed/src/languages/typescript.rs @@ -15,7 +15,7 @@ use std::{ use util::http::HttpClient; use util::ResultExt; -fn server_binary_arguments(server_path: &Path) -> Vec { +fn typescript_server_binary_arguments(server_path: &Path) -> Vec { vec![ server_path.into(), "--stdio".into(), @@ -24,6 +24,10 @@ fn server_binary_arguments(server_path: &Path) -> Vec { ] } +fn eslint_server_binary_arguments(server_path: &Path) -> Vec { + vec![server_path.into(), "--stdin".into()] +} + pub struct TypeScriptLspAdapter { node: Arc, } @@ -89,7 +93,7 @@ impl LspAdapter for TypeScriptLspAdapter { Ok(LanguageServerBinary { path: self.node.binary_path().await?, - arguments: server_binary_arguments(&server_path), + arguments: typescript_server_binary_arguments(&server_path), }) } @@ -101,12 +105,12 @@ impl LspAdapter for TypeScriptLspAdapter { if new_server_path.exists() { Ok(LanguageServerBinary { path: self.node.binary_path().await?, - arguments: server_binary_arguments(&new_server_path), + arguments: typescript_server_binary_arguments(&new_server_path), }) } else if old_server_path.exists() { Ok(LanguageServerBinary { path: self.node.binary_path().await?, - arguments: server_binary_arguments(&old_server_path), + arguments: typescript_server_binary_arguments(&old_server_path), }) } else { Err(anyhow!( @@ -169,7 +173,7 @@ pub struct EsLintLspAdapter { } impl EsLintLspAdapter { - const SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.mjs"; + const SERVER_PATH: &'static str = "node_modules/eslint/bin/eslint.js"; pub fn new(node: Arc) -> Self { EsLintLspAdapter { node } @@ -208,7 +212,7 @@ impl LspAdapter for EsLintLspAdapter { Ok(LanguageServerBinary { path: self.node.binary_path().await?, - arguments: server_binary_arguments(&server_path), + arguments: eslint_server_binary_arguments(&server_path), }) } @@ -218,7 +222,7 @@ impl LspAdapter for EsLintLspAdapter { if server_path.exists() { Ok(LanguageServerBinary { path: self.node.binary_path().await?, - arguments: server_binary_arguments(&server_path), + arguments: eslint_server_binary_arguments(&server_path), }) } else { Err(anyhow!( From 2a5c0fa5f8e719877e916b262a80bd389fb29877 Mon Sep 17 00:00:00 2001 From: Julia Date: Tue, 18 Apr 2023 10:44:58 -0400 Subject: [PATCH 09/26] Get ESLint to launch and provide diagnostics Co-Authored-By: Antonio Scandurra --- crates/lsp/src/lsp.rs | 4 ++ crates/project/src/project.rs | 1 + crates/project/src/worktree.rs | 13 +++-- crates/zed/src/languages.rs | 6 +-- crates/zed/src/languages/typescript.rs | 72 +++++++++++++++++++++++--- 5 files changed, 82 insertions(+), 14 deletions(-) diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 1e37c9765b..b968cbbb30 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -250,6 +250,10 @@ impl LanguageServer { log::trace!("incoming message:{}", String::from_utf8_lossy(&buffer)); if let Ok(msg) = serde_json::from_slice::(&buffer) { + dbg!( + msg.method, + notification_handlers.lock().keys().collect::>() + ); if let Some(handler) = notification_handlers.lock().get_mut(msg.method) { handler(msg.id, msg.params.get(), cx.clone()); } else { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 5b571c0c0c..536d803fea 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2217,6 +2217,7 @@ impl Project { move |params, mut cx| { let languages = languages.clone(); 
async move { + dbg!(¶ms.items); let workspace_config = cx.update(|cx| languages.workspace_configuration(cx)).await; Ok(params diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 0e3c4d9ce1..5a0559f54a 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -536,10 +536,15 @@ impl LocalWorktree { .insert(PathKey(worktree_path.clone()), new_summary); let diagnostics_by_server_id = self.diagnostics.entry(worktree_path.clone()).or_default(); - let ix = match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { - Ok(ix) | Err(ix) => ix, - }; - diagnostics_by_server_id[ix] = (server_id, diagnostics); + match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { + Ok(ix) => { + diagnostics_by_server_id[ix] = (server_id, diagnostics); + } + + Err(ix) => { + diagnostics_by_server_id.insert(ix, (server_id, diagnostics)); + } + } } let updated = !old_summary.is_empty() || !new_summary.is_empty(); diff --git a/crates/zed/src/languages.rs b/crates/zed/src/languages.rs index 4dc54f7a9b..75d12a555d 100644 --- a/crates/zed/src/languages.rs +++ b/crates/zed/src/languages.rs @@ -90,7 +90,7 @@ pub fn init( "tsx", tree_sitter_typescript::language_tsx(), vec![ - adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), + // adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), ], ), @@ -98,7 +98,7 @@ pub fn init( "typescript", tree_sitter_typescript::language_typescript(), vec![ - adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), + // adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), ], ), @@ -106,7 +106,7 @@ pub fn init( "javascript", tree_sitter_typescript::language_tsx(), vec![ - adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), + // adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), ], ), diff --git a/crates/zed/src/languages/typescript.rs b/crates/zed/src/languages/typescript.rs index 26fb3831e3..ac3baaa8e4 100644 --- a/crates/zed/src/languages/typescript.rs +++ b/crates/zed/src/languages/typescript.rs @@ -1,14 +1,16 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; -use futures::StreamExt; +use futures::{future::BoxFuture, FutureExt, StreamExt}; +use gpui::{AppContext, Task}; use language::{LanguageServerBinary, LanguageServerName, LspAdapter}; use lsp::CodeActionKind; use node_runtime::NodeRuntime; -use serde_json::json; +use serde_json::{json, Map, Value}; use smol::fs; use std::{ any::Any, ffi::OsString, + future, path::{Path, PathBuf}, sync::Arc, }; @@ -25,7 +27,7 @@ fn typescript_server_binary_arguments(server_path: &Path) -> Vec { } fn eslint_server_binary_arguments(server_path: &Path) -> Vec { - vec![server_path.into(), "--stdin".into()] + vec![server_path.into(), "--stdio".into()] } pub struct TypeScriptLspAdapter { @@ -56,7 +58,6 @@ impl LspAdapter for TypeScriptLspAdapter { &self, _: Arc, ) -> Result> { - dbg!(); Ok(Box::new(TypeScriptVersions { typescript_version: self.node.npm_package_latest_version("typescript").await?, server_version: self @@ -173,15 +174,67 @@ pub struct EsLintLspAdapter { } impl EsLintLspAdapter { - const SERVER_PATH: &'static str = "node_modules/eslint/bin/eslint.js"; + const SERVER_PATH: &'static str = + 
"node_modules/vscode-langservers-extracted/lib/eslint-language-server/eslintServer.js"; pub fn new(node: Arc) -> Self { EsLintLspAdapter { node } } } +// "workspaceFolder": { +// "name": "testing_ts", +// "uri": "file:///Users/julia/Stuff/testing_ts" +// }, +// "workingDirectory": "file:///Users/julia/Stuff/testing_ts", +// "nodePath": "/opt/homebrew/opt/node@18/bin/node", +// "experimental": {}, + #[async_trait] impl LspAdapter for EsLintLspAdapter { + fn workspace_configuration(&self, _: &mut AppContext) -> Option> { + Some( + future::ready(json!({ + "": { + "validate": "on", + "packageManager": "npm", + "useESLintClass": false, + "experimental": { + "useFlatConfig": false + }, + "codeActionOnSave": { + "mode": "all" + }, + "format": false, + "quiet": false, + "onIgnoredFiles": "off", + "options": {}, + "rulesCustomizations": [], + "run": "onType", + "problems": { + "shortenToSingleLine": false + }, + "nodePath": null, + "workspaceFolder": { + "name": "testing_ts", + "uri": "file:///Users/julia/Stuff/testing_ts" + }, + "codeAction": { + "disableRuleComment": { + "enable": true, + "location": "separateLine", + "commentStyle": "line" + }, + "showDocumentation": { + "enable": true + } + } + } + })) + .boxed(), + ) + } + async fn name(&self) -> LanguageServerName { LanguageServerName("eslint".into()) } @@ -191,7 +244,9 @@ impl LspAdapter for EsLintLspAdapter { _: Arc, ) -> Result> { Ok(Box::new( - self.node.npm_package_latest_version("eslint").await?, + self.node + .npm_package_latest_version("vscode-langservers-extracted") + .await?, )) } @@ -206,7 +261,10 @@ impl LspAdapter for EsLintLspAdapter { if fs::metadata(&server_path).await.is_err() { self.node - .npm_install_packages([("eslint", version.as_str())], &container_dir) + .npm_install_packages( + [("vscode-langservers-extracted", version.as_str())], + &container_dir, + ) .await?; } From bb4de47b15bda56aca412547b253195f3c1b85a6 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 19 Apr 2023 13:55:29 -0400 Subject: [PATCH 10/26] Start getting diagnostics sets to work with multiple servers Co-Authored-By: Nathan Sobo --- crates/language/src/buffer.rs | 74 ++++++++++++++++++++++++----- crates/language/src/buffer_tests.rs | 2 +- crates/language/src/proto.rs | 11 ++++- crates/project/src/project.rs | 10 ++-- crates/rpc/proto/zed.proto | 3 +- 5 files changed, 79 insertions(+), 21 deletions(-) diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index d24b9f7033..90d6194801 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -13,6 +13,7 @@ use crate::{ }; use anyhow::{anyhow, Result}; use clock::ReplicaId; +use collections::HashMap; use fs::LineEnding; use futures::FutureExt as _; use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task}; @@ -71,7 +72,7 @@ pub struct Buffer { syntax_map: Mutex, parsing_in_background: bool, parse_count: usize, - diagnostics: DiagnosticSet, + diagnostics: HashMap, // server_id -> diagnostic set remote_selections: TreeMap, selections_update_count: usize, diagnostics_update_count: usize, @@ -88,7 +89,7 @@ pub struct BufferSnapshot { pub git_diff: git::diff::BufferDiff, pub(crate) syntax: SyntaxSnapshot, file: Option>, - diagnostics: DiagnosticSet, + diagnostics: HashMap, // server_id -> diagnostic set diagnostics_update_count: usize, file_update_count: usize, git_diff_update_count: usize, @@ -164,16 +165,20 @@ pub struct CodeAction { #[derive(Clone, Debug, PartialEq, Eq)] pub enum Operation { Buffer(text::Operation), + UpdateDiagnostics { + server_id: 
usize, diagnostics: Arc<[DiagnosticEntry]>, lamport_timestamp: clock::Lamport, }, + UpdateSelections { selections: Arc<[Selection]>, lamport_timestamp: clock::Lamport, line_mode: bool, cursor_shape: CursorShape, }, + UpdateCompletionTriggers { triggers: Vec, lamport_timestamp: clock::Lamport, @@ -409,6 +414,7 @@ impl Buffer { ) -> Task> { let mut operations = Vec::new(); operations.extend(self.deferred_ops.iter().map(proto::serialize_operation)); + operations.extend(self.remote_selections.iter().map(|(_, set)| { proto::serialize_operation(&Operation::UpdateSelections { selections: set.selections.clone(), @@ -417,10 +423,15 @@ impl Buffer { cursor_shape: set.cursor_shape, }) })); - operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics { - diagnostics: self.diagnostics.iter().cloned().collect(), - lamport_timestamp: self.diagnostics_timestamp, - })); + + for (server_id, diagnostics) in &self.diagnostics { + operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics { + lamport_timestamp: self.diagnostics_timestamp, + server_id: *server_id, + diagnostics: diagnostics.iter().cloned().collect(), + })); + } + operations.push(proto::serialize_operation( &Operation::UpdateCompletionTriggers { triggers: self.completion_triggers.clone(), @@ -866,13 +877,19 @@ impl Buffer { cx.notify(); } - pub fn update_diagnostics(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext) { + pub fn update_diagnostics( + &mut self, + server_id: usize, + diagnostics: DiagnosticSet, + cx: &mut ModelContext, + ) { let lamport_timestamp = self.text.lamport_clock.tick(); let op = Operation::UpdateDiagnostics { + server_id, diagnostics: diagnostics.iter().cloned().collect(), lamport_timestamp, }; - self.apply_diagnostic_update(diagnostics, lamport_timestamp, cx); + self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx); self.send_operation(op, cx); } @@ -1580,11 +1597,13 @@ impl Buffer { unreachable!("buffer operations should never be applied at this layer") } Operation::UpdateDiagnostics { + server_id, diagnostics: diagnostic_set, lamport_timestamp, } => { let snapshot = self.snapshot(); self.apply_diagnostic_update( + server_id, DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot), lamport_timestamp, cx, @@ -1626,12 +1645,13 @@ impl Buffer { fn apply_diagnostic_update( &mut self, + server_id: usize, diagnostics: DiagnosticSet, lamport_timestamp: clock::Lamport, cx: &mut ModelContext, ) { if lamport_timestamp > self.diagnostics_timestamp { - self.diagnostics = diagnostics; + self.diagnostics.insert(server_id, diagnostics); self.diagnostics_timestamp = lamport_timestamp; self.diagnostics_update_count += 1; self.text.lamport_clock.observe(lamport_timestamp); @@ -2505,14 +2525,40 @@ impl BufferSnapshot { ) -> impl 'a + Iterator> where T: 'a + Clone + ToOffset, - O: 'a + FromAnchor, + O: 'a + FromAnchor + Ord, { - self.diagnostics.range(search_range, self, true, reversed) + let mut iterators: Vec<_> = self + .diagnostics + .values() + .map(|collection| { + collection + .range::(search_range.clone(), self, true, reversed) + .peekable() + }) + .collect(); + + std::iter::from_fn(move || { + let (next_ix, _) = iterators + .iter_mut() + .enumerate() + .flat_map(|(ix, iter)| Some((ix, iter.peek()?))) + .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?; + iterators[next_ix].next() + }) } pub fn diagnostic_groups(&self) -> Vec> { let mut groups = Vec::new(); - self.diagnostics.groups(&mut groups, self); + for diagnostics in 
self.diagnostics.values() { + diagnostics.groups(&mut groups, self); + } + + groups.sort_by(|a, b| { + let a_start = &a.entries[a.primary_ix].range.start; + let b_start = &b.entries[b.primary_ix].range.start; + a_start.cmp(b_start, self) + }); + groups } @@ -2523,7 +2569,9 @@ impl BufferSnapshot { where O: 'a + FromAnchor, { - self.diagnostics.group(group_id, self) + self.diagnostics + .values() + .flat_map(move |set| set.group(group_id, self)) } pub fn diagnostics_update_count(&self) -> usize { diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 4675e4e9dc..6b6ce041f7 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -1866,7 +1866,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { buffer, ); log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics); - buffer.update_diagnostics(diagnostics, cx); + buffer.update_diagnostics(0, diagnostics, cx); }); mutation_count -= 1; } diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index fb50f2a743..e4963e0882 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -40,6 +40,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { crate::Operation::Buffer(text::Operation::Edit(edit)) => { proto::operation::Variant::Edit(serialize_edit_operation(edit)) } + crate::Operation::Buffer(text::Operation::Undo { undo, lamport_timestamp, @@ -58,6 +59,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { }) .collect(), }), + crate::Operation::UpdateSelections { selections, line_mode, @@ -70,14 +72,18 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { line_mode: *line_mode, cursor_shape: serialize_cursor_shape(cursor_shape) as i32, }), + crate::Operation::UpdateDiagnostics { - diagnostics, lamport_timestamp, + server_id, + diagnostics, } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics { replica_id: lamport_timestamp.replica_id as u32, lamport_timestamp: lamport_timestamp.value, + server_id: *server_id as u64, diagnostics: serialize_diagnostics(diagnostics.iter()), }), + crate::Operation::UpdateCompletionTriggers { triggers, lamport_timestamp, @@ -267,11 +273,12 @@ pub fn deserialize_operation(message: proto::Operation) -> Result { crate::Operation::UpdateDiagnostics { - diagnostics: deserialize_diagnostics(message.diagnostics), lamport_timestamp: clock::Lamport { replica_id: message.replica_id as ReplicaId, value: message.lamport_timestamp, }, + server_id: message.server_id as usize, + diagnostics: deserialize_diagnostics(message.diagnostics), } } proto::operation::Variant::UpdateCompletionTriggers(message) => { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 536d803fea..6ace10f6df 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1675,7 +1675,7 @@ impl Project { if let Some(local_worktree) = file.worktree.read(cx).as_local() { for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) { - self.update_buffer_diagnostics(buffer_handle, diagnostics, server_id, None, cx) + self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx) .log_err(); } } @@ -2968,7 +2968,7 @@ impl Project { }; if let Some(buffer) = self.get_open_buffer(&project_path, cx) { - self.update_buffer_diagnostics(&buffer, diagnostics.clone(), server_id, version, cx)?; + self.update_buffer_diagnostics(&buffer, 
server_id, version, diagnostics.clone(), cx)?; } let updated = worktree.update(cx, |worktree, cx| { @@ -2989,9 +2989,9 @@ impl Project { fn update_buffer_diagnostics( &mut self, buffer: &ModelHandle, - mut diagnostics: Vec>>, server_id: usize, version: Option, + mut diagnostics: Vec>>, cx: &mut ModelContext, ) -> Result<()> { fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering { @@ -3053,7 +3053,9 @@ impl Project { drop(edits_since_save); let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot); - buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx)); + buffer.update(cx, |buffer, cx| { + buffer.update_diagnostics(server_id, set, cx) + }); Ok(()) } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 72b66f3d78..d8272f21f2 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -861,7 +861,8 @@ message IncomingContactRequest { message UpdateDiagnostics { uint32 replica_id = 1; uint32 lamport_timestamp = 2; - repeated Diagnostic diagnostics = 3; + uint64 server_id = 3; + repeated Diagnostic diagnostics = 4; } message Follow { From 6156dbced0e9b227556e8c4de1b828db13c84538 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 19 Apr 2023 15:28:09 -0400 Subject: [PATCH 11/26] Finish getting multiple diagnostics sources building and running --- crates/editor/src/hover_popover.rs | 2 +- crates/editor/src/multi_buffer.rs | 2 +- crates/language/src/language.rs | 2 +- crates/lsp/src/lsp.rs | 4 --- crates/project/src/project.rs | 34 +++++++++++++++++++------- crates/project/src/project_tests.rs | 5 ++-- crates/zed/src/languages.rs | 6 ++--- crates/zed/src/languages/typescript.rs | 2 -- 8 files changed, 34 insertions(+), 23 deletions(-) diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 76a7d41e10..f673593413 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -620,7 +620,7 @@ mod tests { }], &snapshot, ); - buffer.update_diagnostics(set, cx); + buffer.update_diagnostics(0, set, cx); }); // Hover pops diagnostic immediately diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index f3a3c9b00f..bf2f12e82e 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -2800,7 +2800,7 @@ impl MultiBufferSnapshot { ) -> impl Iterator> + 'a where T: 'a + ToOffset, - O: 'a + text::FromAnchor, + O: 'a + text::FromAnchor + Ord, { self.as_singleton() .into_iter() diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 7e65a73ffc..7064fa5055 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1588,7 +1588,7 @@ mod tests { ..Default::default() }, tree_sitter_javascript::language(), - None, + vec![], |_| Default::default(), ); diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index b968cbbb30..1e37c9765b 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -250,10 +250,6 @@ impl LanguageServer { log::trace!("incoming message:{}", String::from_utf8_lossy(&buffer)); if let Ok(msg) = serde_json::from_slice::(&buffer) { - dbg!( - msg.method, - notification_handlers.lock().keys().collect::>() - ); if let Some(handler) = notification_handlers.lock().get_mut(msg.method) { handler(msg.id, msg.params.get(), cx.clone()); } else { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 6ace10f6df..b0973f1b24 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1547,12 +1547,13 @@ 
impl Project { cx: &mut ModelContext, ) -> Task> { let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx); - let old_path = - File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx))); + let old_file = File::from_dyn(buffer.read(cx).file()) + .filter(|f| f.is_local()) + .cloned(); cx.spawn(|this, mut cx| async move { - if let Some(old_path) = old_path { + if let Some(old_file) = &old_file { this.update(&mut cx, |this, cx| { - this.unregister_buffer_from_language_servers(&buffer, old_path, cx); + this.unregister_buffer_from_language_servers(&buffer, old_file, cx); }); } let (worktree, path) = worktree_task.await?; @@ -1740,11 +1741,24 @@ impl Project { fn unregister_buffer_from_language_servers( &mut self, buffer: &ModelHandle, - old_path: PathBuf, + old_file: &File, cx: &mut ModelContext, ) { + let old_path = match old_file.as_local() { + Some(local) => local.abs_path(cx), + None => return, + }; + buffer.update(cx, |buffer, cx| { - buffer.update_diagnostics(Default::default(), cx); + let worktree_id = old_file.worktree_id(cx); + let ids = &self.language_server_ids; + + let language = buffer.language().cloned(); + let adapters = language.iter().flat_map(|language| language.lsp_adapters()); + for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) { + buffer.update_diagnostics(server_id, Default::default(), cx); + } + self.buffer_snapshots.remove(&buffer.remote_id()); let file_url = lsp::Url::from_file_path(old_path).unwrap(); for (_, language_server) in self.language_servers_for_buffer(buffer, cx) { @@ -4501,8 +4515,10 @@ impl Project { cx: &mut ModelContext, ) { let snapshot = worktree_handle.read(cx).snapshot(); + let mut buffers_to_delete = Vec::new(); let mut renamed_buffers = Vec::new(); + for (buffer_id, buffer) in &self.opened_buffers { if let Some(buffer) = buffer.upgrade(cx) { buffer.update(cx, |buffer, cx| { @@ -4545,7 +4561,7 @@ impl Project { let old_path = old_file.abs_path(cx); if new_file.abs_path(cx) != old_path { - renamed_buffers.push((cx.handle(), old_path)); + renamed_buffers.push((cx.handle(), old_file.clone())); } if new_file != *old_file { @@ -4572,8 +4588,8 @@ impl Project { self.opened_buffers.remove(&buffer_id); } - for (buffer, old_path) in renamed_buffers { - self.unregister_buffer_from_language_servers(&buffer, old_path, cx); + for (buffer, old_file) in renamed_buffers { + self.unregister_buffer_from_language_servers(&buffer, &old_file, cx); self.detect_language_for_buffer(&buffer, cx); self.register_buffer_with_language_servers(&buffer, cx); } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 08f1768766..b3a7cc88b8 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -303,6 +303,7 @@ async fn test_managing_language_servers( rust_buffer2.update(cx, |buffer, cx| { buffer.update_diagnostics( + 0, DiagnosticSet::from_sorted_entries( vec![DiagnosticEntry { diagnostic: Default::default(), @@ -1402,6 +1403,8 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) { project .update_buffer_diagnostics( &buffer, + 0, + None, vec![ DiagnosticEntry { range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)), @@ -1420,8 +1423,6 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) { }, }, ], - 0, - None, cx, ) .unwrap(); diff --git a/crates/zed/src/languages.rs b/crates/zed/src/languages.rs index 75d12a555d..4dc54f7a9b 100644 --- a/crates/zed/src/languages.rs +++ 
b/crates/zed/src/languages.rs @@ -90,7 +90,7 @@ pub fn init( "tsx", tree_sitter_typescript::language_tsx(), vec![ - // adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), + adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), ], ), @@ -98,7 +98,7 @@ pub fn init( "typescript", tree_sitter_typescript::language_typescript(), vec![ - // adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), + adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), ], ), @@ -106,7 +106,7 @@ pub fn init( "javascript", tree_sitter_typescript::language_tsx(), vec![ - // adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), + adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), ], ), diff --git a/crates/zed/src/languages/typescript.rs b/crates/zed/src/languages/typescript.rs index ac3baaa8e4..ceb1d85402 100644 --- a/crates/zed/src/languages/typescript.rs +++ b/crates/zed/src/languages/typescript.rs @@ -73,7 +73,6 @@ impl LspAdapter for TypeScriptLspAdapter { _: Arc, container_dir: PathBuf, ) -> Result { - dbg!(); let versions = versions.downcast::().unwrap(); let server_path = container_dir.join(Self::NEW_SERVER_PATH); @@ -99,7 +98,6 @@ impl LspAdapter for TypeScriptLspAdapter { } async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { - dbg!(); (|| async move { let old_server_path = container_dir.join(Self::OLD_SERVER_PATH); let new_server_path = container_dir.join(Self::NEW_SERVER_PATH); From df94aee75839befb2b6d102807c3e83b5777b23c Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 19 Apr 2023 15:57:20 -0400 Subject: [PATCH 12/26] Fix failing tests Co-Authored-By: Max Brunsfeld --- crates/language/src/language.rs | 4 +++- crates/project/src/project_tests.rs | 4 ++-- crates/zed/src/languages/typescript.rs | 6 +++--- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 7064fa5055..38548c02db 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -817,7 +817,9 @@ impl LanguageRegistry { .detach(); Ok(server) }); - return vec![PendingLanguageServer { server_id: 0, task }]; + + let server_id = post_inc(&mut self.state.write().next_language_server_id); + return vec![PendingLanguageServer { server_id, task }]; } let download_dir = self diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index b3a7cc88b8..b6e9741aec 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -400,7 +400,7 @@ async fn test_managing_language_servers( .text_document, lsp::TextDocumentItem { uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(), - version: 1, + version: 0, text: rust_buffer.read_with(cx, |buffer, _| buffer.text()), language_id: Default::default() } @@ -427,7 +427,7 @@ async fn test_managing_language_servers( }, lsp::TextDocumentItem { uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(), - version: 1, + version: 0, text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()), language_id: Default::default() } diff --git a/crates/zed/src/languages/typescript.rs b/crates/zed/src/languages/typescript.rs index ceb1d85402..4587aa5bfd 100644 --- a/crates/zed/src/languages/typescript.rs +++ 
b/crates/zed/src/languages/typescript.rs @@ -1,11 +1,11 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; -use futures::{future::BoxFuture, FutureExt, StreamExt}; -use gpui::{AppContext, Task}; +use futures::{future::BoxFuture, FutureExt}; +use gpui::AppContext; use language::{LanguageServerBinary, LanguageServerName, LspAdapter}; use lsp::CodeActionKind; use node_runtime::NodeRuntime; -use serde_json::{json, Map, Value}; +use serde_json::{json, Value}; use smol::fs; use std::{ any::Any, From 26abc824a9238ae5b760e0a2af8c743985b8f783 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 19 Apr 2023 15:57:28 -0400 Subject: [PATCH 13/26] Bump protocol version --- crates/rpc/src/rpc.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rpc/src/rpc.rs b/crates/rpc/src/rpc.rs index 898c8c5e98..f64e6bea4c 100644 --- a/crates/rpc/src/rpc.rs +++ b/crates/rpc/src/rpc.rs @@ -6,4 +6,4 @@ pub use conn::Connection; pub use peer::*; mod macros; -pub const PROTOCOL_VERSION: u32 = 51; +pub const PROTOCOL_VERSION: u32 = 52; From c59204c5e6f145baa12398218a55afc0d086a064 Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 19 Apr 2023 15:57:51 -0400 Subject: [PATCH 14/26] Cleanup Co-Authored-By: Max Brunsfeld --- crates/project/src/project.rs | 6 ++---- crates/zed/src/languages/typescript.rs | 4 ++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b0973f1b24..3963c42c46 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2161,8 +2161,7 @@ impl Project { } if !self.language_server_ids.contains_key(&key) { - let adapter = self.setup_language_adapter( - worktree_path.clone(), + let adapter = self.setup_pending_language_server( initialization_options, pending_server, adapter.clone(), @@ -2175,9 +2174,8 @@ impl Project { } } - fn setup_language_adapter( + fn setup_pending_language_server( &mut self, - worktree_path: Arc, initialization_options: Option, pending_server: PendingLanguageServer, adapter: Arc, diff --git a/crates/zed/src/languages/typescript.rs b/crates/zed/src/languages/typescript.rs index 4587aa5bfd..b422e99926 100644 --- a/crates/zed/src/languages/typescript.rs +++ b/crates/zed/src/languages/typescript.rs @@ -293,8 +293,8 @@ impl LspAdapter for EsLintLspAdapter { async fn label_for_completion( &self, - item: &lsp::CompletionItem, - language: &Arc, + _item: &lsp::CompletionItem, + _language: &Arc, ) -> Option { None } From 9e2949e7baa848ce20717a27170b97d13dd7138a Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 19 Apr 2023 16:30:24 -0400 Subject: [PATCH 15/26] Refactor language server startup Avoid parallel vecs Co-Authored-By: Max Brunsfeld --- crates/language/src/language.rs | 99 +++--- crates/project/src/project.rs | 559 ++++++++++++++++---------------- 2 files changed, 325 insertions(+), 333 deletions(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 38548c02db..6c440e116b 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -782,13 +782,14 @@ impl LanguageRegistry { self.state.read().languages.iter().cloned().collect() } - pub fn start_language_servers( + pub fn start_language_server( self: &Arc, language: Arc, + adapter: Arc, root_path: Arc, http_client: Arc, cx: &mut AppContext, - ) -> Vec { + ) -> Option { #[cfg(any(test, feature = "test-support"))] if language.fake_adapter.is_some() { let task = cx.spawn(|cx| async move { @@ -819,70 +820,60 @@ impl LanguageRegistry { }); let server_id = 
post_inc(&mut self.state.write().next_language_server_id); - return vec![PendingLanguageServer { server_id, task }]; + return Some(PendingLanguageServer { server_id, task }); } let download_dir = self .language_server_download_dir .clone() .ok_or_else(|| anyhow!("language server download directory has not been assigned")) - .log_err(); - let download_dir = match download_dir { - Some(download_dir) => download_dir, - None => return Vec::new(), - }; + .log_err()?; - let mut results = Vec::new(); + let this = self.clone(); + let language = language.clone(); + let http_client = http_client.clone(); + let download_dir = download_dir.clone(); + let root_path = root_path.clone(); + let adapter = adapter.clone(); + let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone(); + let login_shell_env_loaded = self.login_shell_env_loaded.clone(); + let server_id = post_inc(&mut self.state.write().next_language_server_id); - for adapter in &language.adapters { - let this = self.clone(); - let language = language.clone(); - let http_client = http_client.clone(); - let download_dir = download_dir.clone(); - let root_path = root_path.clone(); - let adapter = adapter.clone(); - let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone(); - let login_shell_env_loaded = self.login_shell_env_loaded.clone(); - let server_id = post_inc(&mut self.state.write().next_language_server_id); + let task = cx.spawn(|cx| async move { + login_shell_env_loaded.await; - let task = cx.spawn(|cx| async move { - login_shell_env_loaded.await; + let mut lock = this.lsp_binary_paths.lock(); + let entry = lock + .entry(adapter.name.clone()) + .or_insert_with(|| { + get_binary( + adapter.clone(), + language.clone(), + http_client, + download_dir, + lsp_binary_statuses, + ) + .map_err(Arc::new) + .boxed() + .shared() + }) + .clone(); + drop(lock); + let binary = entry.clone().map_err(|e| anyhow!(e)).await?; - let mut lock = this.lsp_binary_paths.lock(); - let entry = lock - .entry(adapter.name.clone()) - .or_insert_with(|| { - get_binary( - adapter.clone(), - language.clone(), - http_client, - download_dir, - lsp_binary_statuses, - ) - .map_err(Arc::new) - .boxed() - .shared() - }) - .clone(); - drop(lock); - let binary = entry.clone().map_err(|e| anyhow!(e)).await?; + let server = lsp::LanguageServer::new( + server_id, + &binary.path, + &binary.arguments, + &root_path, + adapter.code_action_kinds(), + cx, + )?; - let server = lsp::LanguageServer::new( - server_id, - &binary.path, - &binary.arguments, - &root_path, - adapter.code_action_kinds(), - cx, - )?; + Ok(server) + }); - Ok(server) - }); - - results.push(PendingLanguageServer { server_id, task }); - } - - results + Some(PendingLanguageServer { server_id, task }) } pub fn language_server_binary_statuses( diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 3963c42c46..95049e8aeb 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -2137,17 +2137,23 @@ impl Project { return; } - let adapters = language.lsp_adapters(); - let language_servers = self.languages.start_language_servers( - language.clone(), - worktree_path.clone(), - self.client.http_client(), - cx, - ); - debug_assert_eq!(adapters.len(), language_servers.len()); - - for (adapter, pending_server) in adapters.into_iter().zip(language_servers.into_iter()) { + for adapter in language.lsp_adapters() { let key = (worktree_id, adapter.name.clone()); + if self.language_server_ids.contains_key(&key) { + continue; + } + + let pending_server = match 
self.languages.start_language_server( + language.clone(), + adapter.clone(), + worktree_path.clone(), + self.client.http_client(), + cx, + ) { + Some(pending_server) => pending_server, + None => continue, + }; + let lsp = &cx.global::().lsp.get(&adapter.name.0); let override_options = lsp.map(|s| s.initialization_options.clone()).flatten(); @@ -2160,17 +2166,17 @@ impl Project { _ => {} } - if !self.language_server_ids.contains_key(&key) { - let adapter = self.setup_pending_language_server( - initialization_options, - pending_server, - adapter.clone(), - language.clone(), - key.clone(), - cx, - ); - self.language_server_ids.insert(key.clone(), adapter); - } + let server_id = pending_server.server_id; + let state = self.setup_pending_language_server( + initialization_options, + pending_server, + adapter.clone(), + language.clone(), + key.clone(), + cx, + ); + self.language_servers.insert(server_id, state); + self.language_server_ids.insert(key.clone(), server_id); } } @@ -2182,286 +2188,281 @@ impl Project { language: Arc, key: (WorktreeId, LanguageServerName), cx: &mut ModelContext, - ) -> usize { + ) -> LanguageServerState { let server_id = pending_server.server_id; let languages = self.languages.clone(); - self.language_servers.insert( - server_id, - LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move { - let workspace_config = cx.update(|cx| languages.workspace_configuration(cx)).await; - let language_server = pending_server.task.await.log_err()?; - let language_server = language_server - .initialize(initialization_options) - .await - .log_err()?; - let this = this.upgrade(&cx)?; + LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move { + let workspace_config = cx.update(|cx| languages.workspace_configuration(cx)).await; + let language_server = pending_server.task.await.log_err()?; + let language_server = language_server + .initialize(initialization_options) + .await + .log_err()?; + let this = this.upgrade(&cx)?; - language_server - .on_notification::({ - let this = this.downgrade(); + language_server + .on_notification::({ + let this = this.downgrade(); + let adapter = adapter.clone(); + move |mut params, cx| { + let this = this; let adapter = adapter.clone(); - move |mut params, cx| { - let this = this; - let adapter = adapter.clone(); - cx.spawn(|mut cx| async move { - adapter.process_diagnostics(&mut params).await; - if let Some(this) = this.upgrade(&cx) { - this.update(&mut cx, |this, cx| { - this.update_diagnostics( - server_id, - params, - &adapter.disk_based_diagnostic_sources, - cx, - ) - .log_err(); - }); - } - }) - .detach(); - } - }) - .detach(); - - language_server - .on_request::({ - let languages = languages.clone(); - move |params, mut cx| { - let languages = languages.clone(); - async move { - dbg!(¶ms.items); - let workspace_config = - cx.update(|cx| languages.workspace_configuration(cx)).await; - Ok(params - .items - .into_iter() - .map(|item| { - if let Some(section) = &item.section { - workspace_config - .get(section) - .cloned() - .unwrap_or(serde_json::Value::Null) - } else { - workspace_config.clone() - } - }) - .collect()) - } - } - }) - .detach(); - - // Even though we don't have handling for these requests, respond to them to - // avoid stalling any language server like `gopls` which waits for a response - // to these requests when initializing. 
- language_server - .on_request::({ - let this = this.downgrade(); - move |params, mut cx| async move { - if let Some(this) = this.upgrade(&cx) { - this.update(&mut cx, |this, _| { - if let Some(status) = - this.language_server_statuses.get_mut(&server_id) - { - if let lsp::NumberOrString::String(token) = params.token { - status.progress_tokens.insert(token); - } - } - }); - } - Ok(()) - } - }) - .detach(); - language_server - .on_request::({ - let this = this.downgrade(); - move |params, mut cx| async move { - let this = this - .upgrade(&cx) - .ok_or_else(|| anyhow!("project dropped"))?; - for reg in params.registrations { - if reg.method == "workspace/didChangeWatchedFiles" { - if let Some(options) = reg.register_options { - let options = serde_json::from_value(options)?; - this.update(&mut cx, |this, cx| { - this.on_lsp_did_change_watched_files( - server_id, options, cx, - ); - }); - } - } - } - Ok(()) - } - }) - .detach(); - - language_server - .on_request::({ - let this = this.downgrade(); - let adapter = adapter.clone(); - let language_server = language_server.clone(); - move |params, cx| { - Self::on_lsp_workspace_edit( - this, - params, - server_id, - adapter.clone(), - language_server.clone(), - cx, - ) - } - }) - .detach(); - - let disk_based_diagnostics_progress_token = - adapter.disk_based_diagnostics_progress_token.clone(); - - language_server - .on_notification::({ - let this = this.downgrade(); - move |params, mut cx| { + cx.spawn(|mut cx| async move { + adapter.process_diagnostics(&mut params).await; if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| { - this.on_lsp_progress( - params, + this.update_diagnostics( server_id, - disk_based_diagnostics_progress_token.clone(), + params, + &adapter.disk_based_diagnostic_sources, cx, - ); + ) + .log_err(); }); } + }) + .detach(); + } + }) + .detach(); + + language_server + .on_request::({ + let languages = languages.clone(); + move |params, mut cx| { + let languages = languages.clone(); + async move { + let workspace_config = + cx.update(|cx| languages.workspace_configuration(cx)).await; + Ok(params + .items + .into_iter() + .map(|item| { + if let Some(section) = &item.section { + workspace_config + .get(section) + .cloned() + .unwrap_or(serde_json::Value::Null) + } else { + workspace_config.clone() + } + }) + .collect()) + } + } + }) + .detach(); + + // Even though we don't have handling for these requests, respond to them to + // avoid stalling any language server like `gopls` which waits for a response + // to these requests when initializing. 
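// Note: the next two handlers registered below carry the old behavior over
// unchanged. The first records the server's work-done progress token on its
// LanguageServerStatus entry, and the second accepts capability registrations,
// forwarding any "workspace/didChangeWatchedFiles" options into
// on_lsp_did_change_watched_files.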
+ language_server + .on_request::({ + let this = this.downgrade(); + move |params, mut cx| async move { + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, _| { + if let Some(status) = + this.language_server_statuses.get_mut(&server_id) + { + if let lsp::NumberOrString::String(token) = params.token { + status.progress_tokens.insert(token); + } + } + }); + } + Ok(()) + } + }) + .detach(); + language_server + .on_request::({ + let this = this.downgrade(); + move |params, mut cx| async move { + let this = this + .upgrade(&cx) + .ok_or_else(|| anyhow!("project dropped"))?; + for reg in params.registrations { + if reg.method == "workspace/didChangeWatchedFiles" { + if let Some(options) = reg.register_options { + let options = serde_json::from_value(options)?; + this.update(&mut cx, |this, cx| { + this.on_lsp_did_change_watched_files( + server_id, options, cx, + ); + }); + } + } } - }) - .detach(); - - language_server - .notify::( - lsp::DidChangeConfigurationParams { - settings: workspace_config, - }, - ) - .ok(); - - this.update(&mut cx, |this, cx| { - // If the language server for this key doesn't match the server id, don't store the - // server. Which will cause it to be dropped, killing the process - if this - .language_server_ids - .get(&key) - .map(|id| id != &server_id) - .unwrap_or(false) - { - return None; + Ok(()) } + }) + .detach(); - // Update language_servers collection with Running variant of LanguageServerState - // indicating that the server is up and running and ready - this.language_servers.insert( - server_id, - LanguageServerState::Running { - adapter: adapter.clone(), - language: language.clone(), - watched_paths: Default::default(), - server: language_server.clone(), - simulate_disk_based_diagnostics_completion: None, - }, - ); - this.language_server_statuses.insert( - server_id, - LanguageServerStatus { - name: language_server.name().to_string(), - pending_work: Default::default(), - has_pending_diagnostic_updates: false, - progress_tokens: Default::default(), - }, - ); - - if let Some(project_id) = this.remote_id() { - this.client - .send(proto::StartLanguageServer { - project_id, - server: Some(proto::LanguageServer { - id: server_id as u64, - name: language_server.name().to_string(), - }), - }) - .log_err(); + language_server + .on_request::({ + let this = this.downgrade(); + let adapter = adapter.clone(); + let language_server = language_server.clone(); + move |params, cx| { + Self::on_lsp_workspace_edit( + this, + params, + server_id, + adapter.clone(), + language_server.clone(), + cx, + ) } + }) + .detach(); - // Tell the language server about every open buffer in the worktree that matches the language. 
- for buffer in this.opened_buffers.values() { - if let Some(buffer_handle) = buffer.upgrade(cx) { - let buffer = buffer_handle.read(cx); - let file = match File::from_dyn(buffer.file()) { - Some(file) => file, - None => continue, - }; - let language = match buffer.language() { - Some(language) => language, - None => continue, - }; + let disk_based_diagnostics_progress_token = + adapter.disk_based_diagnostics_progress_token.clone(); - if file.worktree.read(cx).id() != key.0 - || !language.lsp_adapters().iter().any(|a| a.name == key.1) - { - continue; - } - - let file = file.as_local()?; - let versions = this - .buffer_snapshots - .entry(buffer.remote_id()) - .or_default() - .entry(server_id) - .or_insert_with(|| { - vec![LspBufferSnapshot { - version: 0, - snapshot: buffer.text_snapshot(), - }] - }); - - let snapshot = versions.last().unwrap(); - let version = snapshot.version; - let initial_snapshot = &snapshot.snapshot; - let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); - language_server - .notify::( - lsp::DidOpenTextDocumentParams { - text_document: lsp::TextDocumentItem::new( - uri, - adapter - .language_ids - .get(language.name().as_ref()) - .cloned() - .unwrap_or_default(), - version, - initial_snapshot.text(), - ), - }, - ) - .log_err()?; - buffer_handle.update(cx, |buffer, cx| { - buffer.set_completion_triggers( - language_server - .capabilities() - .completion_provider - .as_ref() - .and_then(|provider| provider.trigger_characters.clone()) - .unwrap_or_default(), + language_server + .on_notification::({ + let this = this.downgrade(); + move |params, mut cx| { + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, cx| { + this.on_lsp_progress( + params, + server_id, + disk_based_diagnostics_progress_token.clone(), cx, - ) + ); }); } } - - cx.notify(); - Some(language_server) }) - })), - ); - server_id + .detach(); + + language_server + .notify::( + lsp::DidChangeConfigurationParams { + settings: workspace_config, + }, + ) + .ok(); + + this.update(&mut cx, |this, cx| { + // If the language server for this key doesn't match the server id, don't store the + // server. Which will cause it to be dropped, killing the process + if this + .language_server_ids + .get(&key) + .map(|id| id != &server_id) + .unwrap_or(false) + { + return None; + } + + // Update language_servers collection with Running variant of LanguageServerState + // indicating that the server is up and running and ready + this.language_servers.insert( + server_id, + LanguageServerState::Running { + adapter: adapter.clone(), + language: language.clone(), + watched_paths: Default::default(), + server: language_server.clone(), + simulate_disk_based_diagnostics_completion: None, + }, + ); + this.language_server_statuses.insert( + server_id, + LanguageServerStatus { + name: language_server.name().to_string(), + pending_work: Default::default(), + has_pending_diagnostic_updates: false, + progress_tokens: Default::default(), + }, + ); + + if let Some(project_id) = this.remote_id() { + this.client + .send(proto::StartLanguageServer { + project_id, + server: Some(proto::LanguageServer { + id: server_id as u64, + name: language_server.name().to_string(), + }), + }) + .log_err(); + } + + // Tell the language server about every open buffer in the worktree that matches the language. 
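// Note: the loop added below walks every open buffer in this worktree whose
// language lists this adapter, seeds that buffer's per-server snapshot history
// at version 0, and sends textDocument/didOpen with that version and the
// adapter's language id; this matches the earlier test change that now expects
// version 0 in the didOpen params. It also copies the server's completion
// trigger characters onto the buffer.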
+ for buffer in this.opened_buffers.values() { + if let Some(buffer_handle) = buffer.upgrade(cx) { + let buffer = buffer_handle.read(cx); + let file = match File::from_dyn(buffer.file()) { + Some(file) => file, + None => continue, + }; + let language = match buffer.language() { + Some(language) => language, + None => continue, + }; + + if file.worktree.read(cx).id() != key.0 + || !language.lsp_adapters().iter().any(|a| a.name == key.1) + { + continue; + } + + let file = file.as_local()?; + let versions = this + .buffer_snapshots + .entry(buffer.remote_id()) + .or_default() + .entry(server_id) + .or_insert_with(|| { + vec![LspBufferSnapshot { + version: 0, + snapshot: buffer.text_snapshot(), + }] + }); + + let snapshot = versions.last().unwrap(); + let version = snapshot.version; + let initial_snapshot = &snapshot.snapshot; + let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap(); + language_server + .notify::( + lsp::DidOpenTextDocumentParams { + text_document: lsp::TextDocumentItem::new( + uri, + adapter + .language_ids + .get(language.name().as_ref()) + .cloned() + .unwrap_or_default(), + version, + initial_snapshot.text(), + ), + }, + ) + .log_err()?; + buffer_handle.update(cx, |buffer, cx| { + buffer.set_completion_triggers( + language_server + .capabilities() + .completion_provider + .as_ref() + .and_then(|provider| provider.trigger_characters.clone()) + .unwrap_or_default(), + cx, + ) + }); + } + } + + cx.notify(); + Some(language_server) + }) + })) } // Returns a list of all of the worktrees which no longer have a language server and the root path From c5f86bc6af33b098904267fb0279b51092a4dcfe Mon Sep 17 00:00:00 2001 From: Julia Date: Wed, 19 Apr 2023 17:49:44 -0400 Subject: [PATCH 16/26] Avoid language servers fighting over diagnostics summaries Previously each server would stomp all over the existing results Co-Authored-By: Max Brunsfeld --- crates/collab/src/tests/integration_tests.rs | 6 +- crates/diagnostics/src/diagnostics.rs | 2 +- crates/project/src/project.rs | 19 ++--- crates/project/src/project_tests.rs | 58 +++++++++++++++ crates/project/src/worktree.rs | 77 +++++++++++++------- crates/zed/src/languages.rs | 2 +- 6 files changed, 123 insertions(+), 41 deletions(-) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 092fdddb96..8542bc2056 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -3477,6 +3477,7 @@ async fn test_collaborating_with_diagnostics( worktree_id, path: Arc::from(Path::new("a.rs")), }, + 0, DiagnosticSummary { error_count: 1, warning_count: 0, @@ -3512,6 +3513,7 @@ async fn test_collaborating_with_diagnostics( worktree_id, path: Arc::from(Path::new("a.rs")), }, + 0, DiagnosticSummary { error_count: 1, warning_count: 0, @@ -3552,10 +3554,10 @@ async fn test_collaborating_with_diagnostics( worktree_id, path: Arc::from(Path::new("a.rs")), }, + 0, DiagnosticSummary { error_count: 1, warning_count: 1, - ..Default::default() }, )] ); @@ -3568,10 +3570,10 @@ async fn test_collaborating_with_diagnostics( worktree_id, path: Arc::from(Path::new("a.rs")), }, + 0, DiagnosticSummary { error_count: 1, warning_count: 1, - ..Default::default() }, )] ); diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 75a95586be..09344a3f4f 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -168,7 +168,7 @@ impl ProjectDiagnosticsEditor { let project = 
project_handle.read(cx); let paths_to_update = project .diagnostic_summaries(cx) - .map(|e| (e.0, e.1.language_server_id)) + .map(|(path, server_id, _)| (path, server_id)) .collect(); let summary = project.diagnostic_summary(cx); let mut this = Self { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 95049e8aeb..7c0f33a94a 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -243,7 +243,6 @@ pub struct ProjectPath { #[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)] pub struct DiagnosticSummary { - pub language_server_id: usize, pub error_count: usize, pub warning_count: usize, } @@ -314,12 +313,8 @@ pub struct Hover { pub struct ProjectTransaction(pub HashMap, language::Transaction>); impl DiagnosticSummary { - fn new<'a, T: 'a>( - language_server_id: usize, - diagnostics: impl IntoIterator>, - ) -> Self { + fn new<'a, T: 'a>(diagnostics: impl IntoIterator>) -> Self { let mut this = Self { - language_server_id, error_count: 0, warning_count: 0, }; @@ -341,10 +336,10 @@ impl DiagnosticSummary { self.error_count == 0 && self.warning_count == 0 } - pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary { + pub fn to_proto(&self, language_server_id: usize, path: &Path) -> proto::DiagnosticSummary { proto::DiagnosticSummary { path: path.to_string_lossy().to_string(), - language_server_id: self.language_server_id as u64, + language_server_id: language_server_id as u64, error_count: self.error_count as u32, warning_count: self.warning_count as u32, } @@ -4731,7 +4726,7 @@ impl Project { pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary { let mut summary = DiagnosticSummary::default(); - for (_, path_summary) in self.diagnostic_summaries(cx) { + for (_, _, path_summary) in self.diagnostic_summaries(cx) { summary.error_count += path_summary.error_count; summary.warning_count += path_summary.warning_count; } @@ -4741,13 +4736,15 @@ impl Project { pub fn diagnostic_summaries<'a>( &'a self, cx: &'a AppContext, - ) -> impl Iterator + 'a { + ) -> impl Iterator + 'a { self.visible_worktrees(cx).flat_map(move |worktree| { let worktree = worktree.read(cx); let worktree_id = worktree.id(); worktree .diagnostic_summaries() - .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary)) + .map(move |(path, server_id, summary)| { + (ProjectPath { worktree_id, path }, server_id, summary) + }) }) } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index b6e9741aec..6cda33dc47 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1449,6 +1449,64 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) { }); } +#[gpui::test] +async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) { + println!("hello from stdout"); + eprintln!("hello from stderr"); + cx.foreground().forbid_parking(); + + let fs = FakeFs::new(cx.background()); + fs.insert_tree("/dir", json!({ "a.rs": "one two three" })) + .await; + + let project = Project::test(fs, ["/dir".as_ref()], cx).await; + + project.update(cx, |project, cx| { + project + .update_diagnostic_entries( + 0, + Path::new("/dir/a.rs").to_owned(), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + is_primary: true, + message: "syntax error a1".to_string(), + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project + 
.update_diagnostic_entries( + 1, + Path::new("/dir/a.rs").to_owned(), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)), + diagnostic: Diagnostic { + severity: DiagnosticSeverity::ERROR, + is_primary: true, + message: "syntax error b1".to_string(), + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + + assert_eq!( + project.diagnostic_summary(cx), + DiagnosticSummary { + error_count: 2, + warning_count: 0, + } + ); + }); +} + #[gpui::test] async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) { cx.foreground().forbid_parking(); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 5a0559f54a..057d89f567 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -50,7 +50,7 @@ use std::{ }, time::{Duration, SystemTime}, }; -use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet}; +use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeSet}; use util::{paths::HOME, ResultExt, TryFutureExt}; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)] @@ -68,7 +68,7 @@ pub struct LocalWorktree { _background_scanner_task: Task<()>, share: Option, diagnostics: HashMap, Vec<(usize, Vec>>)>>, - diagnostic_summaries: TreeMap, + diagnostic_summaries: HashMap, HashMap>, client: Arc, fs: Arc, visible: bool, @@ -82,7 +82,7 @@ pub struct RemoteWorktree { updates_tx: Option>, snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>, replica_id: ReplicaId, - diagnostic_summaries: TreeMap, + diagnostic_summaries: HashMap, HashMap>, visible: bool, disconnected: bool, } @@ -463,13 +463,17 @@ impl Worktree { pub fn diagnostic_summaries( &self, - ) -> impl Iterator, DiagnosticSummary)> + '_ { + ) -> impl Iterator, usize, DiagnosticSummary)> + '_ { match self { Worktree::Local(worktree) => &worktree.diagnostic_summaries, Worktree::Remote(worktree) => &worktree.diagnostic_summaries, } .iter() - .map(|(path, summary)| (path.0.clone(), *summary)) + .flat_map(|(path, summaries)| { + summaries + .iter() + .map(move |(&server_id, &summary)| (path.clone(), server_id, summary)) + }) } pub fn abs_path(&self) -> Arc { @@ -525,30 +529,40 @@ impl LocalWorktree { diagnostics: Vec>>, _: &mut ModelContext, ) -> Result { - self.diagnostics.remove(&worktree_path); - let old_summary = self + let summaries_by_server_id = self .diagnostic_summaries - .remove(&PathKey(worktree_path.clone())) + .entry(worktree_path.clone()) + .or_default(); + + let old_summary = summaries_by_server_id + .remove(&server_id) .unwrap_or_default(); - let new_summary = DiagnosticSummary::new(server_id, &diagnostics); - if !new_summary.is_empty() { - self.diagnostic_summaries - .insert(PathKey(worktree_path.clone()), new_summary); + + let new_summary = DiagnosticSummary::new(&diagnostics); + if new_summary.is_empty() { + if let Some(diagnostics_by_server_id) = self.diagnostics.get_mut(&worktree_path) { + if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { + diagnostics_by_server_id.remove(ix); + } + if diagnostics_by_server_id.is_empty() { + self.diagnostics.remove(&worktree_path); + } + } + } else { + summaries_by_server_id.insert(server_id, new_summary); let diagnostics_by_server_id = self.diagnostics.entry(worktree_path.clone()).or_default(); match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) { Ok(ix) => { diagnostics_by_server_id[ix] = (server_id, diagnostics); } - Err(ix) => { diagnostics_by_server_id.insert(ix, (server_id, 
diagnostics)); } } } - let updated = !old_summary.is_empty() || !new_summary.is_empty(); - if updated { + if !old_summary.is_empty() || !new_summary.is_empty() { if let Some(share) = self.share.as_ref() { self.client .send(proto::UpdateDiagnosticSummary { @@ -565,7 +579,7 @@ impl LocalWorktree { } } - Ok(updated) + Ok(!old_summary.is_empty() || !new_summary.is_empty()) } fn set_snapshot(&mut self, new_snapshot: LocalSnapshot, cx: &mut ModelContext) { @@ -955,13 +969,15 @@ impl LocalWorktree { let (resume_updates_tx, mut resume_updates_rx) = watch::channel(); let worktree_id = cx.model_id() as u64; - for (path, summary) in self.diagnostic_summaries.iter() { - if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary { - project_id, - worktree_id, - summary: Some(summary.to_proto(&path.0)), - }) { - return Task::ready(Err(e)); + for (path, summaries) in &self.diagnostic_summaries { + for (&server_id, summary) in summaries { + if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary { + project_id, + worktree_id, + summary: Some(summary.to_proto(server_id, &path)), + }) { + return Task::ready(Err(e)); + } } } @@ -1119,15 +1135,24 @@ impl RemoteWorktree { path: Arc, summary: &proto::DiagnosticSummary, ) { + let server_id = summary.language_server_id as usize; let summary = DiagnosticSummary { - language_server_id: summary.language_server_id as usize, error_count: summary.error_count as usize, warning_count: summary.warning_count as usize, }; + if summary.is_empty() { - self.diagnostic_summaries.remove(&PathKey(path)); + if let Some(summaries) = self.diagnostic_summaries.get_mut(&path) { + summaries.remove(&server_id); + if summaries.is_empty() { + self.diagnostic_summaries.remove(&path); + } + } } else { - self.diagnostic_summaries.insert(PathKey(path), summary); + self.diagnostic_summaries + .entry(path) + .or_default() + .insert(server_id, summary); } } diff --git a/crates/zed/src/languages.rs b/crates/zed/src/languages.rs index 4dc54f7a9b..4d9e0ae36a 100644 --- a/crates/zed/src/languages.rs +++ b/crates/zed/src/languages.rs @@ -107,7 +107,7 @@ pub fn init( tree_sitter_typescript::language_tsx(), vec![ adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), - adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), + // adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), ], ), ( From 4dd917c123fe6576d6f38739f875ab1388a2b30d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 19 Apr 2023 17:37:28 -0700 Subject: [PATCH 17/26] Introduce a LanguageServerId wrapper type Clarify the meaning of all the usizes in use in all of these struct fields an method signatures. 
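The wrapper itself is a plain newtype over the raw usize id; its definition,
including the Display impl, is in the crates/lsp/src/lsp.rs hunk below. A
minimal, self-contained sketch of the idea (the summaries map and the counts in
it are illustrative stand-ins, not Zed's actual types) shows how a hashable
newtype can key the per-server maps that the previous commit introduced:

    use std::collections::HashMap;
    use std::fmt;

    // Newtype over the raw id handed out by the language registry.
    #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
    #[repr(transparent)]
    pub struct LanguageServerId(pub usize);

    impl fmt::Display for LanguageServerId {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // Delegate formatting to the inner usize, as the real impl does.
            fmt::Display::fmt(&self.0, f)
        }
    }

    fn main() {
        // Two servers can report for the same path without overwriting each
        // other, because the inner map is keyed by server id.
        let mut summaries: HashMap<&str, HashMap<LanguageServerId, u32>> = HashMap::new();
        summaries.entry("a.rs").or_default().insert(LanguageServerId(0), 1);
        summaries.entry("a.rs").or_default().insert(LanguageServerId(1), 2);
        assert_eq!(summaries["a.rs"].len(), 2);
        println!("server {}", LanguageServerId(1));
    }

Using the newtype instead of a bare usize also lets the type checker reject
call sites that would previously have accepted any integer id, which is the
point of the cleanup described above.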
--- Cargo.lock | 1 + crates/collab/src/tests/integration_tests.rs | 9 ++- crates/copilot/src/copilot.rs | 4 +- crates/diagnostics/Cargo.toml | 2 + crates/diagnostics/src/diagnostics.rs | 26 +++--- crates/diagnostics/src/items.rs | 3 +- crates/editor/src/hover_popover.rs | 3 +- crates/language/src/buffer.rs | 13 +-- crates/language/src/buffer_tests.rs | 2 +- crates/language/src/language.rs | 11 ++- crates/language/src/proto.rs | 10 +-- crates/lsp/src/lsp.rs | 23 ++++-- crates/project/src/lsp_command.rs | 26 +++--- crates/project/src/project.rs | 84 +++++++++++--------- crates/project/src/project_tests.rs | 40 +++++----- crates/project/src/worktree.rs | 26 ++++-- 16 files changed, 166 insertions(+), 117 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d7c0249798..0109c6cbaa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1834,6 +1834,7 @@ dependencies = [ "editor", "gpui", "language", + "lsp", "postage", "project", "serde_json", diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 8542bc2056..206064dc38 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -22,6 +22,7 @@ use language::{ LanguageConfig, OffsetRangeExt, Point, Rope, }; use live_kit_client::MacOSDisplay; +use lsp::LanguageServerId; use project::{search::SearchQuery, DiagnosticSummary, Project, ProjectPath}; use rand::prelude::*; use serde_json::json; @@ -3477,7 +3478,7 @@ async fn test_collaborating_with_diagnostics( worktree_id, path: Arc::from(Path::new("a.rs")), }, - 0, + LanguageServerId(0), DiagnosticSummary { error_count: 1, warning_count: 0, @@ -3513,7 +3514,7 @@ async fn test_collaborating_with_diagnostics( worktree_id, path: Arc::from(Path::new("a.rs")), }, - 0, + LanguageServerId(0), DiagnosticSummary { error_count: 1, warning_count: 0, @@ -3554,7 +3555,7 @@ async fn test_collaborating_with_diagnostics( worktree_id, path: Arc::from(Path::new("a.rs")), }, - 0, + LanguageServerId(0), DiagnosticSummary { error_count: 1, warning_count: 1, @@ -3570,7 +3571,7 @@ async fn test_collaborating_with_diagnostics( worktree_id, path: Arc::from(Path::new("a.rs")), }, - 0, + LanguageServerId(0), DiagnosticSummary { error_count: 1, warning_count: 1, diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index c3ec63c43c..4806826b3c 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -14,7 +14,7 @@ use language::{ ToPointUtf16, }; use log::{debug, error}; -use lsp::LanguageServer; +use lsp::{LanguageServer, LanguageServerId}; use node_runtime::NodeRuntime; use request::{LogMessage, StatusNotification}; use settings::Settings; @@ -380,7 +380,7 @@ impl Copilot { let node_path = node_runtime.binary_path().await?; let arguments: &[OsString] = &[server_path.into(), "--stdio".into()]; let server = LanguageServer::new( - 0, + LanguageServerId(0), &node_path, arguments, Path::new("/"), diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index 8ef2546b5d..83ee243b82 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -14,6 +14,7 @@ smallvec = { version = "1.6", features = ["union"] } collections = { path = "../collections" } editor = { path = "../editor" } language = { path = "../language" } +lsp = { path = "../lsp" } gpui = { path = "../gpui" } project = { path = "../project" } settings = { path = "../settings" } @@ -27,6 +28,7 @@ unindent = "0.1" client = { path = "../client", features = ["test-support"] } editor = { path = 
"../editor", features = ["test-support"] } language = { path = "../language", features = ["test-support"] } +lsp = { path = "../lsp", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } workspace = { path = "../workspace", features = ["test-support"] } serde_json = { workspace = true } diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 09344a3f4f..9b2964036e 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -18,6 +18,7 @@ use language::{ Anchor, Bias, Buffer, Diagnostic, DiagnosticEntry, DiagnosticSeverity, Point, Selection, SelectionGoal, }; +use lsp::LanguageServerId; use project::{DiagnosticSummary, Project, ProjectPath}; use serde_json::json; use settings::Settings; @@ -56,7 +57,7 @@ struct ProjectDiagnosticsEditor { summary: DiagnosticSummary, excerpts: ModelHandle, path_states: Vec, - paths_to_update: BTreeMap, + paths_to_update: BTreeMap, } struct PathState { @@ -116,7 +117,7 @@ impl View for ProjectDiagnosticsEditor { }), "summary": self.summary, "paths_to_update": self.paths_to_update.iter().map(|(path, server_id)| - (path.path.to_string_lossy(), server_id) + (path.path.to_string_lossy(), server_id.0) ).collect::>(), "paths_states": self.path_states.iter().map(|state| json!({ @@ -196,7 +197,11 @@ impl ProjectDiagnosticsEditor { } } - fn update_excerpts(&mut self, language_server_id: Option, cx: &mut ViewContext) { + fn update_excerpts( + &mut self, + language_server_id: Option, + cx: &mut ViewContext, + ) { let mut paths = Vec::new(); self.paths_to_update.retain(|path, server_id| { if language_server_id @@ -809,6 +814,7 @@ mod tests { ) .await; + let language_server_id = LanguageServerId(0); let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await; let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); @@ -816,7 +822,7 @@ mod tests { project.update(cx, |project, cx| { project .update_diagnostic_entries( - 0, + language_server_id, PathBuf::from("/test/main.rs"), None, vec![ @@ -965,10 +971,10 @@ mod tests { // Diagnostics are added for another earlier path. 
project.update(cx, |project, cx| { - project.disk_based_diagnostics_started(0, cx); + project.disk_based_diagnostics_started(language_server_id, cx); project .update_diagnostic_entries( - 0, + language_server_id, PathBuf::from("/test/consts.rs"), None, vec![DiagnosticEntry { @@ -985,7 +991,7 @@ mod tests { cx, ) .unwrap(); - project.disk_based_diagnostics_finished(0, cx); + project.disk_based_diagnostics_finished(language_server_id, cx); }); view.next_notification(cx).await; @@ -1065,10 +1071,10 @@ mod tests { // Diagnostics are added to the first path project.update(cx, |project, cx| { - project.disk_based_diagnostics_started(0, cx); + project.disk_based_diagnostics_started(language_server_id, cx); project .update_diagnostic_entries( - 0, + language_server_id, PathBuf::from("/test/consts.rs"), None, vec![ @@ -1101,7 +1107,7 @@ mod tests { cx, ) .unwrap(); - project.disk_based_diagnostics_finished(0, cx); + project.disk_based_diagnostics_finished(language_server_id, cx); }); view.next_notification(cx).await; diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index 6ebae6e204..65b42ab3d4 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -7,6 +7,7 @@ use gpui::{ ViewHandle, WeakViewHandle, }; use language::Diagnostic; +use lsp::LanguageServerId; use project::Project; use settings::Settings; use workspace::{item::ItemHandle, StatusItemView}; @@ -15,7 +16,7 @@ pub struct DiagnosticIndicator { summary: project::DiagnosticSummary, active_editor: Option>, current_diagnostic: Option, - in_progress_checks: HashSet, + in_progress_checks: HashSet, _observe_active_editor: Option, } diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index f673593413..3e69d2c64b 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -436,6 +436,7 @@ mod tests { use indoc::indoc; use language::{Diagnostic, DiagnosticSet}; + use lsp::LanguageServerId; use project::HoverBlock; use smol::stream::StreamExt; @@ -620,7 +621,7 @@ mod tests { }], &snapshot, ); - buffer.update_diagnostics(0, set, cx); + buffer.update_diagnostics(LanguageServerId(0), set, cx); }); // Hover pops diagnostic immediately diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 90d6194801..c52ca4d43e 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -17,6 +17,7 @@ use collections::HashMap; use fs::LineEnding; use futures::FutureExt as _; use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task}; +use lsp::LanguageServerId; use parking_lot::Mutex; use settings::Settings; use similar::{ChangeTag, TextDiff}; @@ -72,7 +73,7 @@ pub struct Buffer { syntax_map: Mutex, parsing_in_background: bool, parse_count: usize, - diagnostics: HashMap, // server_id -> diagnostic set + diagnostics: HashMap, remote_selections: TreeMap, selections_update_count: usize, diagnostics_update_count: usize, @@ -89,7 +90,7 @@ pub struct BufferSnapshot { pub git_diff: git::diff::BufferDiff, pub(crate) syntax: SyntaxSnapshot, file: Option>, - diagnostics: HashMap, // server_id -> diagnostic set + diagnostics: HashMap, diagnostics_update_count: usize, file_update_count: usize, git_diff_update_count: usize, @@ -157,7 +158,7 @@ pub struct Completion { #[derive(Clone, Debug)] pub struct CodeAction { - pub server_id: usize, + pub server_id: LanguageServerId, pub range: Range, pub lsp_action: lsp::CodeAction, } @@ -167,7 +168,7 @@ pub enum Operation { Buffer(text::Operation), 
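// The UpdateDiagnostics variant below now carries the typed server id; the
// proto conversions in language/src/proto.rs further down map it to and from
// the wire u64 via `server_id.0` and `LanguageServerId(...)`.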
UpdateDiagnostics { - server_id: usize, + server_id: LanguageServerId, diagnostics: Arc<[DiagnosticEntry]>, lamport_timestamp: clock::Lamport, }, @@ -879,7 +880,7 @@ impl Buffer { pub fn update_diagnostics( &mut self, - server_id: usize, + server_id: LanguageServerId, diagnostics: DiagnosticSet, cx: &mut ModelContext, ) { @@ -1645,7 +1646,7 @@ impl Buffer { fn apply_diagnostic_update( &mut self, - server_id: usize, + server_id: LanguageServerId, diagnostics: DiagnosticSet, lamport_timestamp: clock::Lamport, cx: &mut ModelContext, diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index 6b6ce041f7..eeac1a4818 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -1866,7 +1866,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) { buffer, ); log::info!("peer {} setting diagnostics: {:?}", replica_id, diagnostics); - buffer.update_diagnostics(0, diagnostics, cx); + buffer.update_diagnostics(LanguageServerId(0), diagnostics, cx); }); mutation_count -= 1; } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 6c440e116b..85c9089952 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -54,6 +54,7 @@ use futures::channel::mpsc; pub use buffer::Operation; pub use buffer::*; pub use diagnostic_set::DiagnosticEntry; +pub use lsp::LanguageServerId; pub use outline::{Outline, OutlineItem}; pub use tree_sitter::{Parser, Tree}; @@ -524,7 +525,7 @@ struct LanguageRegistryState { } pub struct PendingLanguageServer { - pub server_id: usize, + pub server_id: LanguageServerId, pub task: Task>, } @@ -819,7 +820,7 @@ impl LanguageRegistry { Ok(server) }); - let server_id = post_inc(&mut self.state.write().next_language_server_id); + let server_id = self.state.write().next_language_server_id(); return Some(PendingLanguageServer { server_id, task }); } @@ -837,7 +838,7 @@ impl LanguageRegistry { let adapter = adapter.clone(); let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone(); let login_shell_env_loaded = self.login_shell_env_loaded.clone(); - let server_id = post_inc(&mut self.state.write().next_language_server_id); + let server_id = self.state.write().next_language_server_id(); let task = cx.spawn(|cx| async move { login_shell_env_loaded.await; @@ -884,6 +885,10 @@ impl LanguageRegistry { } impl LanguageRegistryState { + fn next_language_server_id(&mut self) -> LanguageServerId { + LanguageServerId(post_inc(&mut self.next_language_server_id)) + } + fn add(&mut self, language: Arc) { if let Some(theme) = self.theme.as_ref() { language.set_theme(&theme.editor.syntax); diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index e4963e0882..bf1d1dd273 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -4,7 +4,7 @@ use crate::{ }; use anyhow::{anyhow, Result}; use clock::ReplicaId; -use lsp::DiagnosticSeverity; +use lsp::{DiagnosticSeverity, LanguageServerId}; use rpc::proto; use std::{ops::Range, sync::Arc}; use text::*; @@ -80,7 +80,7 @@ pub fn serialize_operation(operation: &crate::Operation) -> proto::Operation { } => proto::operation::Variant::UpdateDiagnostics(proto::UpdateDiagnostics { replica_id: lamport_timestamp.replica_id as u32, lamport_timestamp: lamport_timestamp.value, - server_id: *server_id as u64, + server_id: server_id.0 as u64, diagnostics: serialize_diagnostics(diagnostics.iter()), }), @@ -277,7 +277,7 @@ pub fn deserialize_operation(message: proto::Operation) -> Result 
proto::CodeAction { proto::CodeAction { - server_id: action.server_id as u64, + server_id: action.server_id.0 as u64, start: Some(serialize_anchor(&action.range.start)), end: Some(serialize_anchor(&action.range.end)), lsp_action: serde_json::to_vec(&action.lsp_action).unwrap(), @@ -487,7 +487,7 @@ pub fn deserialize_code_action(action: proto::CodeAction) -> Result .ok_or_else(|| anyhow!("invalid end"))?; let lsp_action = serde_json::from_slice(&action.lsp_action)?; Ok(CodeAction { - server_id: action.server_id as usize, + server_id: LanguageServerId(action.server_id as usize), range: start..end, lsp_action, }) diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 1e37c9765b..b6a4d8513e 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -16,6 +16,7 @@ use smol::{ process::{self, Child}, }; use std::{ + fmt, future::Future, io::Write, path::PathBuf, @@ -35,7 +36,7 @@ type NotificationHandler = Box, &str, AsyncAppCon type ResponseHandler = Box)>; pub struct LanguageServer { - server_id: usize, + server_id: LanguageServerId, next_id: AtomicUsize, outbound_tx: channel::Sender>, name: String, @@ -51,6 +52,10 @@ pub struct LanguageServer { _server: Option, } +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[repr(transparent)] +pub struct LanguageServerId(pub usize); + pub struct Subscription { method: &'static str, notification_handlers: Arc>>, @@ -107,7 +112,7 @@ struct Error { impl LanguageServer { pub fn new>( - server_id: usize, + server_id: LanguageServerId, binary_path: &Path, arguments: &[T], root_path: &Path, @@ -158,7 +163,7 @@ impl LanguageServer { } fn new_internal( - server_id: usize, + server_id: LanguageServerId, stdin: Stdin, stdout: Stdout, server: Option, @@ -581,7 +586,7 @@ impl LanguageServer { &self.capabilities } - pub fn server_id(&self) -> usize { + pub fn server_id(&self) -> LanguageServerId { self.server_id } @@ -685,6 +690,12 @@ impl Subscription { } } +impl fmt::Display for LanguageServerId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + impl Drop for Subscription { fn drop(&mut self) { self.notification_handlers.lock().remove(self.method); @@ -720,7 +731,7 @@ impl LanguageServer { let (notifications_tx, notifications_rx) = channel::unbounded(); let server = Self::new_internal( - 0, + LanguageServerId(0), stdin_writer, stdout_reader, None, @@ -731,7 +742,7 @@ impl LanguageServer { ); let fake = FakeLanguageServer { server: Arc::new(Self::new_internal( - 0, + LanguageServerId(0), stdout_writer, stdin_reader, None, diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index 96e44d6f84..b26987694e 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -12,7 +12,7 @@ use language::{ range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction, Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Unclipped, }; -use lsp::{DocumentHighlightKind, LanguageServer, ServerCapabilities}; +use lsp::{DocumentHighlightKind, LanguageServer, LanguageServerId, ServerCapabilities}; use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag}; use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc}; @@ -39,7 +39,7 @@ pub(crate) trait LspCommand: 'static + Sized { message: ::Result, project: ModelHandle, buffer: ModelHandle, - server_id: usize, + server_id: LanguageServerId, cx: AsyncAppContext, ) -> Result; @@ -143,7 +143,7 @@ impl LspCommand for PrepareRename { message: Option, _: ModelHandle, 
buffer: ModelHandle, - _: usize, + _: LanguageServerId, cx: AsyncAppContext, ) -> Result>> { buffer.read_with(&cx, |buffer, _| { @@ -270,7 +270,7 @@ impl LspCommand for PerformRename { message: Option, project: ModelHandle, buffer: ModelHandle, - server_id: usize, + server_id: LanguageServerId, mut cx: AsyncAppContext, ) -> Result { if let Some(edit) = message { @@ -389,7 +389,7 @@ impl LspCommand for GetDefinition { message: Option, project: ModelHandle, buffer: ModelHandle, - server_id: usize, + server_id: LanguageServerId, cx: AsyncAppContext, ) -> Result> { location_links_from_lsp(message, project, buffer, server_id, cx).await @@ -482,7 +482,7 @@ impl LspCommand for GetTypeDefinition { message: Option, project: ModelHandle, buffer: ModelHandle, - server_id: usize, + server_id: LanguageServerId, cx: AsyncAppContext, ) -> Result> { location_links_from_lsp(message, project, buffer, server_id, cx).await @@ -548,7 +548,7 @@ impl LspCommand for GetTypeDefinition { fn language_server_for_buffer( project: &ModelHandle, buffer: &ModelHandle, - server_id: usize, + server_id: LanguageServerId, cx: &mut AsyncAppContext, ) -> Result<(Arc, Arc)> { project @@ -626,7 +626,7 @@ async fn location_links_from_lsp( message: Option, project: ModelHandle, buffer: ModelHandle, - server_id: usize, + server_id: LanguageServerId, mut cx: AsyncAppContext, ) -> Result> { let message = match message { @@ -770,7 +770,7 @@ impl LspCommand for GetReferences { locations: Option>, project: ModelHandle, buffer: ModelHandle, - server_id: usize, + server_id: LanguageServerId, mut cx: AsyncAppContext, ) -> Result> { let mut references = Vec::new(); @@ -932,7 +932,7 @@ impl LspCommand for GetDocumentHighlights { lsp_highlights: Option>, _: ModelHandle, buffer: ModelHandle, - _: usize, + _: LanguageServerId, cx: AsyncAppContext, ) -> Result> { buffer.read_with(&cx, |buffer, _| { @@ -1078,7 +1078,7 @@ impl LspCommand for GetHover { message: Option, _: ModelHandle, buffer: ModelHandle, - _: usize, + _: LanguageServerId, cx: AsyncAppContext, ) -> Result { Ok(message.and_then(|hover| { @@ -1300,7 +1300,7 @@ impl LspCommand for GetCompletions { completions: Option, _: ModelHandle, buffer: ModelHandle, - _: usize, + _: LanguageServerId, cx: AsyncAppContext, ) -> Result> { let completions = if let Some(completions) = completions { @@ -1520,7 +1520,7 @@ impl LspCommand for GetCodeActions { actions: Option, _: ModelHandle, _: ModelHandle, - server_id: usize, + server_id: LanguageServerId, _: AsyncAppContext, ) -> Result> { Ok(actions diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7c0f33a94a..afd0b3bbae 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -36,7 +36,7 @@ use language::{ }; use lsp::{ DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, - DocumentHighlightKind, LanguageServer, LanguageString, MarkedString, + DocumentHighlightKind, LanguageServer, LanguageServerId, LanguageString, MarkedString, }; use lsp_command::*; use lsp_glob_set::LspGlobSet; @@ -95,10 +95,10 @@ pub struct Project { active_entry: Option, buffer_changes_tx: mpsc::UnboundedSender, languages: Arc, - language_servers: HashMap, - language_server_ids: HashMap<(WorktreeId, LanguageServerName), usize>, - language_server_statuses: BTreeMap, - last_workspace_edits_by_language_server: HashMap, + language_servers: HashMap, + language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>, + language_server_statuses: BTreeMap, + 
last_workspace_edits_by_language_server: HashMap, client: Arc, next_entry_id: Arc, join_project_response_message_id: u32, @@ -123,7 +123,7 @@ pub struct Project { /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it. /// Used for re-issuing buffer requests when peers temporarily disconnect incomplete_remote_buffers: HashMap>>, - buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots + buffer_snapshots: HashMap>>, // buffer_id -> server_id -> vec of snapshots buffers_being_formatted: HashSet, nonce: u128, _maintain_buffer_languages: Task<()>, @@ -189,14 +189,14 @@ pub enum Event { WorktreeAdded, WorktreeRemoved(WorktreeId), DiskBasedDiagnosticsStarted { - language_server_id: usize, + language_server_id: LanguageServerId, }, DiskBasedDiagnosticsFinished { - language_server_id: usize, + language_server_id: LanguageServerId, }, DiagnosticsUpdated { path: ProjectPath, - language_server_id: usize, + language_server_id: LanguageServerId, }, RemoteIdChanged(Option), DisconnectedFromHost, @@ -336,10 +336,14 @@ impl DiagnosticSummary { self.error_count == 0 && self.warning_count == 0 } - pub fn to_proto(&self, language_server_id: usize, path: &Path) -> proto::DiagnosticSummary { + pub fn to_proto( + &self, + language_server_id: LanguageServerId, + path: &Path, + ) -> proto::DiagnosticSummary { proto::DiagnosticSummary { path: path.to_string_lossy().to_string(), - language_server_id: language_server_id as u64, + language_server_id: language_server_id.0 as u64, error_count: self.error_count as u32, warning_count: self.warning_count as u32, } @@ -541,7 +545,7 @@ impl Project { .into_iter() .map(|server| { ( - server.id as usize, + LanguageServerId(server.id as usize), LanguageServerStatus { name: server.name, pending_work: Default::default(), @@ -1025,7 +1029,7 @@ impl Project { .send(proto::StartLanguageServer { project_id, server: Some(proto::LanguageServer { - id: *server_id as u64, + id: server_id.0 as u64, name: status.name.clone(), }), }) @@ -1152,7 +1156,7 @@ impl Project { .into_iter() .map(|server| { ( - server.id as usize, + LanguageServerId(server.id as usize), LanguageServerStatus { name: server.name, pending_work: Default::default(), @@ -1444,7 +1448,7 @@ impl Project { fn open_local_buffer_via_lsp( &mut self, abs_path: lsp::Url, - language_server_id: usize, + language_server_id: LanguageServerId, language_server_name: LanguageServerName, cx: &mut ModelContext, ) -> Task>> { @@ -2381,7 +2385,7 @@ impl Project { .send(proto::StartLanguageServer { project_id, server: Some(proto::LanguageServer { - id: server_id as u64, + id: server_id.0 as u64, name: language_server.name().to_string(), }), }) @@ -2603,7 +2607,7 @@ impl Project { fn on_lsp_progress( &mut self, progress: lsp::ProgressParams, - server_id: usize, + server_id: LanguageServerId, disk_based_diagnostics_progress_token: Option, cx: &mut ModelContext, ) { @@ -2715,7 +2719,7 @@ impl Project { fn on_lsp_work_start( &mut self, - language_server_id: usize, + language_server_id: LanguageServerId, token: String, progress: LanguageServerProgress, cx: &mut ModelContext, @@ -2728,7 +2732,7 @@ impl Project { fn on_lsp_work_progress( &mut self, - language_server_id: usize, + language_server_id: LanguageServerId, token: String, progress: LanguageServerProgress, cx: &mut ModelContext, @@ -2755,7 +2759,7 @@ impl Project { fn on_lsp_work_end( &mut self, - language_server_id: usize, + language_server_id: LanguageServerId, token: String, cx: &mut ModelContext, ) { @@ 
-2767,7 +2771,7 @@ impl Project { fn on_lsp_did_change_watched_files( &mut self, - language_server_id: usize, + language_server_id: LanguageServerId, params: DidChangeWatchedFilesRegistrationOptions, cx: &mut ModelContext, ) { @@ -2785,7 +2789,7 @@ impl Project { async fn on_lsp_workspace_edit( this: WeakModelHandle, params: lsp::ApplyWorkspaceEditParams, - server_id: usize, + server_id: LanguageServerId, adapter: Arc, language_server: Arc, mut cx: AsyncAppContext, @@ -2818,14 +2822,14 @@ impl Project { fn broadcast_language_server_update( &self, - language_server_id: usize, + language_server_id: LanguageServerId, event: proto::update_language_server::Variant, ) { if let Some(project_id) = self.remote_id() { self.client .send(proto::UpdateLanguageServer { project_id, - language_server_id: language_server_id as u64, + language_server_id: language_server_id.0 as u64, variant: Some(event), }) .log_err(); @@ -2840,7 +2844,7 @@ impl Project { pub fn update_diagnostics( &mut self, - language_server_id: usize, + language_server_id: LanguageServerId, mut params: lsp::PublishDiagnosticsParams, disk_based_sources: &[String], cx: &mut ModelContext, @@ -2960,7 +2964,7 @@ impl Project { pub fn update_diagnostic_entries( &mut self, - server_id: usize, + server_id: LanguageServerId, abs_path: PathBuf, version: Option, diagnostics: Vec>>, @@ -2997,7 +3001,7 @@ impl Project { fn update_buffer_diagnostics( &mut self, buffer: &ModelHandle, - server_id: usize, + server_id: LanguageServerId, version: Option, mut diagnostics: Vec>>, cx: &mut ModelContext, @@ -4712,7 +4716,7 @@ impl Project { pub fn language_servers_running_disk_based_diagnostics( &self, - ) -> impl Iterator + '_ { + ) -> impl Iterator + '_ { self.language_server_statuses .iter() .filter_map(|(id, status)| { @@ -4736,7 +4740,7 @@ impl Project { pub fn diagnostic_summaries<'a>( &'a self, cx: &'a AppContext, - ) -> impl Iterator + 'a { + ) -> impl Iterator + 'a { self.visible_worktrees(cx).flat_map(move |worktree| { let worktree = worktree.read(cx); let worktree_id = worktree.id(); @@ -4750,7 +4754,7 @@ impl Project { pub fn disk_based_diagnostics_started( &mut self, - language_server_id: usize, + language_server_id: LanguageServerId, cx: &mut ModelContext, ) { cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id }); @@ -4758,7 +4762,7 @@ impl Project { pub fn disk_based_diagnostics_finished( &mut self, - language_server_id: usize, + language_server_id: LanguageServerId, cx: &mut ModelContext, ) { cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id }); @@ -5065,7 +5069,7 @@ impl Project { .update_diagnostic_summary(project_path.path.clone(), &summary); }); cx.emit(Event::DiagnosticsUpdated { - language_server_id: summary.language_server_id as usize, + language_server_id: LanguageServerId(summary.language_server_id as usize), path: project_path, }); } @@ -5086,7 +5090,7 @@ impl Project { .ok_or_else(|| anyhow!("invalid server"))?; this.update(&mut cx, |this, cx| { this.language_server_statuses.insert( - server.id as usize, + LanguageServerId(server.id as usize), LanguageServerStatus { name: server.name, pending_work: Default::default(), @@ -5106,7 +5110,7 @@ impl Project { mut cx: AsyncAppContext, ) -> Result<()> { this.update(&mut cx, |this, cx| { - let language_server_id = envelope.payload.language_server_id as usize; + let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize); match envelope .payload @@ -6142,7 +6146,7 @@ impl Project { &mut self, buffer: &ModelHandle, lsp_edits: impl 'static 
+ Send + IntoIterator, - server_id: usize, + server_id: LanguageServerId, version: Option, cx: &mut ModelContext, ) -> Task, String)>>> { @@ -6245,7 +6249,7 @@ impl Project { fn buffer_snapshot_for_lsp_version( &mut self, buffer: &ModelHandle, - server_id: usize, + server_id: LanguageServerId, version: Option, cx: &AppContext, ) -> Result { @@ -6314,14 +6318,18 @@ impl Project { fn language_server_for_buffer( &self, buffer: &Buffer, - server_id: usize, + server_id: LanguageServerId, cx: &AppContext, ) -> Option<(&Arc, &Arc)> { self.language_servers_iter_for_buffer(buffer, cx) .find(|(_, s)| s.server_id() == server_id) } - fn language_server_ids_for_buffer(&self, buffer: &Buffer, cx: &AppContext) -> Vec { + fn language_server_ids_for_buffer( + &self, + buffer: &Buffer, + cx: &AppContext, + ) -> Vec { if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) { let worktree_id = file.worktree_id(cx); language diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 6cda33dc47..5d062d42ce 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -303,7 +303,7 @@ async fn test_managing_language_servers( rust_buffer2.update(cx, |buffer, cx| { buffer.update_diagnostics( - 0, + LanguageServerId(0), DiagnosticSet::from_sorted_entries( vec![DiagnosticEntry { diagnostic: Default::default(), @@ -582,7 +582,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { project.update(cx, |project, cx| { project .update_diagnostics( - 0, + LanguageServerId(0), lsp::PublishDiagnosticsParams { uri: Url::from_file_path("/dir/a.rs").unwrap(), version: None, @@ -599,7 +599,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { .unwrap(); project .update_diagnostics( - 0, + LanguageServerId(0), lsp::PublishDiagnosticsParams { uri: Url::from_file_path("/dir/b.rs").unwrap(), version: None, @@ -675,7 +675,7 @@ async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) { project.update(cx, |project, cx| { project .update_diagnostics( - 0, + LanguageServerId(0), lsp::PublishDiagnosticsParams { uri: Url::from_file_path("/root/other.rs").unwrap(), version: None, @@ -767,7 +767,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { assert_eq!( events.next().await.unwrap(), Event::DiskBasedDiagnosticsStarted { - language_server_id: 0, + language_server_id: LanguageServerId(0), } ); @@ -784,7 +784,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { assert_eq!( events.next().await.unwrap(), Event::DiagnosticsUpdated { - language_server_id: 0, + language_server_id: LanguageServerId(0), path: (worktree_id, Path::new("a.rs")).into() } ); @@ -793,7 +793,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { assert_eq!( events.next().await.unwrap(), Event::DiskBasedDiagnosticsFinished { - language_server_id: 0 + language_server_id: LanguageServerId(0) } ); @@ -831,7 +831,7 @@ async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { assert_eq!( events.next().await.unwrap(), Event::DiagnosticsUpdated { - language_server_id: 0, + language_server_id: LanguageServerId(0), path: (worktree_id, Path::new("a.rs")).into() } ); @@ -892,7 +892,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC assert_eq!( events.next().await.unwrap(), Event::DiskBasedDiagnosticsStarted { - language_server_id: 1 + language_server_id: 
LanguageServerId(1) } ); project.read_with(cx, |project, _| { @@ -900,7 +900,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC project .language_servers_running_disk_based_diagnostics() .collect::>(), - [1] + [LanguageServerId(1)] ); }); @@ -910,7 +910,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC assert_eq!( events.next().await.unwrap(), Event::DiskBasedDiagnosticsFinished { - language_server_id: 1 + language_server_id: LanguageServerId(1) } ); project.read_with(cx, |project, _| { @@ -918,7 +918,7 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC project .language_servers_running_disk_based_diagnostics() .collect::>(), - [0; 0] + [LanguageServerId(0); 0] ); }); } @@ -1403,7 +1403,7 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) { project .update_buffer_diagnostics( &buffer, - 0, + LanguageServerId(0), None, vec![ DiagnosticEntry { @@ -1464,7 +1464,7 @@ async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppC project.update(cx, |project, cx| { project .update_diagnostic_entries( - 0, + LanguageServerId(0), Path::new("/dir/a.rs").to_owned(), None, vec![DiagnosticEntry { @@ -1481,7 +1481,7 @@ async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppC .unwrap(); project .update_diagnostic_entries( - 1, + LanguageServerId(1), Path::new("/dir/a.rs").to_owned(), None, vec![DiagnosticEntry { @@ -1633,7 +1633,7 @@ async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) { new_text: "".into(), }, ], - 0, + LanguageServerId(0), Some(lsp_document_version), cx, ) @@ -1728,7 +1728,7 @@ async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestApp new_text: "".into(), }, ], - 0, + LanguageServerId(0), None, cx, ) @@ -1832,7 +1832,7 @@ async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) { .unindent(), }, ], - 0, + LanguageServerId(0), None, cx, ) @@ -3011,7 +3011,9 @@ async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) { }; project - .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx)) + .update(cx, |p, cx| { + p.update_diagnostics(LanguageServerId(0), message, &[], cx) + }) .unwrap(); let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot()); diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 057d89f567..14fb4f2628 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -26,6 +26,7 @@ use language::{ }, Buffer, DiagnosticEntry, File as _, PointUtf16, Rope, RopeFingerprint, Unclipped, }; +use lsp::LanguageServerId; use parking_lot::Mutex; use postage::{ barrier, @@ -67,8 +68,14 @@ pub struct LocalWorktree { is_scanning: (watch::Sender, watch::Receiver), _background_scanner_task: Task<()>, share: Option, - diagnostics: HashMap, Vec<(usize, Vec>>)>>, - diagnostic_summaries: HashMap, HashMap>, + diagnostics: HashMap< + Arc, + Vec<( + LanguageServerId, + Vec>>, + )>, + >, + diagnostic_summaries: HashMap, HashMap>, client: Arc, fs: Arc, visible: bool, @@ -82,7 +89,7 @@ pub struct RemoteWorktree { updates_tx: Option>, snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>, replica_id: ReplicaId, - diagnostic_summaries: HashMap, HashMap>, + diagnostic_summaries: HashMap, HashMap>, visible: bool, disconnected: bool, } @@ -463,7 +470,7 @@ impl Worktree { pub fn diagnostic_summaries( &self, - ) -> impl Iterator, usize, DiagnosticSummary)> + '_ { + ) -> impl Iterator, LanguageServerId, 
DiagnosticSummary)> + '_ { match self { Worktree::Local(worktree) => &worktree.diagnostic_summaries, Worktree::Remote(worktree) => &worktree.diagnostic_summaries, @@ -518,13 +525,16 @@ impl LocalWorktree { pub fn diagnostics_for_path( &self, path: &Path, - ) -> Vec<(usize, Vec>>)> { + ) -> Vec<( + LanguageServerId, + Vec>>, + )> { self.diagnostics.get(path).cloned().unwrap_or_default() } pub fn update_diagnostics( &mut self, - server_id: usize, + server_id: LanguageServerId, worktree_path: Arc, diagnostics: Vec>>, _: &mut ModelContext, @@ -570,7 +580,7 @@ impl LocalWorktree { worktree_id: self.id().to_proto(), summary: Some(proto::DiagnosticSummary { path: worktree_path.to_string_lossy().to_string(), - language_server_id: server_id as u64, + language_server_id: server_id.0 as u64, error_count: new_summary.error_count as u32, warning_count: new_summary.warning_count as u32, }), @@ -1135,7 +1145,7 @@ impl RemoteWorktree { path: Arc, summary: &proto::DiagnosticSummary, ) { - let server_id = summary.language_server_id as usize; + let server_id = LanguageServerId(summary.language_server_id as usize); let summary = DiagnosticSummary { error_count: summary.error_count as usize, warning_count: summary.warning_count as usize, From 0ebe44bfd5b29d8c6a788650580e2d7a46d88eb9 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 19 Apr 2023 18:09:03 -0700 Subject: [PATCH 18/26] Handle multiple language servers for a given path in project diagnostics view --- crates/diagnostics/src/diagnostics.rs | 338 ++++++++++++++++++++++++-- crates/language/src/buffer.rs | 24 +- crates/language/src/diagnostic_set.rs | 26 +- 3 files changed, 347 insertions(+), 41 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 9b2964036e..a2793b1fff 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -1,7 +1,7 @@ pub mod items; use anyhow::Result; -use collections::{BTreeMap, HashSet}; +use collections::{BTreeSet, HashSet}; use editor::{ diagnostic_block_renderer, display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock}, @@ -57,7 +57,7 @@ struct ProjectDiagnosticsEditor { summary: DiagnosticSummary, excerpts: ModelHandle, path_states: Vec, - paths_to_update: BTreeMap, + paths_to_update: BTreeSet<(ProjectPath, LanguageServerId)>, } struct PathState { @@ -73,6 +73,7 @@ struct Jump { } struct DiagnosticGroupState { + language_server_id: LanguageServerId, primary_diagnostic: DiagnosticEntry, primary_excerpt_ix: usize, excerpts: Vec, @@ -150,7 +151,7 @@ impl ProjectDiagnosticsEditor { path, } => { this.paths_to_update - .insert(path.clone(), *language_server_id); + .insert((path.clone(), *language_server_id)); } _ => {} }) @@ -203,7 +204,7 @@ impl ProjectDiagnosticsEditor { cx: &mut ViewContext, ) { let mut paths = Vec::new(); - self.paths_to_update.retain(|path, server_id| { + self.paths_to_update.retain(|(path, server_id)| { if language_server_id .map_or(true, |language_server_id| language_server_id == *server_id) { @@ -220,7 +221,9 @@ impl ProjectDiagnosticsEditor { let buffer = project .update(&mut cx, |project, cx| project.open_buffer(path.clone(), cx)) .await?; - this.update(&mut cx, |this, cx| this.populate_excerpts(path, buffer, cx)) + this.update(&mut cx, |this, cx| { + this.populate_excerpts(path, language_server_id, buffer, cx) + }) } Result::<_, anyhow::Error>::Ok(()) } @@ -232,6 +235,7 @@ impl ProjectDiagnosticsEditor { fn populate_excerpts( &mut self, path: ProjectPath, + language_server_id: 
Option, buffer: ModelHandle, cx: &mut ViewContext, ) { @@ -270,9 +274,9 @@ impl ProjectDiagnosticsEditor { let excerpts_snapshot = self.excerpts.update(cx, |excerpts, excerpts_cx| { let mut old_groups = path_state.diagnostic_groups.iter().enumerate().peekable(); let mut new_groups = snapshot - .diagnostic_groups() + .diagnostic_groups(language_server_id) .into_iter() - .filter(|group| { + .filter(|(_, group)| { group.entries[group.primary_ix].diagnostic.severity <= DiagnosticSeverity::WARNING }) @@ -284,12 +288,27 @@ impl ProjectDiagnosticsEditor { match (old_groups.peek(), new_groups.peek()) { (None, None) => break, (None, Some(_)) => to_insert = new_groups.next(), - (Some(_), None) => to_remove = old_groups.next(), - (Some((_, old_group)), Some(new_group)) => { + (Some((_, old_group)), None) => { + if language_server_id.map_or(true, |id| id == old_group.language_server_id) + { + to_remove = old_groups.next(); + } else { + to_keep = old_groups.next(); + } + } + (Some((_, old_group)), Some((_, new_group))) => { let old_primary = &old_group.primary_diagnostic; let new_primary = &new_group.entries[new_group.primary_ix]; match compare_diagnostics(old_primary, new_primary, &snapshot) { - Ordering::Less => to_remove = old_groups.next(), + Ordering::Less => { + if language_server_id + .map_or(true, |id| id == old_group.language_server_id) + { + to_remove = old_groups.next(); + } else { + to_keep = old_groups.next(); + } + } Ordering::Equal => { to_keep = old_groups.next(); new_groups.next(); @@ -299,8 +318,9 @@ impl ProjectDiagnosticsEditor { } } - if let Some(group) = to_insert { + if let Some((language_server_id, group)) = to_insert { let mut group_state = DiagnosticGroupState { + language_server_id, primary_diagnostic: group.entries[group.primary_ix].clone(), primary_excerpt_ix: 0, excerpts: Default::default(), @@ -778,26 +798,24 @@ mod tests { }; use gpui::TestAppContext; use language::{Diagnostic, DiagnosticEntry, DiagnosticSeverity, PointUtf16, Unclipped}; + use project::FakeFs; use serde_json::json; use unindent::Unindent as _; - use workspace::AppState; #[gpui::test] async fn test_diagnostics(cx: &mut TestAppContext) { - let app_state = cx.update(AppState::test); - app_state - .fs - .as_fake() - .insert_tree( - "/test", - json!({ - "consts.rs": " + Settings::test_async(cx); + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/test", + json!({ + "consts.rs": " const a: i32 = 'a'; const b: i32 = c; " - .unindent(), + .unindent(), - "main.rs": " + "main.rs": " fn main() { let x = vec![]; let y = vec![]; @@ -809,13 +827,13 @@ mod tests { d(x); } " - .unindent(), - }), - ) - .await; + .unindent(), + }), + ) + .await; let language_server_id = LanguageServerId(0); - let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await; + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); // Create some diagnostics @@ -1187,6 +1205,272 @@ mod tests { }); } + #[gpui::test] + async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) { + Settings::test_async(cx); + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/test", + json!({ + "main.js": " + a(); + b(); + c(); + d(); + e(); + ".unindent() + }), + ) + .await; + + let server_id_1 = LanguageServerId(100); + let server_id_2 = LanguageServerId(101); + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx)); + 
+ let view = cx.add_view(&workspace, |cx| { + ProjectDiagnosticsEditor::new(project.clone(), workspace.downgrade(), cx) + }); + + // Two language servers start updating diagnostics + project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(server_id_1, cx); + project.disk_based_diagnostics_started(server_id_2, cx); + project + .update_diagnostic_entries( + server_id_1, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 1)), + diagnostic: Diagnostic { + message: "error 1".to_string(), + severity: DiagnosticSeverity::WARNING, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project + .update_diagnostic_entries( + server_id_2, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(1, 0))..Unclipped(PointUtf16::new(1, 1)), + diagnostic: Diagnostic { + message: "warning 1".to_string(), + severity: DiagnosticSeverity::ERROR, + is_primary: true, + is_disk_based: true, + group_id: 2, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + }); + + // The first language server finishes + project.update(cx, |project, cx| { + project.disk_based_diagnostics_finished(server_id_1, cx); + }); + + // Only the first language server's diagnostics are shown. + cx.foreground().run_until_parked(); + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "a();\n", // + "b();", + ) + ); + }); + + // The second language server finishes + project.update(cx, |project, cx| { + project.disk_based_diagnostics_finished(server_id_2, cx); + }); + + // Both language server's diagnostics are shown. + cx.foreground().run_until_parked(); + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + (6, "collapsed context".into()), + (7, "diagnostic header".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "a();\n", // location + "b();\n", // + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "a();\n", // context + "b();\n", // + "c();", // context + ) + ); + }); + + // Both language servers start updating diagnostics, and the first server finishes. 
+ project.update(cx, |project, cx| { + project.disk_based_diagnostics_started(server_id_1, cx); + project.disk_based_diagnostics_started(server_id_2, cx); + project + .update_diagnostic_entries( + server_id_1, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(2, 0))..Unclipped(PointUtf16::new(2, 1)), + diagnostic: Diagnostic { + message: "warning 2".to_string(), + severity: DiagnosticSeverity::WARNING, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project + .update_diagnostic_entries( + server_id_2, + PathBuf::from("/test/main.rs"), + None, + vec![], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(server_id_1, cx); + }); + + // Only the first language server's diagnostics are updated. + cx.foreground().run_until_parked(); + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + (7, "collapsed context".into()), + (8, "diagnostic header".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "a();\n", // location + "b();\n", // + "c();\n", // context + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "b();\n", // context + "c();\n", // + "d();", // context + ) + ); + }); + + // The second language server finishes. + project.update(cx, |project, cx| { + project + .update_diagnostic_entries( + server_id_2, + PathBuf::from("/test/main.js"), + None, + vec![DiagnosticEntry { + range: Unclipped(PointUtf16::new(3, 0))..Unclipped(PointUtf16::new(3, 1)), + diagnostic: Diagnostic { + message: "warning 2".to_string(), + severity: DiagnosticSeverity::WARNING, + is_primary: true, + is_disk_based: true, + group_id: 1, + ..Default::default() + }, + }], + cx, + ) + .unwrap(); + project.disk_based_diagnostics_finished(server_id_2, cx); + }); + + // Both language servers' diagnostics are updated. 
+ cx.foreground().run_until_parked(); + view.update(cx, |view, cx| { + assert_eq!( + editor_blocks(&view.editor, cx), + [ + (0, "path header block".into()), + (2, "diagnostic header".into()), + (7, "collapsed context".into()), + (8, "diagnostic header".into()), + ] + ); + assert_eq!( + view.editor.update(cx, |editor, cx| editor.display_text(cx)), + concat!( + "\n", // filename + "\n", // padding + // diagnostic group 1 + "\n", // primary message + "\n", // padding + "b();\n", // location + "c();\n", // + "d();\n", // context + "\n", // collapsed context + // diagnostic group 2 + "\n", // primary message + "\n", // padding + "c();\n", // context + "d();\n", // + "e();", // context + ) + ); + }); + } + fn editor_blocks(editor: &ViewHandle, cx: &mut AppContext) -> Vec<(u32, String)> { let mut presenter = cx.build_presenter(editor.id(), 0., Default::default()); let mut cx = presenter.build_layout_context(Default::default(), false, cx); diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index c52ca4d43e..25536adcbb 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -2548,16 +2548,26 @@ impl BufferSnapshot { }) } - pub fn diagnostic_groups(&self) -> Vec> { + pub fn diagnostic_groups( + &self, + language_server_id: Option, + ) -> Vec<(LanguageServerId, DiagnosticGroup)> { let mut groups = Vec::new(); - for diagnostics in self.diagnostics.values() { - diagnostics.groups(&mut groups, self); + + if let Some(language_server_id) = language_server_id { + if let Some(diagnostics) = self.diagnostics.get(&language_server_id) { + diagnostics.groups(language_server_id, &mut groups, self); + } + } else { + for (&language_server_id, diagnostics) in self.diagnostics.iter() { + diagnostics.groups(language_server_id, &mut groups, self); + } } - groups.sort_by(|a, b| { - let a_start = &a.entries[a.primary_ix].range.start; - let b_start = &b.entries[b.primary_ix].range.start; - a_start.cmp(b_start, self) + groups.sort_by(|(id_a, group_a), (id_b, group_b)| { + let a_start = &group_a.entries[group_a.primary_ix].range.start; + let b_start = &group_b.entries[group_b.primary_ix].range.start; + a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b)) }); groups diff --git a/crates/language/src/diagnostic_set.rs b/crates/language/src/diagnostic_set.rs index cde5a6fb2b..948a7ee394 100644 --- a/crates/language/src/diagnostic_set.rs +++ b/crates/language/src/diagnostic_set.rs @@ -1,5 +1,6 @@ use crate::Diagnostic; use collections::HashMap; +use lsp::LanguageServerId; use std::{ cmp::{Ordering, Reverse}, iter, @@ -129,7 +130,12 @@ impl DiagnosticSet { }) } - pub fn groups(&self, output: &mut Vec>, buffer: &text::BufferSnapshot) { + pub fn groups( + &self, + language_server_id: LanguageServerId, + output: &mut Vec<(LanguageServerId, DiagnosticGroup)>, + buffer: &text::BufferSnapshot, + ) { let mut groups = HashMap::default(); for entry in self.diagnostics.iter() { groups @@ -144,16 +150,22 @@ impl DiagnosticSet { entries .iter() .position(|entry| entry.diagnostic.is_primary) - .map(|primary_ix| DiagnosticGroup { - entries, - primary_ix, + .map(|primary_ix| { + ( + language_server_id, + DiagnosticGroup { + entries, + primary_ix, + }, + ) }) })); - output[start_ix..].sort_unstable_by(|a, b| { - a.entries[a.primary_ix] + output[start_ix..].sort_unstable_by(|(id_a, group_a), (id_b, group_b)| { + group_a.entries[group_a.primary_ix] .range .start - .cmp(&b.entries[b.primary_ix].range.start, buffer) + .cmp(&group_b.entries[group_b.primary_ix].range.start, buffer) + .then_with(|| 
id_a.cmp(&id_b)) }); } From 960a2bc589a7f88ce28e1ee5cb63bfccb04870de Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 19 Apr 2023 18:13:50 -0700 Subject: [PATCH 19/26] Don't use ESLint for now --- crates/zed/src/languages.rs | 21 +++++++++------------ crates/zed/src/languages/typescript.rs | 9 +-------- 2 files changed, 10 insertions(+), 20 deletions(-) diff --git a/crates/zed/src/languages.rs b/crates/zed/src/languages.rs index 4d9e0ae36a..9ab6e1d778 100644 --- a/crates/zed/src/languages.rs +++ b/crates/zed/src/languages.rs @@ -89,26 +89,23 @@ pub fn init( ( "tsx", tree_sitter_typescript::language_tsx(), - vec![ - adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), - adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), - ], + vec![adapter_arc(typescript::TypeScriptLspAdapter::new( + node_runtime.clone(), + ))], ), ( "typescript", tree_sitter_typescript::language_typescript(), - vec![ - adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), - adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), - ], + vec![adapter_arc(typescript::TypeScriptLspAdapter::new( + node_runtime.clone(), + ))], ), ( "javascript", tree_sitter_typescript::language_tsx(), - vec![ - adapter_arc(typescript::TypeScriptLspAdapter::new(node_runtime.clone())), - // adapter_arc(typescript::EsLintLspAdapter::new(node_runtime.clone())), - ], + vec![adapter_arc(typescript::TypeScriptLspAdapter::new( + node_runtime.clone(), + ))], ), ( "html", diff --git a/crates/zed/src/languages/typescript.rs b/crates/zed/src/languages/typescript.rs index b422e99926..bfd6c11a27 100644 --- a/crates/zed/src/languages/typescript.rs +++ b/crates/zed/src/languages/typescript.rs @@ -175,19 +175,12 @@ impl EsLintLspAdapter { const SERVER_PATH: &'static str = "node_modules/vscode-langservers-extracted/lib/eslint-language-server/eslintServer.js"; + #[allow(unused)] pub fn new(node: Arc) -> Self { EsLintLspAdapter { node } } } -// "workspaceFolder": { -// "name": "testing_ts", -// "uri": "file:///Users/julia/Stuff/testing_ts" -// }, -// "workingDirectory": "file:///Users/julia/Stuff/testing_ts", -// "nodePath": "/opt/homebrew/opt/node@18/bin/node", -// "experimental": {}, - #[async_trait] impl LspAdapter for EsLintLspAdapter { fn workspace_configuration(&self, _: &mut AppContext) -> Option> { From 32c57bcd22fa95257cc55b1e24f3f2576d7106dc Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 20 Apr 2023 00:11:45 -0700 Subject: [PATCH 20/26] Store buffer's diagnostic sets in a smallvec --- Cargo.toml | 1 + crates/activity_indicator/Cargo.toml | 2 +- crates/clock/Cargo.toml | 2 +- crates/context_menu/Cargo.toml | 2 +- crates/diagnostics/Cargo.toml | 2 +- crates/editor/Cargo.toml | 2 +- crates/gpui/Cargo.toml | 2 +- crates/language/Cargo.toml | 2 +- crates/language/src/buffer.rs | 32 +++++++++++++++++----------- crates/rope/Cargo.toml | 2 +- crates/search/Cargo.toml | 2 +- crates/snippet/Cargo.toml | 2 +- crates/terminal/Cargo.toml | 2 +- crates/terminal_view/Cargo.toml | 2 +- crates/text/Cargo.toml | 2 +- crates/theme_testbench/Cargo.toml | 2 +- crates/workspace/Cargo.toml | 2 +- crates/zed/Cargo.toml | 2 +- 18 files changed, 37 insertions(+), 28 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 8113c0cfcf..1275571668 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -76,6 +76,7 @@ serde_derive = { version = "1.0", features = ["deserialize_in_place"] } serde_json = { version = "1.0", features = ["preserve_order", "raw_value"] } rand = { version = "0.8" } postage = { 
version = "0.5", features = ["futures-traits"] } +smallvec = { version = "1.6", features = ["union"] } [patch.crates-io] tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "c51896d32dcc11a38e41f36e3deb1a6a9c4f4b14" } diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml index 78a4e752b2..b54271ee2d 100644 --- a/crates/activity_indicator/Cargo.toml +++ b/crates/activity_indicator/Cargo.toml @@ -18,4 +18,4 @@ settings = { path = "../settings" } util = { path = "../util" } workspace = { path = "../workspace" } futures = "0.3" -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } diff --git a/crates/clock/Cargo.toml b/crates/clock/Cargo.toml index 1705fdc6d5..2ea2779659 100644 --- a/crates/clock/Cargo.toml +++ b/crates/clock/Cargo.toml @@ -9,4 +9,4 @@ path = "src/clock.rs" doctest = false [dependencies] -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } diff --git a/crates/context_menu/Cargo.toml b/crates/context_menu/Cargo.toml index d764d4ddb8..64449bdc92 100644 --- a/crates/context_menu/Cargo.toml +++ b/crates/context_menu/Cargo.toml @@ -13,4 +13,4 @@ gpui = { path = "../gpui" } menu = { path = "../menu" } settings = { path = "../settings" } theme = { path = "../theme" } -smallvec = "1.6" +smallvec = { workspace = true } diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index 83ee243b82..45621f10c6 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -10,7 +10,7 @@ doctest = false [dependencies] anyhow = "1.0" -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } collections = { path = "../collections" } editor = { path = "../editor" } language = { path = "../language" } diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index 82b7082576..4cd81df1dc 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -58,7 +58,7 @@ postage = { workspace = true } rand = { version = "0.8.3", optional = true } serde = { workspace = true } serde_derive = { workspace = true } -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } smol = "1.2" tree-sitter-rust = { version = "*", optional = true } tree-sitter-html = { version = "*", optional = true } diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 8715142dd3..88c40665c2 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -44,7 +44,7 @@ seahash = "4.1" serde = { workspace = true } serde_derive = { workspace = true } serde_json = { workspace = true } -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } smol = "1.2" time = { version = "0.3", features = ["serde", "serde-well-known"] } tiny-skia = "0.5" diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 4311f04391..4e15ccf679 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -50,7 +50,7 @@ serde = { workspace = true } serde_derive = { workspace = true } serde_json = { workspace = true } similar = "1.3" -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } smol = "1.2" tree-sitter = "0.20" tree-sitter-rust = { version = "*", optional = true } diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 25536adcbb..65e4d3b8b6 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -13,7 +13,6 @@ use crate::{ }; use anyhow::{anyhow, Result}; use 
clock::ReplicaId; -use collections::HashMap; use fs::LineEnding; use futures::FutureExt as _; use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task}; @@ -21,6 +20,7 @@ use lsp::LanguageServerId; use parking_lot::Mutex; use settings::Settings; use similar::{ChangeTag, TextDiff}; +use smallvec::SmallVec; use smol::future::yield_now; use std::{ any::Any, @@ -73,7 +73,7 @@ pub struct Buffer { syntax_map: Mutex, parsing_in_background: bool, parse_count: usize, - diagnostics: HashMap, + diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>, remote_selections: TreeMap, selections_update_count: usize, diagnostics_update_count: usize, @@ -90,7 +90,7 @@ pub struct BufferSnapshot { pub git_diff: git::diff::BufferDiff, pub(crate) syntax: SyntaxSnapshot, file: Option>, - diagnostics: HashMap, + diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>, diagnostics_update_count: usize, file_update_count: usize, git_diff_update_count: usize, @@ -1652,7 +1652,10 @@ impl Buffer { cx: &mut ModelContext, ) { if lamport_timestamp > self.diagnostics_timestamp { - self.diagnostics.insert(server_id, diagnostics); + match self.diagnostics.binary_search_by_key(&server_id, |e| e.0) { + Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)), + Ok(ix) => self.diagnostics[ix].1 = diagnostics, + }; self.diagnostics_timestamp = lamport_timestamp; self.diagnostics_update_count += 1; self.text.lamport_clock.observe(lamport_timestamp); @@ -2530,8 +2533,8 @@ impl BufferSnapshot { { let mut iterators: Vec<_> = self .diagnostics - .values() - .map(|collection| { + .iter() + .map(|(_, collection)| { collection .range::(search_range.clone(), self, true, reversed) .peekable() @@ -2555,12 +2558,17 @@ impl BufferSnapshot { let mut groups = Vec::new(); if let Some(language_server_id) = language_server_id { - if let Some(diagnostics) = self.diagnostics.get(&language_server_id) { - diagnostics.groups(language_server_id, &mut groups, self); + if let Ok(ix) = self + .diagnostics + .binary_search_by_key(&language_server_id, |e| e.0) + { + self.diagnostics[ix] + .1 + .groups(language_server_id, &mut groups, self); } } else { - for (&language_server_id, diagnostics) in self.diagnostics.iter() { - diagnostics.groups(language_server_id, &mut groups, self); + for (language_server_id, diagnostics) in self.diagnostics.iter() { + diagnostics.groups(*language_server_id, &mut groups, self); } } @@ -2581,8 +2589,8 @@ impl BufferSnapshot { O: 'a + FromAnchor, { self.diagnostics - .values() - .flat_map(move |set| set.group(group_id, self)) + .iter() + .flat_map(move |(_, set)| set.group(group_id, self)) } pub fn diagnostics_update_count(&self) -> usize { diff --git a/crates/rope/Cargo.toml b/crates/rope/Cargo.toml index 4566b8397d..bd72c7c263 100644 --- a/crates/rope/Cargo.toml +++ b/crates/rope/Cargo.toml @@ -9,7 +9,7 @@ path = "src/rope.rs" [dependencies] bromberg_sl2 = { git = "https://github.com/zed-industries/bromberg_sl2", rev = "950bc5482c216c395049ae33ae4501e08975f17f" } -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } sum_tree = { path = "../sum_tree" } arrayvec = "0.7.1" log = { version = "0.4.16", features = ["kv_unstable_serde"] } diff --git a/crates/search/Cargo.toml b/crates/search/Cargo.toml index f786d4abc6..fb4e79a703 100644 --- a/crates/search/Cargo.toml +++ b/crates/search/Cargo.toml @@ -25,7 +25,7 @@ log = { version = "0.4.16", features = ["kv_unstable_serde"] } postage = { workspace = true } serde = { workspace = true } serde_derive = { workspace = 
true } -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } smol = "1.2" [dev-dependencies] diff --git a/crates/snippet/Cargo.toml b/crates/snippet/Cargo.toml index 429f5d416e..67f77e4483 100644 --- a/crates/snippet/Cargo.toml +++ b/crates/snippet/Cargo.toml @@ -10,4 +10,4 @@ doctest = false [dependencies] anyhow = "1.0" -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } diff --git a/crates/terminal/Cargo.toml b/crates/terminal/Cargo.toml index 56796fca59..107d325677 100644 --- a/crates/terminal/Cargo.toml +++ b/crates/terminal/Cargo.toml @@ -17,7 +17,7 @@ theme = { path = "../theme" } util = { path = "../util" } alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "a51dbe25d67e84d6ed4261e640d3954fbdd9be45" } procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false } -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } smol = "1.2.5" mio-extras = "2.0.6" futures = "0.3" diff --git a/crates/terminal_view/Cargo.toml b/crates/terminal_view/Cargo.toml index 726a1a674f..f271cd118d 100644 --- a/crates/terminal_view/Cargo.toml +++ b/crates/terminal_view/Cargo.toml @@ -21,7 +21,7 @@ workspace = { path = "../workspace" } db = { path = "../db" } procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false } terminal = { path = "../terminal" } -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } smol = "1.2.5" mio-extras = "2.0.6" futures = "0.3" diff --git a/crates/text/Cargo.toml b/crates/text/Cargo.toml index 362a060c1f..d85464a446 100644 --- a/crates/text/Cargo.toml +++ b/crates/text/Cargo.toml @@ -24,7 +24,7 @@ log = { version = "0.4.16", features = ["kv_unstable_serde"] } parking_lot = "0.11" postage = { workspace = true } rand = { version = "0.8.3", optional = true } -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } util = { path = "../util" } regex = "1.5" diff --git a/crates/theme_testbench/Cargo.toml b/crates/theme_testbench/Cargo.toml index 763727fc68..ade8e39550 100644 --- a/crates/theme_testbench/Cargo.toml +++ b/crates/theme_testbench/Cargo.toml @@ -16,4 +16,4 @@ settings = { path = "../settings" } workspace = { path = "../workspace" } project = { path = "../project" } -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 5a2380de3f..84db04e9a6 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -47,7 +47,7 @@ postage = { workspace = true } serde = { workspace = true } serde_derive = { workspace = true } serde_json = { workspace = true } -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } indoc = "1.0.4" uuid = { version = "1.1.2", features = ["v4"] } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 180a1fce24..3df019bb45 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -96,7 +96,7 @@ serde_derive = { workspace = true } serde_json = { workspace = true } serde_path_to_error = "0.1.4" simplelog = "0.9" -smallvec = { version = "1.6", features = ["union"] } +smallvec = { workspace = true } smol = "1.2.5" tempdir = { version = "0.3.7" } thiserror = "1.0.29" From 21bb13d309d2938c2e3676377e7742c738779df9 Mon Sep 17 00:00:00 2001 From: 
Max Brunsfeld Date: Thu, 20 Apr 2023 09:03:42 -0700 Subject: [PATCH 21/26] Fix comment formatting errors for rust 1.69 --- crates/editor/src/editor_tests.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index ce293ed064..646a8f33d6 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -4371,7 +4371,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext) cx.set_state( &[ "one ", // - "twoˇ", // + "twoˇ", // "three ", // "four", // ] @@ -4446,7 +4446,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext) &[ "one", // "", // - "twoˇ", // + "twoˇ", // "", // "three", // "four", // @@ -4461,7 +4461,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut gpui::TestAppContext) cx.assert_editor_state( &[ "one ", // - "twoˇ", // + "twoˇ", // "three ", // "four", // ] From 3a855184bccc21baa69aa812a474d6253ea85e98 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 20 Apr 2023 09:52:07 -0700 Subject: [PATCH 22/26] v0.84.x dev --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0109c6cbaa..f477014590 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8522,7 +8522,7 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" [[package]] name = "zed" -version = "0.83.0" +version = "0.84.0" dependencies = [ "activity_indicator", "anyhow", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 3df019bb45..7eb35f3b3e 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo "] description = "The fast, collaborative code editor." edition = "2021" name = "zed" -version = "0.83.0" +version = "0.84.0" publish = false [lib] From abdfb5a4513b0bb3713b8050bc2ad01f90fd95e2 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 20 Apr 2023 09:52:32 -0700 Subject: [PATCH 23/26] collab 0.9.0 --- Cargo.lock | 2 +- crates/collab/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f477014590..b3e5c38a46 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1192,7 +1192,7 @@ dependencies = [ [[package]] name = "collab" -version = "0.8.3" +version = "0.9.0" dependencies = [ "anyhow", "async-tungstenite", diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 58fc602c94..778ae42ec4 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -3,7 +3,7 @@ authors = ["Nathan Sobo "] default-run = "collab" edition = "2021" name = "collab" -version = "0.8.3" +version = "0.9.0" publish = false [[bin]] From dfdc826015852b8cb309d568342fd52603b9f577 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 20 Apr 2023 17:24:27 -0700 Subject: [PATCH 24/26] Create a new setting to adjust the line height in the terminal --- assets/settings/default.json | 12 ++- crates/settings/src/settings.rs | 92 ++++++++++++++++---- crates/terminal_view/src/terminal_element.rs | 2 +- 3 files changed, 89 insertions(+), 17 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 1ecfbf03a1..930676fb16 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -197,7 +197,17 @@ // enviroment. Use `:` to seperate multiple values. "env": { // "KEY": "value1:value2" - } + }, + // Set the terminal's line height. + // May take 3 values: + // 1. 
Use a line height that's comfortable for reading, 1.618 + // "line_height": "comfortable" + // 2. Use a standard line height, 1.3. This option is useful for TUIs, + // particularly if they use box characters + // "line_height": "standard", + // 3. Use a custom line height. + // "line_height": 1.2, + "line_height": "comfortable" // Set the terminal's font size. If this option is not included, // the terminal will default to matching the buffer's font size. // "font_size": "15" diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 6942a6e57b..285e06c2c8 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -12,7 +12,7 @@ use schemars::{ schema::{InstanceType, ObjectValidation, Schema, SchemaObject, SingleOrVec}, JsonSchema, }; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde::{de::DeserializeOwned, Deserialize, Deserializer, Serialize}; use serde_json::Value; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, @@ -252,6 +252,7 @@ pub struct TerminalSettings { pub working_directory: Option, pub font_size: Option, pub font_family: Option, + pub line_height: Option, pub font_features: Option, pub env: Option>, pub blinking: Option, @@ -260,6 +261,56 @@ pub struct TerminalSettings { pub copy_on_select: Option, } +#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)] +#[serde(rename_all = "snake_case")] +#[serde(untagged)] +pub enum TerminalLineHeight { + #[default] + #[serde(deserialize_with = "comfortable")] + Comfortable, + #[serde(deserialize_with = "standard")] + Standard, + Custom(f32), +} + +// Copied from: https://github.com/serde-rs/serde/issues/1158#issuecomment-365362959 +fn comfortable<'de, D>(deserializer: D) -> Result<(), D::Error> +where + D: Deserializer<'de>, +{ + #[derive(Deserialize)] + enum Helper { + #[serde(rename = "comfortable")] + Variant, + } + Helper::deserialize(deserializer)?; + Ok(()) +} + +// Copied from: https://github.com/serde-rs/serde/issues/1158#issuecomment-365362959 +fn standard<'de, D>(deserializer: D) -> Result<(), D::Error> +where + D: Deserializer<'de>, +{ + #[derive(Deserialize)] + enum Helper { + #[serde(rename = "standard")] + Variant, + } + Helper::deserialize(deserializer)?; + Ok(()) +} + +impl TerminalLineHeight { + fn value(&self) -> f32 { + match self { + TerminalLineHeight::Comfortable => 1.618, + TerminalLineHeight::Standard => 1.3, + TerminalLineHeight::Custom(line_height) => *line_height, + } + } +} + #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum TerminalBlink { @@ -316,6 +367,14 @@ impl Default for WorkingDirectory { } } +impl TerminalSettings { + fn line_height(&self) -> Option { + self.line_height + .to_owned() + .map(|line_height| line_height.value()) + } +} + #[derive(PartialEq, Eq, Debug, Default, Copy, Clone, Hash, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum DockAnchor { @@ -640,16 +699,6 @@ impl Settings { }) } - fn terminal_setting(&self, f: F) -> R - where - F: Fn(&TerminalSettings) -> Option<&R>, - { - f(&self.terminal_overrides) - .or_else(|| f(&self.terminal_defaults)) - .cloned() - .unwrap_or_else(|| R::default()) - } - pub fn telemetry(&self) -> TelemetrySettings { TelemetrySettings { diagnostics: Some(self.telemetry_diagnostics()), @@ -671,20 +720,33 @@ impl Settings { .expect("missing default") } + fn terminal_setting(&self, f: F) -> R + where + F: Fn(&TerminalSettings) -> Option, + { + None.or_else(|| 
f(&self.terminal_overrides)) + .or_else(|| f(&self.terminal_defaults)) + .expect("missing default") + } + + pub fn terminal_line_height(&self) -> f32 { + self.terminal_setting(|terminal_setting| terminal_setting.line_height()) + } + pub fn terminal_scroll(&self) -> AlternateScroll { - self.terminal_setting(|terminal_setting| terminal_setting.alternate_scroll.as_ref()) + self.terminal_setting(|terminal_setting| terminal_setting.alternate_scroll.to_owned()) } pub fn terminal_shell(&self) -> Shell { - self.terminal_setting(|terminal_setting| terminal_setting.shell.as_ref()) + self.terminal_setting(|terminal_setting| terminal_setting.shell.to_owned()) } pub fn terminal_env(&self) -> HashMap { - self.terminal_setting(|terminal_setting| terminal_setting.env.as_ref()) + self.terminal_setting(|terminal_setting| terminal_setting.env.to_owned()) } pub fn terminal_strategy(&self) -> WorkingDirectory { - self.terminal_setting(|terminal_setting| terminal_setting.working_directory.as_ref()) + self.terminal_setting(|terminal_setting| terminal_setting.working_directory.to_owned()) } #[cfg(any(test, feature = "test-support"))] diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index 252ab128be..af8f303bbd 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -567,7 +567,7 @@ impl Element for TerminalElement { let selection_color = settings.theme.editor.selection.selection; let match_color = settings.theme.search.match_background; let dimensions = { - let line_height = font_cache.line_height(text_style.font_size); + let line_height = text_style.font_size * settings.terminal_line_height(); let cell_width = font_cache.em_advance(text_style.font_id, text_style.font_size); TerminalSize::new(line_height, cell_width, constraint.max) }; From c72b70d4ae96ce21c236e755335b8f75746c692e Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 20 Apr 2023 17:33:06 -0700 Subject: [PATCH 25/26] Fixed silly custom settings parsing --- crates/settings/src/settings.rs | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 285e06c2c8..5c3ee7180b 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -263,44 +263,13 @@ pub struct TerminalSettings { #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)] #[serde(rename_all = "snake_case")] -#[serde(untagged)] pub enum TerminalLineHeight { #[default] - #[serde(deserialize_with = "comfortable")] Comfortable, - #[serde(deserialize_with = "standard")] Standard, Custom(f32), } -// Copied from: https://github.com/serde-rs/serde/issues/1158#issuecomment-365362959 -fn comfortable<'de, D>(deserializer: D) -> Result<(), D::Error> -where - D: Deserializer<'de>, -{ - #[derive(Deserialize)] - enum Helper { - #[serde(rename = "comfortable")] - Variant, - } - Helper::deserialize(deserializer)?; - Ok(()) -} - -// Copied from: https://github.com/serde-rs/serde/issues/1158#issuecomment-365362959 -fn standard<'de, D>(deserializer: D) -> Result<(), D::Error> -where - D: Deserializer<'de>, -{ - #[derive(Deserialize)] - enum Helper { - #[serde(rename = "standard")] - Variant, - } - Helper::deserialize(deserializer)?; - Ok(()) -} - impl TerminalLineHeight { fn value(&self) -> f32 { match self { From 1fa52adabd5a1886f78c9016ceb0a8f573bf6031 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 20 Apr 2023 17:34:47 -0700 Subject: [PATCH 
26/26] Fix warning --- assets/settings/default.json | 5 ++++- crates/settings/src/settings.rs | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 454aedfd56..b12bd00efa 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -206,7 +206,10 @@ // particularly if they use box characters // "line_height": "standard", // 3. Use a custom line height. - // "line_height": 1.2, + // "line_height": { + // "custom": 2 + // }, + // "line_height": "comfortable" // Set the terminal's font size. If this option is not included, // the terminal will default to matching the buffer's font size. diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 5c3ee7180b..f2082be6bb 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -12,7 +12,7 @@ use schemars::{ schema::{InstanceType, ObjectValidation, Schema, SchemaObject, SingleOrVec}, JsonSchema, }; -use serde::{de::DeserializeOwned, Deserialize, Deserializer, Serialize}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde_json::Value; use sqlez::{ bindable::{Bind, Column, StaticColumnCount},
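
Patch 20/26 above ("Store buffer's diagnostic sets in a smallvec") swaps the buffer's per-server HashMap of diagnostic sets for a SmallVec of (LanguageServerId, DiagnosticSet) pairs kept sorted by server id, inserting via binary search. A minimal standalone sketch of that pattern, not code from the Zed tree — it assumes only the smallvec crate and uses a plain Vec<String> as a stand-in for DiagnosticSet:

// Illustrative sketch of the storage pattern from patch 20/26: diagnostics
// keyed by language server id in a SmallVec kept sorted by id, with a binary
// search deciding between insert and replace.
use smallvec::SmallVec;

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct LanguageServerId(usize);

// Stand-in for the real DiagnosticSet type.
type DiagnosticSet = Vec<String>;

#[derive(Default)]
struct Buffer {
    // Most buffers carry diagnostics from at most a couple of servers, so the
    // first two entries live inline without a heap allocation.
    diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
}

impl Buffer {
    fn update_diagnostics(&mut self, server_id: LanguageServerId, set: DiagnosticSet) {
        match self.diagnostics.binary_search_by_key(&server_id, |e| e.0) {
            // No entry for this server yet: insert at the sorted position.
            Err(ix) => self.diagnostics.insert(ix, (server_id, set)),
            // Existing entry: replace its diagnostic set wholesale.
            Ok(ix) => self.diagnostics[ix].1 = set,
        }
    }
}

fn main() {
    let mut buffer = Buffer::default();
    buffer.update_diagnostics(LanguageServerId(1), vec!["warning 1".into()]);
    buffer.update_diagnostics(LanguageServerId(0), vec!["error 1".into()]);
    // Entries stay ordered by server id, so a per-server lookup (as in the
    // diagnostic_groups(Some(id)) filtering from patch 18/26) can also binary search.
    assert_eq!(buffer.diagnostics[0].0, LanguageServerId(0));
    assert_eq!(buffer.diagnostics[1].0, LanguageServerId(1));
}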
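
Patches 24-26/26 land the terminal line_height setting, and after 25/26 drops #[serde(untagged)] and the hand-written deserializers, the enum uses serde's default externally tagged representation with snake_case variant names. A minimal, self-contained sketch of how the three JSON forms documented in assets/settings/default.json map onto that enum — again illustrative only; it assumes just serde (derive feature) and serde_json, whereas the real type in crates/settings also derives PartialEq and schemars' JsonSchema:

// Illustrative sketch of the line_height setting from patches 24-26/26.
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
enum TerminalLineHeight {
    // "line_height": "comfortable" -- the default, 1.618
    #[default]
    Comfortable,
    // "line_height": "standard" -- 1.3, useful for TUIs drawing box characters
    Standard,
    // "line_height": { "custom": 2 }
    Custom(f32),
}

impl TerminalLineHeight {
    fn value(&self) -> f32 {
        match self {
            TerminalLineHeight::Comfortable => 1.618,
            TerminalLineHeight::Standard => 1.3,
            TerminalLineHeight::Custom(line_height) => *line_height,
        }
    }
}

fn main() {
    // The three accepted JSON forms from assets/settings/default.json:
    let comfortable: TerminalLineHeight = serde_json::from_str(r#""comfortable""#).unwrap();
    let standard: TerminalLineHeight = serde_json::from_str(r#""standard""#).unwrap();
    let custom: TerminalLineHeight = serde_json::from_str(r#"{ "custom": 2 }"#).unwrap();

    assert_eq!(comfortable.value(), 1.618);
    assert_eq!(standard.value(), 1.3);
    assert_eq!(custom.value(), 2.0);
}

With the externally tagged form, a unit variant is just its snake_case name and the newtype variant is keyed by "custom", which is exactly the shape the 26/26 fix to the default.json comment documents.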