diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs
index 72bb002ed7..7c745c0702 100644
--- a/crates/editor/src/editor.rs
+++ b/crates/editor/src/editor.rs
@@ -118,6 +118,7 @@ action!(SelectSmallerSyntaxNode);
 action!(MoveToEnclosingBracket);
 action!(ShowNextDiagnostic);
 action!(GoToDefinition);
+action!(FindAllReferences);
 action!(Rename);
 action!(ConfirmRename);
 action!(PageUp);
@@ -249,6 +250,7 @@ pub fn init(cx: &mut MutableAppContext, path_openers: &mut Vec() .unwrap();
@@ -4098,6 +4099,83 @@ impl Editor {
             .detach_and_log_err(cx);
     }
 
+    pub fn find_all_references(
+        workspace: &mut Workspace,
+        _: &FindAllReferences,
+        cx: &mut ViewContext<Workspace>,
+    ) -> Option<Task<Result<()>>> {
+        let active_item = workspace.active_item(cx)?;
+        let editor_handle = active_item.act_as::<Self>(cx)?;
+
+        let editor = editor_handle.read(cx);
+        let buffer = editor.buffer.read(cx);
+        let head = editor.newest_selection::<usize>(&buffer.read(cx)).head();
+        let (buffer, head) = editor.buffer.read(cx).text_anchor_for_position(head, cx)?;
+        let replica_id = editor.replica_id(cx);
+
+        let references = workspace
+            .project()
+            .update(cx, |project, cx| project.references(&buffer, head, cx));
+        Some(cx.spawn(|workspace, mut cx| async move {
+            let mut locations = references.await?;
+            if locations.is_empty() {
+                return Ok(());
+            }
+
+            locations.sort_by_key(|location| location.buffer.id());
+            let mut locations = locations.into_iter().peekable();
+            let mut ranges_to_highlight = Vec::new();
+
+            let excerpt_buffer = cx.add_model(|cx| {
+                let mut symbol_name = None;
+                let mut multibuffer = MultiBuffer::new(replica_id);
+                while let Some(location) = locations.next() {
+                    let buffer = location.buffer.read(cx);
+                    let mut ranges_for_buffer = Vec::new();
+                    let range = location.range.to_offset(buffer);
+                    ranges_for_buffer.push(range.clone());
+                    if symbol_name.is_none() {
+                        symbol_name = Some(buffer.text_for_range(range).collect::<String>());
+                    }
+
+                    while let Some(next_location) = locations.peek() {
+                        if next_location.buffer == location.buffer {
+                            ranges_for_buffer.push(next_location.range.to_offset(buffer));
+                            locations.next();
+                        } else {
+                            break;
+                        }
+                    }
+
+                    ranges_for_buffer.sort_by_key(|range| (range.start, Reverse(range.end)));
+                    ranges_to_highlight.extend(multibuffer.push_excerpts_with_context_lines(
+                        location.buffer.clone(),
+                        ranges_for_buffer,
+                        1,
+                        cx,
+                    ));
+                }
+                multibuffer.with_title(format!("References to `{}`", symbol_name.unwrap()))
+            });
+
+            workspace.update(&mut cx, |workspace, cx| {
+                let editor = workspace.open_item(MultiBufferItemHandle(excerpt_buffer), cx);
+                if let Some(editor) = editor.act_as::<Self>(cx) {
+                    editor.update(cx, |editor, cx| {
+                        let settings = (editor.build_settings)(cx);
+                        editor.highlight_ranges::<FindAllReferences>(
+                            ranges_to_highlight,
+                            settings.style.highlighted_line_background,
+                            cx,
+                        );
+                    });
+                }
+            });
+
+            Ok(())
+        }))
+    }
+
     pub fn rename(&mut self, _: &Rename, cx: &mut ViewContext<Self>) -> Option<Task<Result<()>>> {
         use language::ToOffset as _;
 
diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs
index 14f0c2817a..5e9f7c81ad 100644
--- a/crates/project/src/lsp_command.rs
+++ b/crates/project/src/lsp_command.rs
@@ -1,4 +1,4 @@
-use crate::{Definition, Project, ProjectTransaction};
+use crate::{Location, Project, ProjectTransaction};
 use anyhow::{anyhow, Result};
 use async_trait::async_trait;
 use client::{proto, PeerId};
@@ -66,6 +66,10 @@ pub(crate) struct GetDefinition {
     pub position: PointUtf16,
 }
 
+pub(crate) struct GetReferences {
+    pub position: PointUtf16,
+}
+
 #[async_trait(?Send)]
 impl LspCommand for PrepareRename {
     type Response = Option<Range<Anchor>>;
@@ -287,7 +291,7 @@ impl LspCommand for PerformRename {
 
 #[async_trait(?Send)]
 impl LspCommand for GetDefinition {
-    type Response = Vec<Definition>;
+    type Response = Vec<Location>;
     type LspRequest = lsp::request::GotoDefinition;
     type ProtoRequest = proto::GetDefinition;
 
@@ -310,7 +314,7 @@ impl LspCommand for GetDefinition {
         project: ModelHandle<Project>,
         buffer: ModelHandle<Buffer>,
         mut cx: AsyncAppContext,
-    ) -> Result<Vec<Definition>> {
+    ) -> Result<Vec<Location>> {
         let mut definitions = Vec::new();
         let (language, language_server) = buffer
             .read_with(&cx, |buffer, _| {
@@ -357,9 +361,9 @@ impl LspCommand for GetDefinition {
                         .clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
                     let target_end = target_buffer
                         .clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
-                    definitions.push(Definition {
-                        target_buffer: target_buffer_handle,
-                        target_range: target_buffer.anchor_after(target_start)
+                    definitions.push(Location {
+                        buffer: target_buffer_handle,
+                        range: target_buffer.anchor_after(target_start)
                             ..target_buffer.anchor_before(target_end),
                     });
                 });
@@ -393,25 +397,24 @@ impl LspCommand for GetDefinition {
     }
 
     fn response_to_proto(
-        response: Vec<Definition>,
+        response: Vec<Location>,
         project: &mut Project,
         peer_id: PeerId,
         _: &clock::Global,
         cx: &AppContext,
     ) -> proto::GetDefinitionResponse {
-        let definitions = response
+        let locations = response
             .into_iter()
             .map(|definition| {
-                let buffer =
-                    project.serialize_buffer_for_peer(&definition.target_buffer, peer_id, cx);
-                proto::Definition {
-                    target_start: Some(serialize_anchor(&definition.target_range.start)),
-                    target_end: Some(serialize_anchor(&definition.target_range.end)),
+                let buffer = project.serialize_buffer_for_peer(&definition.buffer, peer_id, cx);
+                proto::Location {
+                    start: Some(serialize_anchor(&definition.range.start)),
+                    end: Some(serialize_anchor(&definition.range.end)),
                     buffer: Some(buffer),
                 }
             })
             .collect();
-        proto::GetDefinitionResponse { definitions }
+        proto::GetDefinitionResponse { locations }
     }
 
     async fn response_from_proto(
@@ -420,30 +423,178 @@ impl LspCommand for GetDefinition {
         project: ModelHandle<Project>,
         _: ModelHandle<Buffer>,
         mut cx: AsyncAppContext,
-    ) -> Result<Vec<Definition>> {
-        let mut definitions = Vec::new();
-        for definition in message.definitions {
-            let buffer = definition.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
-            let target_buffer = project
+    ) -> Result<Vec<Location>> {
+        let mut locations = Vec::new();
+        for location in message.locations {
+            let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
+            let buffer = project
                 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                 .await?;
-            let target_start = definition
-                .target_start
+            let start = location
+                .start
                 .and_then(deserialize_anchor)
                 .ok_or_else(|| anyhow!("missing target start"))?;
-            let target_end = definition
-                .target_end
+            let end = location
+                .end
                 .and_then(deserialize_anchor)
                 .ok_or_else(|| anyhow!("missing target end"))?;
-            definitions.push(Definition {
-                target_buffer,
-                target_range: target_start..target_end,
+            locations.push(Location {
+                buffer,
+                range: start..end,
             })
         }
-        Ok(definitions)
+        Ok(locations)
     }
 
     fn buffer_id_from_proto(message: &proto::GetDefinition) -> u64 {
         message.buffer_id
     }
 }
+
+#[async_trait(?Send)]
+impl LspCommand for GetReferences {
+    type Response = Vec<Location>;
+    type LspRequest = lsp::request::References;
+    type ProtoRequest = proto::GetReferences;
+
+    fn to_lsp(&self, path: &Path, _: &AppContext) -> lsp::ReferenceParams {
+        lsp::ReferenceParams {
+            text_document_position: lsp::TextDocumentPositionParams {
+                text_document: lsp::TextDocumentIdentifier {
+                    uri: lsp::Url::from_file_path(path).unwrap(),
+                },
+                position: self.position.to_lsp_position(),
+            },
+            work_done_progress_params: Default::default(),
+            partial_result_params: Default::default(),
+            context: lsp::ReferenceContext {
+                include_declaration: true,
+            },
+        }
+    }
+
+    async fn response_from_lsp(
+        self,
+        locations: Option<Vec<lsp::Location>>,
+        project: ModelHandle<Project>,
+        buffer: ModelHandle<Buffer>,
+        mut cx: AsyncAppContext,
+    ) -> Result<Vec<Location>> {
+        let mut references = Vec::new();
+        let (language, language_server) = buffer
+            .read_with(&cx, |buffer, _| {
+                buffer
+                    .language()
+                    .cloned()
+                    .zip(buffer.language_server().cloned())
+            })
+            .ok_or_else(|| anyhow!("buffer no longer has language server"))?;
+
+        if let Some(locations) = locations {
+            for lsp_location in locations {
+                let target_buffer_handle = project
+                    .update(&mut cx, |this, cx| {
+                        this.open_local_buffer_via_lsp(
+                            lsp_location.uri,
+                            language.name().to_string(),
+                            language_server.clone(),
+                            cx,
+                        )
+                    })
+                    .await?;
+
+                cx.read(|cx| {
+                    let target_buffer = target_buffer_handle.read(cx);
+                    let target_start = target_buffer
+                        .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left);
+                    let target_end = target_buffer
+                        .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left);
+                    references.push(Location {
+                        buffer: target_buffer_handle,
+                        range: target_buffer.anchor_after(target_start)
+                            ..target_buffer.anchor_before(target_end),
+                    });
+                });
+            }
+        }
+
+        Ok(references)
+    }
+
+    fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetReferences {
+        proto::GetReferences {
+            project_id,
+            buffer_id: buffer.remote_id(),
+            position: Some(language::proto::serialize_anchor(
+                &buffer.anchor_before(self.position),
+            )),
+        }
+    }
+
+    fn from_proto(message: proto::GetReferences, _: &mut Project, buffer: &Buffer) -> Result<Self> {
+        let position = message
+            .position
+            .and_then(deserialize_anchor)
+            .ok_or_else(|| anyhow!("invalid position"))?;
+        if !buffer.can_resolve(&position) {
+            Err(anyhow!("cannot resolve position"))?;
+        }
+        Ok(Self {
+            position: position.to_point_utf16(buffer),
+        })
+    }
+
+    fn response_to_proto(
+        response: Vec<Location>,
+        project: &mut Project,
+        peer_id: PeerId,
+        _: &clock::Global,
+        cx: &AppContext,
+    ) -> proto::GetReferencesResponse {
+        let locations = response
+            .into_iter()
+            .map(|definition| {
+                let buffer = project.serialize_buffer_for_peer(&definition.buffer, peer_id, cx);
+                proto::Location {
+                    start: Some(serialize_anchor(&definition.range.start)),
+                    end: Some(serialize_anchor(&definition.range.end)),
+                    buffer: Some(buffer),
+                }
+            })
+            .collect();
+        proto::GetReferencesResponse { locations }
+    }
+
+    async fn response_from_proto(
+        self,
+        message: proto::GetReferencesResponse,
+        project: ModelHandle<Project>,
+        _: ModelHandle<Buffer>,
+        mut cx: AsyncAppContext,
+    ) -> Result<Vec<Location>> {
+        let mut locations = Vec::new();
+        for location in message.locations {
+            let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
+            let target_buffer = project
+                .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
+                .await?;
+            let start = location
+                .start
+                .and_then(deserialize_anchor)
+                .ok_or_else(|| anyhow!("missing target start"))?;
+            let end = location
+                .end
+                .and_then(deserialize_anchor)
+                .ok_or_else(|| anyhow!("missing target end"))?;
+            locations.push(Location {
+                buffer: target_buffer,
+                range: start..end,
+            })
+        }
+        Ok(locations)
+    }
+
+    fn buffer_id_from_proto(message: &proto::GetReferences) -> u64 {
+        message.buffer_id
+    }
+}
diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs
index 19500f3229..aebabcd92e 100644
--- a/crates/project/src/project.rs
+++ b/crates/project/src/project.rs
@@ -118,9 +118,9 @@ pub struct DiagnosticSummary {
 }
 
 #[derive(Debug)]
-pub struct Definition {
-    pub target_buffer: ModelHandle<Buffer>,
-    pub target_range: Range<language::Anchor>,
+pub struct Location {
+    pub buffer: ModelHandle<Buffer>,
+    pub range: Range<language::Anchor>,
 }
 
 #[derive(Clone, Debug)]
@@ -202,6 +202,7 @@ impl Project {
         client.add_entity_request_handler(Self::handle_get_code_actions);
         client.add_entity_request_handler(Self::handle_get_completions);
         client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
+        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
         client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
         client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
         client.add_entity_request_handler(Self::handle_get_project_symbols);
@@ -1253,11 +1254,21 @@ impl Project {
         buffer: &ModelHandle<Buffer>,
         position: T,
         cx: &mut ModelContext<Self>,
-    ) -> Task<Result<Vec<Definition>>> {
+    ) -> Task<Result<Vec<Location>>> {
         let position = position.to_point_utf16(buffer.read(cx));
         self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
     }
 
+    pub fn references<T: ToPointUtf16>(
+        &self,
+        buffer: &ModelHandle<Buffer>,
+        position: T,
+        cx: &mut ModelContext<Self>,
+    ) -> Task<Result<Vec<Location>>> {
+        let position = position.to_point_utf16(buffer.read(cx));
+        self.request_lsp(buffer.clone(), GetReferences { position }, cx)
+    }
+
     pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
         if self.is_local() {
             let mut language_servers = HashMap::default();
@@ -3606,7 +3617,7 @@ mod tests {
         assert_eq!(definitions.len(), 1);
         let definition = definitions.pop().unwrap();
         cx.update(|cx| {
-            let target_buffer = definition.target_buffer.read(cx);
+            let target_buffer = definition.buffer.read(cx);
             assert_eq!(
                 target_buffer
                     .file()
@@ -3616,7 +3627,7 @@ mod tests {
                     .abs_path(cx),
                 Path::new("/dir/a.rs"),
             );
-            assert_eq!(definition.target_range.to_offset(target_buffer), 9..10);
+            assert_eq!(definition.range.to_offset(target_buffer), 9..10);
             assert_eq!(
                 list_worktrees(&project, cx),
                 [("/dir/b.rs".as_ref(), false), ("/dir/a.rs".as_ref(), true)]
diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto
index 664d103a15..426ba93821 100644
--- a/crates/rpc/proto/zed.proto
+++ b/crates/rpc/proto/zed.proto
@@ -23,57 +23,59 @@ message Envelope {
         RemoveProjectCollaborator remove_project_collaborator = 17;
         GetDefinition get_definition = 18;
         GetDefinitionResponse get_definition_response = 19;
-        GetProjectSymbols get_project_symbols = 20;
-        GetProjectSymbolsResponse get_project_symbols_response = 21;
-        OpenBufferForSymbol open_buffer_for_symbol = 22;
-        OpenBufferForSymbolResponse open_buffer_for_symbol_response = 23;
+        GetReferences get_references = 20;
+        GetReferencesResponse get_references_response = 21;
+        GetProjectSymbols get_project_symbols = 22;
+        GetProjectSymbolsResponse get_project_symbols_response = 23;
+        OpenBufferForSymbol open_buffer_for_symbol = 24;
+        OpenBufferForSymbolResponse open_buffer_for_symbol_response = 25;
 
-        RegisterWorktree register_worktree = 24;
-        UnregisterWorktree unregister_worktree = 25;
-        ShareWorktree share_worktree = 26;
-        UpdateWorktree update_worktree = 27;
-        UpdateDiagnosticSummary update_diagnostic_summary = 28;
-        DiskBasedDiagnosticsUpdating disk_based_diagnostics_updating = 29;
-        DiskBasedDiagnosticsUpdated disk_based_diagnostics_updated = 30;
+        RegisterWorktree register_worktree = 26;
+        UnregisterWorktree unregister_worktree = 27;
+        ShareWorktree share_worktree = 28;
+        UpdateWorktree update_worktree = 29;
+        UpdateDiagnosticSummary update_diagnostic_summary = 30;
+        DiskBasedDiagnosticsUpdating disk_based_diagnostics_updating = 31;
+        DiskBasedDiagnosticsUpdated disk_based_diagnostics_updated = 32;
 
-        OpenBuffer open_buffer = 31;
-        OpenBufferResponse open_buffer_response = 32;
-        CloseBuffer close_buffer = 33;
-        UpdateBuffer update_buffer = 34;
-        UpdateBufferFile update_buffer_file = 35;
-        SaveBuffer save_buffer = 36;
-        BufferSaved buffer_saved = 37;
-        BufferReloaded buffer_reloaded = 38;
-        FormatBuffers format_buffers = 39;
-        FormatBuffersResponse format_buffers_response = 40;
-        GetCompletions get_completions = 41;
-        GetCompletionsResponse get_completions_response = 42;
-        ApplyCompletionAdditionalEdits apply_completion_additional_edits = 43;
-        ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 44;
-        GetCodeActions get_code_actions = 45;
-        GetCodeActionsResponse get_code_actions_response = 46;
-        ApplyCodeAction apply_code_action = 47;
-        ApplyCodeActionResponse apply_code_action_response = 48;
-        PrepareRename prepare_rename = 49;
-        PrepareRenameResponse prepare_rename_response = 50;
-        PerformRename perform_rename = 51;
-        PerformRenameResponse perform_rename_response = 52;
+        OpenBuffer open_buffer = 33;
+        OpenBufferResponse open_buffer_response = 34;
+        CloseBuffer close_buffer = 35;
+        UpdateBuffer update_buffer = 36;
+        UpdateBufferFile update_buffer_file = 37;
+        SaveBuffer save_buffer = 38;
+        BufferSaved buffer_saved = 39;
+        BufferReloaded buffer_reloaded = 40;
+        FormatBuffers format_buffers = 41;
+        FormatBuffersResponse format_buffers_response = 42;
+        GetCompletions get_completions = 43;
+        GetCompletionsResponse get_completions_response = 44;
+        ApplyCompletionAdditionalEdits apply_completion_additional_edits = 45;
+        ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 46;
+        GetCodeActions get_code_actions = 47;
+        GetCodeActionsResponse get_code_actions_response = 48;
+        ApplyCodeAction apply_code_action = 49;
+        ApplyCodeActionResponse apply_code_action_response = 50;
+        PrepareRename prepare_rename = 51;
+        PrepareRenameResponse prepare_rename_response = 52;
+        PerformRename perform_rename = 53;
+        PerformRenameResponse perform_rename_response = 54;
 
-        GetChannels get_channels = 53;
-        GetChannelsResponse get_channels_response = 54;
-        JoinChannel join_channel = 55;
-        JoinChannelResponse join_channel_response = 56;
-        LeaveChannel leave_channel = 57;
-        SendChannelMessage send_channel_message = 58;
-        SendChannelMessageResponse send_channel_message_response = 59;
-        ChannelMessageSent channel_message_sent = 60;
-        GetChannelMessages get_channel_messages = 61;
-        GetChannelMessagesResponse get_channel_messages_response = 62;
+        GetChannels get_channels = 55;
+        GetChannelsResponse get_channels_response = 56;
+        JoinChannel join_channel = 57;
+        JoinChannelResponse join_channel_response = 58;
+        LeaveChannel leave_channel = 59;
+        SendChannelMessage send_channel_message = 60;
+        SendChannelMessageResponse send_channel_message_response = 61;
+        ChannelMessageSent channel_message_sent = 62;
+        GetChannelMessages get_channel_messages = 63;
+        GetChannelMessagesResponse get_channel_messages_response = 64;
 
-        UpdateContacts update_contacts = 63;
+        UpdateContacts update_contacts = 65;
 
-        GetUsers get_users = 64;
-        GetUsersResponse get_users_response = 65;
+        GetUsers get_users = 66;
+        GetUsersResponse get_users_response = 67;
     }
 }
 
@@ -166,13 +168,23 @@ message GetDefinition {
 }
 
 message GetDefinitionResponse {
-    repeated Definition definitions = 1;
+    repeated Location locations = 1;
 }
 
-message Definition {
+message GetReferences {
+    uint64 project_id = 1;
+    uint64 buffer_id = 2;
+    Anchor position = 3;
+}
+
+message GetReferencesResponse {
+    repeated Location locations = 1;
+}
+
+message Location {
     Buffer buffer = 1;
-    Anchor target_start = 2;
-    Anchor target_end = 3;
+    Anchor start = 2;
+    Anchor end = 3;
 }
 
 message GetProjectSymbols {
diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs
index e11586e9dd..61c4585deb 100644
--- a/crates/rpc/src/proto.rs
+++ b/crates/rpc/src/proto.rs
@@ -157,6 +157,8 @@ messages!(
     (GetCompletionsResponse, Foreground),
     (GetDefinition, Foreground),
     (GetDefinitionResponse, Foreground),
+    (GetReferences, Foreground),
+    (GetReferencesResponse, Foreground),
     (GetProjectSymbols, Background),
     (GetProjectSymbolsResponse, Background),
     (GetUsers, Foreground),
@@ -208,6 +210,7 @@ request_messages!(
     (GetCodeActions, GetCodeActionsResponse),
     (GetCompletions, GetCompletionsResponse),
     (GetDefinition, GetDefinitionResponse),
+    (GetReferences, GetReferencesResponse),
     (GetProjectSymbols, GetProjectSymbolsResponse),
     (GetUsers, GetUsersResponse),
     (JoinChannel, JoinChannelResponse),
@@ -242,6 +245,7 @@ entity_messages!(
     GetCodeActions,
     GetCompletions,
     GetDefinition,
+    GetReferences,
     GetProjectSymbols,
     JoinProject,
     LeaveProject,
diff --git a/crates/server/src/rpc.rs b/crates/server/src/rpc.rs
index 00b4c61d23..39c2d8a20f 100644
--- a/crates/server/src/rpc.rs
+++ b/crates/server/src/rpc.rs
@@ -79,6 +79,7 @@ impl Server {
             .add_message_handler(Server::disk_based_diagnostics_updating)
             .add_message_handler(Server::disk_based_diagnostics_updated)
             .add_request_handler(Server::get_definition)
+            .add_request_handler(Server::get_references)
             .add_request_handler(Server::get_project_symbols)
             .add_request_handler(Server::open_buffer_for_symbol)
             .add_request_handler(Server::open_buffer)
@@ -589,6 +590,20 @@ impl Server {
             .await?)
     }
 
+    async fn get_references(
+        self: Arc<Server>,
+        request: TypedEnvelope<proto::GetReferences>,
+    ) -> tide::Result<proto::GetReferencesResponse> {
+        let host_connection_id = self
+            .state()
+            .read_project(request.payload.project_id, request.sender_id)?
+            .host_connection_id;
+        Ok(self
+            .peer
+            .forward_request(request.sender_id, host_connection_id, request.payload)
+            .await?)
+    }
+
     async fn get_project_symbols(
         self: Arc<Server>,
         request: TypedEnvelope<proto::GetProjectSymbols>,
@@ -2658,13 +2673,13 @@ mod tests {
         cx_b.read(|cx| {
             assert_eq!(definitions_1.len(), 1);
             assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
-            let target_buffer = definitions_1[0].target_buffer.read(cx);
+            let target_buffer = definitions_1[0].buffer.read(cx);
             assert_eq!(
                 target_buffer.text(),
                 "const TWO: usize = 2;\nconst THREE: usize = 3;"
             );
             assert_eq!(
-                definitions_1[0].target_range.to_point(target_buffer),
+                definitions_1[0].range.to_point(target_buffer),
                 Point::new(0, 6)..Point::new(0, 9)
             );
         });
@@ -2683,20 +2698,17 @@ mod tests {
         cx_b.read(|cx| {
             assert_eq!(definitions_2.len(), 1);
             assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
-            let target_buffer = definitions_2[0].target_buffer.read(cx);
+            let target_buffer = definitions_2[0].buffer.read(cx);
             assert_eq!(
                 target_buffer.text(),
                 "const TWO: usize = 2;\nconst THREE: usize = 3;"
            );
             assert_eq!(
-                definitions_2[0].target_range.to_point(target_buffer),
+                definitions_2[0].range.to_point(target_buffer),
                 Point::new(1, 6)..Point::new(1, 11)
             );
         });
-        assert_eq!(
-            definitions_1[0].target_buffer,
-            definitions_2[0].target_buffer
-        );
+        assert_eq!(definitions_1[0].buffer, definitions_2[0].buffer);
 
         cx_b.update(|_| {
             drop(definitions_1);
@@ -2707,6 +2719,142 @@ mod tests {
         .await;
     }
 
+    #[gpui::test(iterations = 10)]
+    async fn test_references(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
+        cx_a.foreground().forbid_parking();
+        let mut lang_registry = Arc::new(LanguageRegistry::new());
+        let fs = FakeFs::new(cx_a.background());
+        fs.insert_tree(
+            "/root-1",
+            json!({
+                ".zed.toml": r#"collaborators = ["user_b"]"#,
+                "one.rs": "const ONE: usize = 1;",
+                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
+            }),
+        )
+        .await;
+        fs.insert_tree(
+            "/root-2",
+            json!({
+                "three.rs": "const THREE: usize = two::TWO + one::ONE;",
+            }),
+        )
+        .await;
+
+        // Set up a fake language server.
+        let (language_server_config, mut fake_language_servers) = LanguageServerConfig::fake();
+        Arc::get_mut(&mut lang_registry)
+            .unwrap()
+            .add(Arc::new(Language::new(
+                LanguageConfig {
+                    name: "Rust".into(),
+                    path_suffixes: vec!["rs".to_string()],
+                    language_server: Some(language_server_config),
+                    ..Default::default()
+                },
+                Some(tree_sitter_rust::language()),
+            )));
+
+        // Connect to a server as 2 clients.
+        let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
+        let client_a = server.create_client(&mut cx_a, "user_a").await;
+        let client_b = server.create_client(&mut cx_b, "user_b").await;
+
+        // Share a project as client A
+        let project_a = cx_a.update(|cx| {
+            Project::local(
+                client_a.clone(),
+                client_a.user_store.clone(),
+                lang_registry.clone(),
+                fs.clone(),
+                cx,
+            )
+        });
+        let (worktree_a, _) = project_a
+            .update(&mut cx_a, |p, cx| {
+                p.find_or_create_local_worktree("/root-1", false, cx)
+            })
+            .await
+            .unwrap();
+        worktree_a
+            .read_with(&cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
+            .await;
+        let project_id = project_a.update(&mut cx_a, |p, _| p.next_remote_id()).await;
+        let worktree_id = worktree_a.read_with(&cx_a, |tree, _| tree.id());
+        project_a
+            .update(&mut cx_a, |p, cx| p.share(cx))
+            .await
+            .unwrap();
+
+        // Join the worktree as client B.
+        let project_b = Project::remote(
+            project_id,
+            client_b.clone(),
+            client_b.user_store.clone(),
+            lang_registry.clone(),
+            fs.clone(),
+            &mut cx_b.to_async(),
+        )
+        .await
+        .unwrap();
+
+        // Open the file on client B.
+        let buffer_b = cx_b
+            .background()
+            .spawn(project_b.update(&mut cx_b, |p, cx| {
+                p.open_buffer((worktree_id, "one.rs"), cx)
+            }))
+            .await
+            .unwrap();
+
+        // Request references to a symbol as the guest.
+        let references = project_b.update(&mut cx_b, |p, cx| p.references(&buffer_b, 7, cx));
+
+        let mut fake_language_server = fake_language_servers.next().await.unwrap();
+        fake_language_server.handle_request::<lsp::request::References, _>(|params| {
+            assert_eq!(
+                params.text_document_position.text_document.uri.as_str(),
+                "file:///root-1/one.rs"
+            );
+            Some(vec![
+                lsp::Location {
+                    uri: lsp::Url::from_file_path("/root-1/two.rs").unwrap(),
+                    range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)),
+                },
+                lsp::Location {
+                    uri: lsp::Url::from_file_path("/root-1/two.rs").unwrap(),
+                    range: lsp::Range::new(lsp::Position::new(0, 35), lsp::Position::new(0, 38)),
+                },
+                lsp::Location {
+                    uri: lsp::Url::from_file_path("/root-2/three.rs").unwrap(),
+                    range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)),
+                },
+            ])
+        });
+
+        let references = references.await.unwrap();
+        cx_b.read(|cx| {
+            assert_eq!(references.len(), 3);
+            assert_eq!(project_b.read(cx).worktrees(cx).count(), 2);
+
+            let two_buffer = references[0].buffer.read(cx);
+            let three_buffer = references[2].buffer.read(cx);
+            assert_eq!(
+                two_buffer.file().unwrap().path().as_ref(),
+                Path::new("two.rs")
+            );
+            assert_eq!(references[1].buffer, references[0].buffer);
+            assert_eq!(
+                three_buffer.file().unwrap().full_path(cx),
+                Path::new("three.rs")
+            );
+
+            assert_eq!(references[0].range.to_offset(&two_buffer), 24..27);
+            assert_eq!(references[1].range.to_offset(&two_buffer), 35..38);
+            assert_eq!(references[2].range.to_offset(&three_buffer), 37..40);
+        });
+    }
+
     #[gpui::test(iterations = 10)]
     async fn test_project_symbols(mut cx_a: TestAppContext, mut cx_b: TestAppContext) {
         cx_a.foreground().forbid_parking();
@@ -2950,7 +3098,7 @@ mod tests {
         let buffer_b2 = buffer_b2.await.unwrap();
         let definitions = definitions.await.unwrap();
         assert_eq!(definitions.len(), 1);
-        assert_eq!(definitions[0].target_buffer, buffer_b2);
+        assert_eq!(definitions[0].buffer, buffer_b2);
     }
 
     #[gpui::test(iterations = 10)]