Extract an LspStore object from Project, to prepare for language support over SSH (#17041)

For ssh remoting lsps we'll need to have language server support
factored out of project.

This PR begins that factoring by introducing the LspStore.

Release Notes:

- N/A

---------

Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
Co-authored-by: Mikayla <mikayla@zed.dev>
This commit is contained in:
Conrad Irwin 2024-08-30 15:36:38 -06:00 committed by GitHub
parent 7c57ffafbd
commit 75d4c7981e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
24 changed files with 7252 additions and 6466 deletions

1
Cargo.lock generated
View File

@ -9015,7 +9015,6 @@ dependencies = [
"shellexpand 2.1.2",
"smol",
"toml 0.8.19",
"util",
"worktree",
]

View File

@ -150,7 +150,7 @@ impl ActivityIndicator {
) -> impl Iterator<Item = PendingWork<'a>> {
self.project
.read(cx)
.language_server_statuses()
.language_server_statuses(cx)
.rev()
.filter_map(|(server_id, status)| {
if status.pending_work.is_empty() {

View File

@ -4841,7 +4841,10 @@ fn make_lsp_adapter_delegate(
.worktrees(cx)
.next()
.ok_or_else(|| anyhow!("no worktrees when constructing ProjectLspAdapterDelegate"))?;
Ok(ProjectLspAdapterDelegate::new(project, &worktree, cx) as Arc<dyn LspAdapterDelegate>)
project.lsp_store().update(cx, |lsp_store, cx| {
Ok(ProjectLspAdapterDelegate::new(lsp_store, &worktree, cx)
as Arc<dyn LspAdapterDelegate>)
})
})
}

View File

@ -446,6 +446,15 @@ impl<T: 'static> PendingEntitySubscription<T> {
);
drop(state);
for message in messages {
let client_id = self.client.id();
let type_name = message.payload_type_name();
let sender_id = message.original_sender_id();
log::debug!(
"handling queued rpc message. client_id:{}, sender_id:{:?}, type:{}",
client_id,
sender_id,
type_name
);
self.client.handle_message(message, cx);
}
Subscription::Entity {
@ -1516,7 +1525,12 @@ impl Client {
self.peer.send(self.connection_id()?, message)
}
pub fn send_dynamic(&self, envelope: proto::Envelope) -> Result<()> {
pub fn send_dynamic(
&self,
envelope: proto::Envelope,
message_type: &'static str,
) -> Result<()> {
log::debug!("rpc send. client_id:{}, name:{}", self.id(), message_type);
let connection_id = self.connection_id()?;
self.peer.send_dynamic(connection_id, envelope)
}
@ -1728,8 +1742,8 @@ impl ProtoClient for Client {
self.request_dynamic(envelope, request_type).boxed()
}
fn send(&self, envelope: proto::Envelope) -> Result<()> {
self.send_dynamic(envelope)
fn send(&self, envelope: proto::Envelope, message_type: &'static str) -> Result<()> {
self.send_dynamic(envelope, message_type)
}
}

View File

@ -495,6 +495,9 @@ impl Server {
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::InlayHints>,
))
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::ResolveInlayHint>,
))
.add_request_handler(user_handler(
forward_read_only_project_request::<proto::OpenBufferByPath>,
))

View File

@ -250,6 +250,7 @@ async fn test_channel_notes_participant_indices(
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
// Clients A and B open the same file.
executor.start_waiting();
let editor_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id_a, "file.txt"), None, true, cx)
@ -258,6 +259,7 @@ async fn test_channel_notes_participant_indices(
.unwrap()
.downcast::<Editor>()
.unwrap();
executor.start_waiting();
let editor_b = workspace_b
.update(cx_b, |workspace, cx| {
workspace.open_path((worktree_id_a, "file.txt"), None, true, cx)

View File

@ -1021,8 +1021,8 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
});
executor.run_until_parked();
project_a.read_with(cx_a, |project, _| {
let status = project.language_server_statuses().next().unwrap().1;
project_a.read_with(cx_a, |project, cx| {
let status = project.language_server_statuses(cx).next().unwrap().1;
assert_eq!(status.name, "the-language-server");
assert_eq!(status.pending_work.len(), 1);
assert_eq!(
@ -1038,8 +1038,8 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
executor.run_until_parked();
let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
project_b.read_with(cx_b, |project, _| {
let status = project.language_server_statuses().next().unwrap().1;
project_b.read_with(cx_b, |project, cx| {
let status = project.language_server_statuses(cx).next().unwrap().1;
assert_eq!(status.name, "the-language-server");
});
@ -1055,8 +1055,8 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
});
executor.run_until_parked();
project_a.read_with(cx_a, |project, _| {
let status = project.language_server_statuses().next().unwrap().1;
project_a.read_with(cx_a, |project, cx| {
let status = project.language_server_statuses(cx).next().unwrap().1;
assert_eq!(status.name, "the-language-server");
assert_eq!(status.pending_work.len(), 1);
assert_eq!(
@ -1065,8 +1065,8 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
);
});
project_b.read_with(cx_b, |project, _| {
let status = project.language_server_statuses().next().unwrap().1;
project_b.read_with(cx_b, |project, cx| {
let status = project.language_server_statuses(cx).next().unwrap().1;
assert_eq!(status.name, "the-language-server");
assert_eq!(status.pending_work.len(), 1);
assert_eq!(

View File

@ -4780,8 +4780,8 @@ async fn test_references(
// User is informed that a request is pending.
executor.run_until_parked();
project_b.read_with(cx_b, |project, _| {
let status = project.language_server_statuses().next().unwrap().1;
project_b.read_with(cx_b, |project, cx| {
let status = project.language_server_statuses(cx).next().unwrap().1;
assert_eq!(status.name, "my-fake-lsp-adapter");
assert_eq!(
status.pending_work.values().next().unwrap().message,
@ -4811,7 +4811,7 @@ async fn test_references(
executor.run_until_parked();
project_b.read_with(cx_b, |project, cx| {
// User is informed that a request is no longer pending.
let status = project.language_server_statuses().next().unwrap().1;
let status = project.language_server_statuses(cx).next().unwrap().1;
assert!(status.pending_work.is_empty());
assert_eq!(references.len(), 3);
@ -4838,8 +4838,8 @@ async fn test_references(
// User is informed that a request is pending.
executor.run_until_parked();
project_b.read_with(cx_b, |project, _| {
let status = project.language_server_statuses().next().unwrap().1;
project_b.read_with(cx_b, |project, cx| {
let status = project.language_server_statuses(cx).next().unwrap().1;
assert_eq!(status.name, "my-fake-lsp-adapter");
assert_eq!(
status.pending_work.values().next().unwrap().message,
@ -4855,8 +4855,8 @@ async fn test_references(
// User is informed that the request is no longer pending.
executor.run_until_parked();
project_b.read_with(cx_b, |project, _| {
let status = project.language_server_statuses().next().unwrap().1;
project_b.read_with(cx_b, |project, cx| {
let status = project.language_server_statuses(cx).next().unwrap().1;
assert!(status.pending_work.is_empty());
});
}

View File

@ -60,13 +60,14 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
let language_server_id = LanguageServerId(0);
let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
let workspace = window.root(cx).unwrap();
// Create some diagnostics
project.update(cx, |project, cx| {
project
lsp_store.update(cx, |lsp_store, cx| {
lsp_store
.update_diagnostic_entries(
language_server_id,
PathBuf::from("/test/main.rs"),
@ -215,9 +216,9 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
});
// Diagnostics are added for another earlier path.
project.update(cx, |project, cx| {
project.disk_based_diagnostics_started(language_server_id, cx);
project
lsp_store.update(cx, |lsp_store, cx| {
lsp_store.disk_based_diagnostics_started(language_server_id, cx);
lsp_store
.update_diagnostic_entries(
language_server_id,
PathBuf::from("/test/consts.rs"),
@ -236,7 +237,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
cx,
)
.unwrap();
project.disk_based_diagnostics_finished(language_server_id, cx);
lsp_store.disk_based_diagnostics_finished(language_server_id, cx);
});
view.next_notification(cx).await;
@ -314,9 +315,9 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
});
// Diagnostics are added to the first path
project.update(cx, |project, cx| {
project.disk_based_diagnostics_started(language_server_id, cx);
project
lsp_store.update(cx, |lsp_store, cx| {
lsp_store.disk_based_diagnostics_started(language_server_id, cx);
lsp_store
.update_diagnostic_entries(
language_server_id,
PathBuf::from("/test/consts.rs"),
@ -348,7 +349,7 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
cx,
)
.unwrap();
project.disk_based_diagnostics_finished(language_server_id, cx);
lsp_store.disk_based_diagnostics_finished(language_server_id, cx);
});
view.next_notification(cx).await;
@ -449,6 +450,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
let server_id_1 = LanguageServerId(100);
let server_id_2 = LanguageServerId(101);
let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
let workspace = window.root(cx).unwrap();
@ -459,10 +461,10 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
let editor = view.update(cx, |view, _| view.editor.clone());
// Two language servers start updating diagnostics
project.update(cx, |project, cx| {
project.disk_based_diagnostics_started(server_id_1, cx);
project.disk_based_diagnostics_started(server_id_2, cx);
project
lsp_store.update(cx, |lsp_store, cx| {
lsp_store.disk_based_diagnostics_started(server_id_1, cx);
lsp_store.disk_based_diagnostics_started(server_id_2, cx);
lsp_store
.update_diagnostic_entries(
server_id_1,
PathBuf::from("/test/main.js"),
@ -484,8 +486,8 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
});
// The first language server finishes
project.update(cx, |project, cx| {
project.disk_based_diagnostics_finished(server_id_1, cx);
lsp_store.update(cx, |lsp_store, cx| {
lsp_store.disk_based_diagnostics_finished(server_id_1, cx);
});
// Only the first language server's diagnostics are shown.
@ -511,8 +513,8 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
);
// The second language server finishes
project.update(cx, |project, cx| {
project
lsp_store.update(cx, |lsp_store, cx| {
lsp_store
.update_diagnostic_entries(
server_id_2,
PathBuf::from("/test/main.js"),
@ -531,7 +533,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
cx,
)
.unwrap();
project.disk_based_diagnostics_finished(server_id_2, cx);
lsp_store.disk_based_diagnostics_finished(server_id_2, cx);
});
// Both language server's diagnostics are shown.
@ -566,10 +568,10 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
);
// Both language servers start updating diagnostics, and the first server finishes.
project.update(cx, |project, cx| {
project.disk_based_diagnostics_started(server_id_1, cx);
project.disk_based_diagnostics_started(server_id_2, cx);
project
lsp_store.update(cx, |lsp_store, cx| {
lsp_store.disk_based_diagnostics_started(server_id_1, cx);
lsp_store.disk_based_diagnostics_started(server_id_2, cx);
lsp_store
.update_diagnostic_entries(
server_id_1,
PathBuf::from("/test/main.js"),
@ -588,7 +590,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
cx,
)
.unwrap();
project
lsp_store
.update_diagnostic_entries(
server_id_2,
PathBuf::from("/test/main.rs"),
@ -597,7 +599,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
cx,
)
.unwrap();
project.disk_based_diagnostics_finished(server_id_1, cx);
lsp_store.disk_based_diagnostics_finished(server_id_1, cx);
});
// Only the first language server's diagnostics are updated.
@ -633,8 +635,8 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
);
// The second language server finishes.
project.update(cx, |project, cx| {
project
lsp_store.update(cx, |lsp_store, cx| {
lsp_store
.update_diagnostic_entries(
server_id_2,
PathBuf::from("/test/main.js"),
@ -653,7 +655,7 @@ async fn test_diagnostics_multiple_servers(cx: &mut TestAppContext) {
cx,
)
.unwrap();
project.disk_based_diagnostics_finished(server_id_2, cx);
lsp_store.disk_based_diagnostics_finished(server_id_2, cx);
});
// Both language servers' diagnostics are updated.
@ -701,6 +703,7 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
fs.insert_tree("/test", json!({})).await;
let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let window = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let cx = &mut VisualTestContext::from_window(*window, cx);
let workspace = window.root(cx).unwrap();
@ -731,8 +734,8 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
0..=20 if !updated_language_servers.is_empty() => {
let server_id = *updated_language_servers.iter().choose(&mut rng).unwrap();
log::info!("finishing diagnostic check for language server {server_id}");
project.update(cx, |project, cx| {
project.disk_based_diagnostics_finished(server_id, cx)
lsp_store.update(cx, |lsp_store, cx| {
lsp_store.disk_based_diagnostics_finished(server_id, cx)
});
if rng.gen_bool(0.5) {

View File

@ -22,7 +22,7 @@ impl Render for ToolbarControls {
|| editor
.project
.read(cx)
.language_servers_running_disk_based_diagnostics()
.language_servers_running_disk_based_diagnostics(cx)
.next()
.is_some();
}

View File

@ -835,7 +835,7 @@ fn new_update_task(
let query_range_failed =
|range: &Range<language::Anchor>, e: anyhow::Error, cx: &mut AsyncWindowContext| {
log::error!("inlay hint update task for range {range:?} failed: {e:#}");
log::error!("inlay hint update task for range failed: {e:#?}");
editor
.update(cx, |editor, cx| {
if let Some(task_ranges) = editor

View File

@ -286,7 +286,7 @@ impl LogStore {
cx.subscribe(project, |this, project, event, cx| match event {
project::Event::LanguageServerAdded(id) => {
let read_project = project.read(cx);
if let Some(server) = read_project.language_server_for_id(*id) {
if let Some(server) = read_project.language_server_for_id(*id, cx) {
this.add_language_server(
LanguageServerKind::Local {
project: project.downgrade(),
@ -671,7 +671,7 @@ impl LspLogView {
let mut rows = self
.project
.read(cx)
.language_servers()
.language_servers(cx)
.filter_map(|(server_id, language_server_name, worktree_id)| {
let worktree = self.project.read(cx).worktree_for_id(worktree_id, cx)?;
let state = log_store.language_servers.get(&server_id)?;
@ -687,7 +687,7 @@ impl LspLogView {
.chain(
self.project
.read(cx)
.supplementary_language_servers()
.supplementary_language_servers(cx)
.filter_map(|(&server_id, name)| {
let state = log_store.language_servers.get(&server_id)?;
Some(LogMenuItem {
@ -853,7 +853,7 @@ impl LspLogView {
level: TraceValue,
cx: &mut ViewContext<Self>,
) {
if let Some(server) = self.project.read(cx).language_server_for_id(server_id) {
if let Some(server) = self.project.read(cx).language_server_for_id(server_id, cx) {
self.log_store.update(cx, |this, _| {
if let Some(state) = this.get_language_server_state(server_id) {
state.trace_level = level;

View File

@ -18,11 +18,11 @@ use language::{
Buffer, Capability, Event as BufferEvent, File as _, Language, Operation,
};
use rpc::{
proto::{self, AnyProtoClient, EnvelopedMessage},
proto::{self, AnyProtoClient},
ErrorExt as _, TypedEnvelope,
};
use smol::channel::Receiver;
use std::{io, path::Path, str::FromStr as _, sync::Arc};
use std::{io, path::Path, str::FromStr as _, sync::Arc, time::Instant};
use text::BufferId;
use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
use worktree::{
@ -32,6 +32,7 @@ use worktree::{
/// A set of open buffers.
pub struct BufferStore {
downstream_client: Option<AnyProtoClient>,
remote_id: Option<u64>,
#[allow(unused)]
worktree_store: Model<WorktreeStore>,
@ -62,12 +63,23 @@ pub enum BufferStoreEvent {
buffer: Model<Buffer>,
old_file: Option<Arc<dyn language::File>>,
},
MessageToReplicas(Box<proto::Envelope>),
}
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);
impl EventEmitter<BufferStoreEvent> for BufferStore {}
impl BufferStore {
/// Registers the RPC handlers owned by `BufferStore` on the given client,
/// so buffer-related protocol messages are routed to the store's model.
///
/// Message handlers (fire-and-forget) cover buffer reload/save notifications
/// and file/diff-base updates; request handlers (expect a response) cover
/// save and blame requests.
pub fn init(client: &Arc<Client>) {
    client.add_model_message_handler(Self::handle_buffer_reloaded);
    client.add_model_message_handler(Self::handle_buffer_saved);
    client.add_model_message_handler(Self::handle_update_buffer_file);
    client.add_model_message_handler(Self::handle_update_diff_base);
    client.add_model_request_handler(Self::handle_save_buffer);
    client.add_model_request_handler(Self::handle_blame_buffer);
}
/// Creates a buffer store, optionally retaining its buffers.
///
/// If `retain_buffers` is `true`, then buffers are owned by the buffer store
@ -89,6 +101,7 @@ impl BufferStore {
Self {
remote_id,
downstream_client: None,
worktree_store,
opened_buffers: Default::default(),
remote_buffer_listeners: Default::default(),
@ -280,14 +293,15 @@ impl BufferStore {
buffer.remote_id().to_proto()
});
if let Some(project_id) = this.remote_id {
cx.emit(BufferStoreEvent::MessageToReplicas(Box::new(
proto::UpdateDiffBase {
project_id,
buffer_id,
diff_base,
}
.into_envelope(0, None, None),
)))
if let Some(client) = &this.downstream_client {
client
.send(proto::UpdateDiffBase {
project_id,
buffer_id,
diff_base,
})
.log_err();
}
}
}
})
@ -486,26 +500,25 @@ impl BufferStore {
let new_file = save.await?;
let mtime = new_file.mtime;
this.update(&mut cx, |this, cx| {
if let Some(project_id) = this.remote_id {
if let Some(downstream_client) = this.downstream_client.as_ref() {
let project_id = this.remote_id.unwrap_or(0);
if has_changed_file {
cx.emit(BufferStoreEvent::MessageToReplicas(Box::new(
proto::UpdateBufferFile {
downstream_client
.send(proto::UpdateBufferFile {
project_id,
buffer_id: buffer_id.to_proto(),
file: Some(language::File::to_proto(&*new_file, cx)),
}
.into_envelope(0, None, None),
)));
})
.log_err();
}
cx.emit(BufferStoreEvent::MessageToReplicas(Box::new(
proto::BufferSaved {
downstream_client
.send(proto::BufferSaved {
project_id,
buffer_id: buffer_id.to_proto(),
version: serialize_version(&version),
mtime: mtime.map(|time| time.into()),
}
.into_envelope(0, None, None),
)));
})
.log_err();
}
})?;
buffer_handle.update(&mut cx, |buffer, cx| {
@ -759,6 +772,7 @@ impl BufferStore {
}
pub fn disconnected_from_host(&mut self, cx: &mut AppContext) {
self.downstream_client.take();
self.set_remote_id(None, cx);
for buffer in self.buffers() {
@ -772,7 +786,21 @@ impl BufferStore {
self.remote_buffer_listeners.clear();
}
pub fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut AppContext) {
/// Begins sharing this buffer store with remote peers: records the
/// downstream client used to forward buffer updates, and advertises the
/// project's remote id to open buffers via `set_remote_id`.
pub fn shared(
    &mut self,
    remote_id: u64,
    downstream_client: AnyProtoClient,
    cx: &mut AppContext,
) {
    // Any previously stored client is simply dropped and replaced.
    self.downstream_client.replace(downstream_client);
    self.set_remote_id(Some(remote_id), cx);
}
/// Stops sharing: clears the advertised remote project id.
///
/// NOTE(review): `downstream_client` is left in place here — presumably it
/// is cleared elsewhere (e.g. `disconnected_from_host`); confirm against
/// callers.
pub fn unshared(&mut self, _cx: &mut ModelContext<Self>) {
    self.remote_id = None;
}
fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut AppContext) {
self.remote_id = remote_id;
for open_buffer in self.opened_buffers.values_mut() {
if remote_id.is_some() {
@ -966,14 +994,15 @@ impl BufferStore {
}
if let Some(project_id) = self.remote_id {
events.push(BufferStoreEvent::MessageToReplicas(Box::new(
proto::UpdateBufferFile {
project_id,
buffer_id: buffer_id.to_proto(),
file: Some(new_file.to_proto(cx)),
}
.into_envelope(0, None, None),
)))
if let Some(client) = &self.downstream_client {
client
.send(proto::UpdateBufferFile {
project_id,
buffer_id: buffer_id.to_proto(),
file: Some(new_file.to_proto(cx)),
})
.ok();
}
}
buffer.file_updated(Arc::new(new_file), cx);
@ -1406,8 +1435,6 @@ impl BufferStore {
&mut self,
buffer: &Model<Buffer>,
peer_id: proto::PeerId,
project_id: u64,
client: AnyProtoClient,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let buffer_id = buffer.read(cx).remote_id();
@ -1420,6 +1447,10 @@ impl BufferStore {
return Task::ready(Ok(()));
}
let Some((client, project_id)) = self.downstream_client.clone().zip(self.remote_id) else {
return Task::ready(Ok(()));
};
cx.spawn(|this, mut cx| async move {
let Some(buffer) = this.update(&mut cx, |this, _| this.get(buffer_id))? else {
return anyhow::Ok(());
@ -1480,6 +1511,64 @@ impl BufferStore {
pub fn shared_buffers(&self) -> &HashMap<proto::PeerId, HashSet<BufferId>> {
&self.shared_buffers
}
/// Converts a [`ProjectTransaction`] into its wire representation for a
/// specific peer.
///
/// For every buffer touched by the transaction, this first ensures the peer
/// has (or will receive) a replica of the buffer, then records the buffer's
/// remote id alongside the serialized transaction. The two parallel vectors
/// in the returned message are index-aligned.
pub fn serialize_project_transaction_for_peer(
    &mut self,
    project_transaction: ProjectTransaction,
    peer_id: proto::PeerId,
    cx: &mut ModelContext<Self>,
) -> proto::ProjectTransaction {
    let mut message = proto::ProjectTransaction {
        buffer_ids: Default::default(),
        transactions: Default::default(),
    };
    for (buffer, transaction) in project_transaction.0 {
        // Best-effort replication: failures are logged, not propagated,
        // matching the fire-and-forget nature of buffer creation.
        self.create_buffer_for_peer(&buffer, peer_id, cx)
            .detach_and_log_err(cx);
        message.buffer_ids.push(buffer.read(cx).remote_id().into());
        message
            .transactions
            .push(language::proto::serialize_transaction(&transaction));
    }
    message
}
/// Reconstructs a [`ProjectTransaction`] from its wire representation.
///
/// First resolves every referenced buffer — awaiting replication of any
/// buffer that hasn't arrived on this client yet — then waits for each
/// buffer to have applied the edits its transaction refers to. When
/// `push_to_history` is true, each transaction is also pushed onto its
/// buffer's undo history.
pub async fn deserialize_project_transaction(
    this: WeakModel<Self>,
    message: proto::ProjectTransaction,
    push_to_history: bool,
    mut cx: AsyncAppContext,
) -> Result<ProjectTransaction> {
    let mut result = ProjectTransaction::default();

    // Phase 1: resolve buffers and deserialize their transactions.
    // `buffer_ids` and `transactions` are index-aligned on the wire.
    for (raw_id, serialized) in message.buffer_ids.into_iter().zip(message.transactions) {
        let buffer_id = BufferId::new(raw_id)?;
        let buffer = this
            .update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(buffer_id, cx)
            })?
            .await?;
        let transaction = language::proto::deserialize_transaction(serialized)?;
        result.0.insert(buffer, transaction);
    }

    // Phase 2: ensure every buffer has caught up to the edits referenced
    // by its transaction before exposing the result to the caller.
    for (buffer, transaction) in &result.0 {
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_edits(transaction.edit_ids.iter().copied())
            })?
            .await?;
        if push_to_history {
            buffer.update(&mut cx, |buffer, _| {
                buffer.push_transaction(transaction.clone(), Instant::now());
            })?;
        }
    }

    Ok(result)
}
}
impl OpenBuffer {

View File

@ -1,9 +1,10 @@
mod signature_help;
use crate::{
CodeAction, CoreCompletion, DocumentHighlight, Hover, HoverBlock, HoverBlockKind, InlayHint,
InlayHintLabel, InlayHintLabelPart, InlayHintLabelPartTooltip, InlayHintTooltip, Location,
LocationLink, MarkupContent, Project, ProjectTransaction, ResolveState,
buffer_store::BufferStore, lsp_store::LspStore, CodeAction, CoreCompletion, DocumentHighlight,
Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart,
InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink, MarkupContent,
ProjectTransaction, ResolveState,
};
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
@ -11,7 +12,7 @@ use client::proto::{self, PeerId};
use clock::Global;
use collections::HashSet;
use futures::future;
use gpui::{AppContext, AsyncAppContext, Model};
use gpui::{AppContext, AsyncAppContext, Entity, Model};
use language::{
language_settings::{language_settings, InlayHintKind, LanguageSettings},
point_from_lsp, point_to_lsp,
@ -69,7 +70,7 @@ pub trait LspCommand: 'static + Sized + Send {
async fn response_from_lsp(
self,
message: <Self::LspRequest as lsp::request::Request>::Result,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
cx: AsyncAppContext,
@ -79,14 +80,14 @@ pub trait LspCommand: 'static + Sized + Send {
async fn from_proto(
message: Self::ProtoRequest,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
cx: AsyncAppContext,
) -> Result<Self>;
fn response_to_proto(
response: Self::Response,
project: &mut Project,
lsp_store: &mut LspStore,
peer_id: PeerId,
buffer_version: &clock::Global,
cx: &mut AppContext,
@ -95,7 +96,7 @@ pub trait LspCommand: 'static + Sized + Send {
async fn response_from_proto(
self,
message: <Self::ProtoRequest as proto::RequestMessage>::Response,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
cx: AsyncAppContext,
) -> Result<Self::Response>;
@ -205,7 +206,7 @@ impl LspCommand for PrepareRename {
async fn response_from_lsp(
self,
message: Option<lsp::PrepareRenameResponse>,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
_: LanguageServerId,
mut cx: AsyncAppContext,
@ -240,7 +241,7 @@ impl LspCommand for PrepareRename {
async fn from_proto(
message: proto::PrepareRename,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -261,7 +262,7 @@ impl LspCommand for PrepareRename {
fn response_to_proto(
range: Option<Range<Anchor>>,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
buffer_version: &clock::Global,
_: &mut AppContext,
@ -281,7 +282,7 @@ impl LspCommand for PrepareRename {
async fn response_from_proto(
self,
message: proto::PrepareRenameResponse,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Option<Range<Anchor>>> {
@ -332,16 +333,16 @@ impl LspCommand for PerformRename {
async fn response_from_lsp(
self,
message: Option<lsp::WorkspaceEdit>,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncAppContext,
) -> Result<ProjectTransaction> {
if let Some(edit) = message {
let (lsp_adapter, lsp_server) =
language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
Project::deserialize_workspace_edit(
project,
language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?;
LspStore::deserialize_workspace_edit(
lsp_store,
edit,
self.push_to_history,
lsp_adapter,
@ -368,7 +369,7 @@ impl LspCommand for PerformRename {
async fn from_proto(
message: proto::PerformRename,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -390,12 +391,14 @@ impl LspCommand for PerformRename {
fn response_to_proto(
response: ProjectTransaction,
project: &mut Project,
lsp_store: &mut LspStore,
peer_id: PeerId,
_: &clock::Global,
cx: &mut AppContext,
) -> proto::PerformRenameResponse {
let transaction = project.serialize_project_transaction_for_peer(response, peer_id, cx);
let transaction = lsp_store.buffer_store().update(cx, |buffer_store, cx| {
buffer_store.serialize_project_transaction_for_peer(response, peer_id, cx)
});
proto::PerformRenameResponse {
transaction: Some(transaction),
}
@ -404,15 +407,15 @@ impl LspCommand for PerformRename {
async fn response_from_proto(
self,
message: proto::PerformRenameResponse,
project: Model<Project>,
lsp_store: Model<LspStore>,
_: Model<Buffer>,
cx: AsyncAppContext,
) -> Result<ProjectTransaction> {
let message = message
.transaction
.ok_or_else(|| anyhow!("missing transaction"))?;
Project::deserialize_project_transaction(
project.downgrade(),
BufferStore::deserialize_project_transaction(
lsp_store.read_with(&cx, |lsp_store, _| lsp_store.buffer_store().downgrade())?,
message,
self.push_to_history,
cx,
@ -460,12 +463,12 @@ impl LspCommand for GetDefinition {
async fn response_from_lsp(
self,
message: Option<lsp::GotoDefinitionResponse>,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
location_links_from_lsp(message, project, buffer, server_id, cx).await
location_links_from_lsp(message, lsp_store, buffer, server_id, cx).await
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDefinition {
@ -481,7 +484,7 @@ impl LspCommand for GetDefinition {
async fn from_proto(
message: proto::GetDefinition,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -501,23 +504,23 @@ impl LspCommand for GetDefinition {
fn response_to_proto(
response: Vec<LocationLink>,
project: &mut Project,
lsp_store: &mut LspStore,
peer_id: PeerId,
_: &clock::Global,
cx: &mut AppContext,
) -> proto::GetDefinitionResponse {
let links = location_links_to_proto(response, project, peer_id, cx);
let links = location_links_to_proto(response, lsp_store, peer_id, cx);
proto::GetDefinitionResponse { links }
}
async fn response_from_proto(
self,
message: proto::GetDefinitionResponse,
project: Model<Project>,
lsp_store: Model<LspStore>,
_: Model<Buffer>,
cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
location_links_from_proto(message.links, project, cx).await
location_links_from_proto(message.links, lsp_store, cx).await
}
fn buffer_id_from_proto(message: &proto::GetDefinition) -> Result<BufferId> {
@ -560,12 +563,12 @@ impl LspCommand for GetDeclaration {
async fn response_from_lsp(
self,
message: Option<lsp::GotoDeclarationResponse>,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
location_links_from_lsp(message, project, buffer, server_id, cx).await
location_links_from_lsp(message, lsp_store, buffer, server_id, cx).await
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDeclaration {
@ -581,7 +584,7 @@ impl LspCommand for GetDeclaration {
async fn from_proto(
message: proto::GetDeclaration,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -601,23 +604,23 @@ impl LspCommand for GetDeclaration {
fn response_to_proto(
response: Vec<LocationLink>,
project: &mut Project,
lsp_store: &mut LspStore,
peer_id: PeerId,
_: &clock::Global,
cx: &mut AppContext,
) -> proto::GetDeclarationResponse {
let links = location_links_to_proto(response, project, peer_id, cx);
let links = location_links_to_proto(response, lsp_store, peer_id, cx);
proto::GetDeclarationResponse { links }
}
async fn response_from_proto(
self,
message: proto::GetDeclarationResponse,
project: Model<Project>,
lsp_store: Model<LspStore>,
_: Model<Buffer>,
cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
location_links_from_proto(message.links, project, cx).await
location_links_from_proto(message.links, lsp_store, cx).await
}
fn buffer_id_from_proto(message: &proto::GetDeclaration) -> Result<BufferId> {
@ -653,12 +656,12 @@ impl LspCommand for GetImplementation {
async fn response_from_lsp(
self,
message: Option<lsp::GotoImplementationResponse>,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
location_links_from_lsp(message, project, buffer, server_id, cx).await
location_links_from_lsp(message, lsp_store, buffer, server_id, cx).await
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetImplementation {
@ -674,7 +677,7 @@ impl LspCommand for GetImplementation {
async fn from_proto(
message: proto::GetImplementation,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -694,19 +697,19 @@ impl LspCommand for GetImplementation {
fn response_to_proto(
response: Vec<LocationLink>,
project: &mut Project,
lsp_store: &mut LspStore,
peer_id: PeerId,
_: &clock::Global,
cx: &mut AppContext,
) -> proto::GetImplementationResponse {
let links = location_links_to_proto(response, project, peer_id, cx);
let links = location_links_to_proto(response, lsp_store, peer_id, cx);
proto::GetImplementationResponse { links }
}
async fn response_from_proto(
self,
message: proto::GetImplementationResponse,
project: Model<Project>,
project: Model<LspStore>,
_: Model<Buffer>,
cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
@ -754,7 +757,7 @@ impl LspCommand for GetTypeDefinition {
async fn response_from_lsp(
self,
message: Option<lsp::GotoTypeDefinitionResponse>,
project: Model<Project>,
project: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
cx: AsyncAppContext,
@ -775,7 +778,7 @@ impl LspCommand for GetTypeDefinition {
async fn from_proto(
message: proto::GetTypeDefinition,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -795,19 +798,19 @@ impl LspCommand for GetTypeDefinition {
fn response_to_proto(
response: Vec<LocationLink>,
project: &mut Project,
lsp_store: &mut LspStore,
peer_id: PeerId,
_: &clock::Global,
cx: &mut AppContext,
) -> proto::GetTypeDefinitionResponse {
let links = location_links_to_proto(response, project, peer_id, cx);
let links = location_links_to_proto(response, lsp_store, peer_id, cx);
proto::GetTypeDefinitionResponse { links }
}
async fn response_from_proto(
self,
message: proto::GetTypeDefinitionResponse,
project: Model<Project>,
project: Model<LspStore>,
_: Model<Buffer>,
cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
@ -820,14 +823,14 @@ impl LspCommand for GetTypeDefinition {
}
fn language_server_for_buffer(
project: &Model<Project>,
lsp_store: &Model<LspStore>,
buffer: &Model<Buffer>,
server_id: LanguageServerId,
cx: &mut AsyncAppContext,
) -> Result<(Arc<CachedLspAdapter>, Arc<LanguageServer>)> {
project
.update(cx, |project, cx| {
project
lsp_store
.update(cx, |lsp_store, cx| {
lsp_store
.language_server_for_buffer(buffer.read(cx), server_id, cx)
.map(|(adapter, server)| (adapter.clone(), server.clone()))
})?
@ -836,7 +839,7 @@ fn language_server_for_buffer(
async fn location_links_from_proto(
proto_links: Vec<proto::LocationLink>,
project: Model<Project>,
lsp_store: Model<LspStore>,
mut cx: AsyncAppContext,
) -> Result<Vec<LocationLink>> {
let mut links = Vec::new();
@ -845,9 +848,9 @@ async fn location_links_from_proto(
let origin = match link.origin {
Some(origin) => {
let buffer_id = BufferId::new(origin.buffer_id)?;
let buffer = project
.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(buffer_id, cx)
let buffer = lsp_store
.update(&mut cx, |lsp_store, cx| {
lsp_store.wait_for_remote_buffer(buffer_id, cx)
})?
.await?;
let start = origin
@ -871,9 +874,9 @@ async fn location_links_from_proto(
let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
let buffer_id = BufferId::new(target.buffer_id)?;
let buffer = project
.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(buffer_id, cx)
let buffer = lsp_store
.update(&mut cx, |lsp_store, cx| {
lsp_store.wait_for_remote_buffer(buffer_id, cx)
})?
.await?;
let start = target
@ -900,7 +903,7 @@ async fn location_links_from_proto(
async fn location_links_from_lsp(
message: Option<lsp::GotoDefinitionResponse>,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncAppContext,
@ -932,10 +935,10 @@ async fn location_links_from_lsp(
}
let (lsp_adapter, language_server) =
language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?;
let mut definitions = Vec::new();
for (origin_range, target_uri, target_range) in unresolved_links {
let target_buffer_handle = project
let target_buffer_handle = lsp_store
.update(&mut cx, |this, cx| {
this.open_local_buffer_via_lsp(
target_uri,
@ -982,7 +985,7 @@ async fn location_links_from_lsp(
fn location_links_to_proto(
links: Vec<LocationLink>,
project: &mut Project,
lsp_store: &mut LspStore,
peer_id: PeerId,
cx: &mut AppContext,
) -> Vec<proto::LocationLink> {
@ -990,9 +993,14 @@ fn location_links_to_proto(
.into_iter()
.map(|definition| {
let origin = definition.origin.map(|origin| {
let buffer_id = project
.create_buffer_for_peer(&origin.buffer, peer_id, cx)
.into();
lsp_store
.buffer_store()
.update(cx, |buffer_store, cx| {
buffer_store.create_buffer_for_peer(&origin.buffer, peer_id, cx)
})
.detach_and_log_err(cx);
let buffer_id = origin.buffer.read(cx).remote_id().into();
proto::Location {
start: Some(serialize_anchor(&origin.range.start)),
end: Some(serialize_anchor(&origin.range.end)),
@ -1000,9 +1008,14 @@ fn location_links_to_proto(
}
});
let buffer_id = project
.create_buffer_for_peer(&definition.target.buffer, peer_id, cx)
.into();
lsp_store
.buffer_store()
.update(cx, |buffer_store, cx| {
buffer_store.create_buffer_for_peer(&definition.target.buffer, peer_id, cx)
})
.detach_and_log_err(cx);
let buffer_id = definition.target.buffer.read(cx).remote_id().into();
let target = proto::Location {
start: Some(serialize_anchor(&definition.target.range.start)),
end: Some(serialize_anchor(&definition.target.range.end)),
@ -1060,20 +1073,20 @@ impl LspCommand for GetReferences {
async fn response_from_lsp(
self,
locations: Option<Vec<lsp::Location>>,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncAppContext,
) -> Result<Vec<Location>> {
let mut references = Vec::new();
let (lsp_adapter, language_server) =
language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?;
if let Some(locations) = locations {
for lsp_location in locations {
let target_buffer_handle = project
.update(&mut cx, |this, cx| {
this.open_local_buffer_via_lsp(
let target_buffer_handle = lsp_store
.update(&mut cx, |lsp_store, cx| {
lsp_store.open_local_buffer_via_lsp(
lsp_location.uri,
language_server.server_id(),
lsp_adapter.name.clone(),
@ -1114,7 +1127,7 @@ impl LspCommand for GetReferences {
async fn from_proto(
message: proto::GetReferences,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -1134,7 +1147,7 @@ impl LspCommand for GetReferences {
fn response_to_proto(
response: Vec<Location>,
project: &mut Project,
lsp_store: &mut LspStore,
peer_id: PeerId,
_: &clock::Global,
cx: &mut AppContext,
@ -1142,7 +1155,13 @@ impl LspCommand for GetReferences {
let locations = response
.into_iter()
.map(|definition| {
let buffer_id = project.create_buffer_for_peer(&definition.buffer, peer_id, cx);
lsp_store
.buffer_store()
.update(cx, |buffer_store, cx| {
buffer_store.create_buffer_for_peer(&definition.buffer, peer_id, cx)
})
.detach_and_log_err(cx);
let buffer_id = definition.buffer.read(cx).remote_id();
proto::Location {
start: Some(serialize_anchor(&definition.range.start)),
end: Some(serialize_anchor(&definition.range.end)),
@ -1156,7 +1175,7 @@ impl LspCommand for GetReferences {
async fn response_from_proto(
self,
message: proto::GetReferencesResponse,
project: Model<Project>,
project: Model<LspStore>,
_: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Vec<Location>> {
@ -1227,7 +1246,7 @@ impl LspCommand for GetDocumentHighlights {
async fn response_from_lsp(
self,
lsp_highlights: Option<Vec<lsp::DocumentHighlight>>,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
_: LanguageServerId,
mut cx: AsyncAppContext,
@ -1266,7 +1285,7 @@ impl LspCommand for GetDocumentHighlights {
async fn from_proto(
message: proto::GetDocumentHighlights,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -1286,7 +1305,7 @@ impl LspCommand for GetDocumentHighlights {
fn response_to_proto(
response: Vec<DocumentHighlight>,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
_: &clock::Global,
_: &mut AppContext,
@ -1310,7 +1329,7 @@ impl LspCommand for GetDocumentHighlights {
async fn response_from_proto(
self,
message: proto::GetDocumentHighlightsResponse,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Vec<DocumentHighlight>> {
@ -1386,7 +1405,7 @@ impl LspCommand for GetSignatureHelp {
async fn response_from_lsp(
self,
message: Option<lsp::SignatureHelp>,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
_: LanguageServerId,
mut cx: AsyncAppContext,
@ -1407,7 +1426,7 @@ impl LspCommand for GetSignatureHelp {
async fn from_proto(
payload: Self::ProtoRequest,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -1429,7 +1448,7 @@ impl LspCommand for GetSignatureHelp {
fn response_to_proto(
response: Self::Response,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
_: &Global,
_: &mut AppContext,
@ -1443,7 +1462,7 @@ impl LspCommand for GetSignatureHelp {
async fn response_from_proto(
self,
response: proto::GetSignatureHelpResponse,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self::Response> {
@ -1494,7 +1513,7 @@ impl LspCommand for GetHover {
async fn response_from_lsp(
self,
message: Option<lsp::Hover>,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
_: LanguageServerId,
mut cx: AsyncAppContext,
@ -1575,7 +1594,7 @@ impl LspCommand for GetHover {
async fn from_proto(
message: Self::ProtoRequest,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -1595,7 +1614,7 @@ impl LspCommand for GetHover {
fn response_to_proto(
response: Self::Response,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
_: &clock::Global,
_: &mut AppContext,
@ -1641,7 +1660,7 @@ impl LspCommand for GetHover {
async fn response_from_proto(
self,
message: proto::GetHoverResponse,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self::Response> {
@ -1717,7 +1736,7 @@ impl LspCommand for GetCompletions {
async fn response_from_lsp(
self,
completions: Option<lsp::CompletionResponse>,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncAppContext,
@ -1737,9 +1756,9 @@ impl LspCommand for GetCompletions {
Default::default()
};
let language_server_adapter = project
.update(&mut cx, |project, _cx| {
project.language_server_adapter_for_id(server_id)
let language_server_adapter = lsp_store
.update(&mut cx, |lsp_store, _| {
lsp_store.language_server_adapter_for_id(server_id)
})?
.ok_or_else(|| anyhow!("no such language server"))?;
@ -1876,7 +1895,7 @@ impl LspCommand for GetCompletions {
async fn from_proto(
message: proto::GetCompletions,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -1904,7 +1923,7 @@ impl LspCommand for GetCompletions {
fn response_to_proto(
completions: Vec<CoreCompletion>,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
buffer_version: &clock::Global,
_: &mut AppContext,
@ -1912,7 +1931,7 @@ impl LspCommand for GetCompletions {
proto::GetCompletionsResponse {
completions: completions
.iter()
.map(Project::serialize_completion)
.map(LspStore::serialize_completion)
.collect(),
version: serialize_version(buffer_version),
}
@ -1921,7 +1940,7 @@ impl LspCommand for GetCompletions {
async fn response_from_proto(
self,
message: proto::GetCompletionsResponse,
_project: Model<Project>,
_project: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self::Response> {
@ -1934,7 +1953,7 @@ impl LspCommand for GetCompletions {
message
.completions
.into_iter()
.map(Project::deserialize_completion)
.map(LspStore::deserialize_completion)
.collect()
}
@ -2060,7 +2079,7 @@ impl LspCommand for GetCodeActions {
async fn response_from_lsp(
self,
actions: Option<lsp::CodeActionResponse>,
_: Model<Project>,
_: Model<LspStore>,
_: Model<Buffer>,
server_id: LanguageServerId,
_: AsyncAppContext,
@ -2094,7 +2113,7 @@ impl LspCommand for GetCodeActions {
async fn from_proto(
message: proto::GetCodeActions,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -2120,7 +2139,7 @@ impl LspCommand for GetCodeActions {
fn response_to_proto(
code_actions: Vec<CodeAction>,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
buffer_version: &clock::Global,
_: &mut AppContext,
@ -2128,7 +2147,7 @@ impl LspCommand for GetCodeActions {
proto::GetCodeActionsResponse {
actions: code_actions
.iter()
.map(Project::serialize_code_action)
.map(LspStore::serialize_code_action)
.collect(),
version: serialize_version(buffer_version),
}
@ -2137,7 +2156,7 @@ impl LspCommand for GetCodeActions {
async fn response_from_proto(
self,
message: proto::GetCodeActionsResponse,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Vec<CodeAction>> {
@ -2149,7 +2168,7 @@ impl LspCommand for GetCodeActions {
message
.actions
.into_iter()
.map(Project::deserialize_code_action)
.map(LspStore::deserialize_code_action)
.collect()
}
@ -2226,16 +2245,16 @@ impl LspCommand for OnTypeFormatting {
async fn response_from_lsp(
self,
message: Option<Vec<lsp::TextEdit>>,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncAppContext,
) -> Result<Option<Transaction>> {
if let Some(edits) = message {
let (lsp_adapter, lsp_server) =
language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
Project::deserialize_edits(
project,
language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?;
LspStore::deserialize_text_edits(
lsp_store,
buffer,
edits,
self.push_to_history,
@ -2263,7 +2282,7 @@ impl LspCommand for OnTypeFormatting {
async fn from_proto(
message: proto::OnTypeFormatting,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -2291,7 +2310,7 @@ impl LspCommand for OnTypeFormatting {
fn response_to_proto(
response: Option<Transaction>,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
_: &clock::Global,
_: &mut AppContext,
@ -2305,7 +2324,7 @@ impl LspCommand for OnTypeFormatting {
async fn response_from_proto(
self,
message: proto::OnTypeFormattingResponse,
_: Model<Project>,
_: Model<LspStore>,
_: Model<Buffer>,
_: AsyncAppContext,
) -> Result<Option<Transaction>> {
@ -2729,13 +2748,13 @@ impl LspCommand for InlayHints {
async fn response_from_lsp(
self,
message: Option<Vec<lsp::InlayHint>>,
project: Model<Project>,
lsp_store: Model<LspStore>,
buffer: Model<Buffer>,
server_id: LanguageServerId,
mut cx: AsyncAppContext,
) -> anyhow::Result<Vec<InlayHint>> {
let (lsp_adapter, lsp_server) =
language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
language_server_for_buffer(&lsp_store, &buffer, server_id, &mut cx)?;
// `typescript-language-server` adds padding to the left for type hints, turning
// `const foo: boolean` into `const foo : boolean` which looks odd.
// `rust-analyzer` does not have the padding for this case, and we have to accommodate both.
@ -2785,7 +2804,7 @@ impl LspCommand for InlayHints {
async fn from_proto(
message: proto::InlayHints,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -2808,7 +2827,7 @@ impl LspCommand for InlayHints {
fn response_to_proto(
response: Vec<InlayHint>,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
buffer_version: &clock::Global,
_: &mut AppContext,
@ -2825,7 +2844,7 @@ impl LspCommand for InlayHints {
async fn response_from_proto(
self,
message: proto::InlayHintsResponse,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> anyhow::Result<Vec<InlayHint>> {
@ -2887,7 +2906,7 @@ impl LspCommand for LinkedEditingRange {
async fn response_from_lsp(
self,
message: Option<lsp::LinkedEditingRanges>,
_project: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
_server_id: LanguageServerId,
cx: AsyncAppContext,
@ -2923,7 +2942,7 @@ impl LspCommand for LinkedEditingRange {
async fn from_proto(
message: proto::LinkedEditingRange,
_project: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Self> {
@ -2944,7 +2963,7 @@ impl LspCommand for LinkedEditingRange {
fn response_to_proto(
response: Vec<Range<Anchor>>,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
buffer_version: &clock::Global,
_: &mut AppContext,
@ -2964,7 +2983,7 @@ impl LspCommand for LinkedEditingRange {
async fn response_from_proto(
self,
message: proto::LinkedEditingRangeResponse,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> Result<Vec<Range<Anchor>>> {

View File

@ -1,5 +1,4 @@
use std::{path::Path, sync::Arc};
use crate::{lsp_command::LspCommand, lsp_store::LspStore};
use anyhow::{Context, Result};
use async_trait::async_trait;
use gpui::{AppContext, AsyncAppContext, Model};
@ -7,10 +6,9 @@ use language::{point_to_lsp, proto::deserialize_anchor, Buffer};
use lsp::{LanguageServer, LanguageServerId};
use rpc::proto::{self, PeerId};
use serde::{Deserialize, Serialize};
use std::{path::Path, sync::Arc};
use text::{BufferId, PointUtf16, ToPointUtf16};
use crate::{lsp_command::LspCommand, Project};
pub enum LspExpandMacro {}
impl lsp::request::Request for LspExpandMacro {
@ -67,7 +65,7 @@ impl LspCommand for ExpandMacro {
async fn response_from_lsp(
self,
message: Option<ExpandedMacro>,
_: Model<Project>,
_: Model<LspStore>,
_: Model<Buffer>,
_: LanguageServerId,
_: AsyncAppContext,
@ -92,7 +90,7 @@ impl LspCommand for ExpandMacro {
async fn from_proto(
message: Self::ProtoRequest,
_: Model<Project>,
_: Model<LspStore>,
buffer: Model<Buffer>,
mut cx: AsyncAppContext,
) -> anyhow::Result<Self> {
@ -107,7 +105,7 @@ impl LspCommand for ExpandMacro {
fn response_to_proto(
response: ExpandedMacro,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
_: &clock::Global,
_: &mut AppContext,
@ -121,7 +119,7 @@ impl LspCommand for ExpandMacro {
async fn response_from_proto(
self,
message: proto::LspExtExpandMacroResponse,
_: Model<Project>,
_: Model<LspStore>,
_: Model<Buffer>,
_: AsyncAppContext,
) -> anyhow::Result<ExpandedMacro> {
@ -177,7 +175,7 @@ impl LspCommand for SwitchSourceHeader {
async fn response_from_lsp(
self,
message: Option<SwitchSourceHeaderResult>,
_: Model<Project>,
_: Model<LspStore>,
_: Model<Buffer>,
_: LanguageServerId,
_: AsyncAppContext,
@ -196,7 +194,7 @@ impl LspCommand for SwitchSourceHeader {
async fn from_proto(
_: Self::ProtoRequest,
_: Model<Project>,
_: Model<LspStore>,
_: Model<Buffer>,
_: AsyncAppContext,
) -> anyhow::Result<Self> {
@ -205,7 +203,7 @@ impl LspCommand for SwitchSourceHeader {
fn response_to_proto(
response: SwitchSourceHeaderResult,
_: &mut Project,
_: &mut LspStore,
_: PeerId,
_: &clock::Global,
_: &mut AppContext,
@ -218,7 +216,7 @@ impl LspCommand for SwitchSourceHeader {
async fn response_from_proto(
self,
message: proto::LspExtSwitchSourceHeaderResponse,
_: Model<Project>,
_: Model<LspStore>,
_: Model<Buffer>,
_: AsyncAppContext,
) -> anyhow::Result<SwitchSourceHeaderResult> {

File diff suppressed because it is too large Load Diff

View File

@ -22,9 +22,7 @@ use paths::default_prettier_dir;
use prettier::Prettier;
use util::{ResultExt, TryFutureExt};
use crate::{
Event, File, FormatOperation, PathChange, Project, ProjectEntryId, Worktree, WorktreeId,
};
use crate::{File, FormatOperation, PathChange, Project, ProjectEntryId, Worktree, WorktreeId};
pub fn prettier_plugins_for_language(
language_settings: &LanguageSettings,
@ -352,10 +350,14 @@ fn register_new_prettier(
};
LanguageServerName(Arc::from(name))
};
project
.supplementary_language_servers
.insert(new_server_id, (name, Arc::clone(prettier_server)));
cx.emit(Event::LanguageServerAdded(new_server_id));
project.lsp_store.update(cx, |lsp_store, cx| {
lsp_store.register_supplementary_language_server(
new_server_id,
name,
Arc::clone(prettier_server),
cx,
)
});
})
.ok();
}

File diff suppressed because it is too large Load Diff

View File

@ -5,19 +5,20 @@ use gpui::{AppContext, SemanticVersion, UpdateGlobal};
use http_client::Url;
use language::{
language_settings::{AllLanguageSettings, LanguageSettingsContent},
tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
LanguageConfig, LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::NumberOrString;
use lsp::{DiagnosticSeverity, NumberOrString};
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
#[cfg(not(windows))]
use std::os;
use std::task::Poll;
use std::{mem, ops::Range, task::Poll};
use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
use unindent::Unindent as _;
use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
#[gpui::test]
async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
@ -923,7 +924,7 @@ async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
version: None,
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
severity: Some(lsp::DiagnosticSeverity::WARNING),
severity: Some(DiagnosticSeverity::WARNING),
message: "error 2".to_string(),
..Default::default()
}],
@ -1284,10 +1285,10 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
language_server_id: LanguageServerId(1)
}
);
project.update(cx, |project, _| {
project.update(cx, |project, cx| {
assert_eq!(
project
.language_servers_running_disk_based_diagnostics()
.language_servers_running_disk_based_diagnostics(cx)
.collect::<Vec<_>>(),
[LanguageServerId(1)]
);
@ -1302,10 +1303,10 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
language_server_id: LanguageServerId(1)
}
);
project.update(cx, |project, _| {
project.update(cx, |project, cx| {
assert_eq!(
project
.language_servers_running_disk_based_diagnostics()
.language_servers_running_disk_based_diagnostics(cx)
.collect::<Vec<_>>(),
[] as [language::LanguageServerId; 0]
);
@ -1908,32 +1909,36 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
.unwrap();
project.update(cx, |project, cx| {
project
.update_buffer_diagnostics(
&buffer,
LanguageServerId(0),
None,
vec![
DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "syntax error 1".to_string(),
..Default::default()
project.lsp_store.update(cx, |lsp_store, cx| {
lsp_store
.update_buffer_diagnostics(
&buffer,
LanguageServerId(0),
None,
vec![
DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 10))
..Unclipped(PointUtf16::new(0, 10)),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "syntax error 1".to_string(),
..Default::default()
},
},
},
DiagnosticEntry {
range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "syntax error 2".to_string(),
..Default::default()
DiagnosticEntry {
range: Unclipped(PointUtf16::new(1, 10))
..Unclipped(PointUtf16::new(1, 10)),
diagnostic: Diagnostic {
severity: DiagnosticSeverity::ERROR,
message: "syntax error 2".to_string(),
..Default::default()
},
},
},
],
cx,
)
.unwrap();
],
cx,
)
.unwrap();
})
});
// An empty range is extended forward to include the following character.
@ -2040,6 +2045,7 @@ async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
.await;
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
@ -2104,9 +2110,9 @@ async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
);
});
let edits = project
.update(cx, |project, cx| {
project.edits_from_lsp(
let edits = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.edits_from_lsp(
&buffer,
vec![
// replace body of first function
@ -2191,6 +2197,7 @@ async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAp
.await;
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let buffer = project
.update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
.await
@ -2198,9 +2205,9 @@ async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAp
// Simulate the language server sending us a small edit in the form of a very large diff.
// Rust-analyzer does this when performing a merge-imports code action.
let edits = project
.update(cx, |project, cx| {
project.edits_from_lsp(
let edits = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.edits_from_lsp(
&buffer,
[
// Replace the first use statement without editing the semicolon.
@ -2299,6 +2306,7 @@ async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
.await;
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
let buffer = project
.update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
.await
@ -2306,9 +2314,9 @@ async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
// Simulate the language server sending us edits in a non-ordered fashion,
// with ranges sometimes being inverted or pointing to invalid locations.
let edits = project
.update(cx, |project, cx| {
project.edits_from_lsp(
let edits = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.edits_from_lsp(
&buffer,
[
lsp::TextEdit {
@ -4186,10 +4194,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true,
false,
Default::default(),
PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
None,
PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
None,
).unwrap(),
cx
)
@ -4597,14 +4603,6 @@ async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
assert!(search.next().await.is_none())
}
#[test]
fn test_glob_literal_prefix() {
assert_eq!(glob_literal_prefix("**/*.js"), "");
assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
}
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
init_test(cx);
@ -4628,8 +4626,8 @@ async fn test_create_entry(cx: &mut gpui::TestAppContext) {
let id = project.worktrees(cx).next().unwrap().read(cx).id();
project.create_entry((id, "b.."), true, cx)
})
.unwrap()
.await
.unwrap()
.to_included()
.unwrap();

View File

@ -1,13 +1,20 @@
use std::{
cell::RefCell,
path::{Path, PathBuf},
sync::Arc,
sync::{atomic::AtomicUsize, Arc},
};
use anyhow::{anyhow, Context as _, Result};
use client::{Client, DevServerProjectId};
use collections::{HashMap, HashSet};
use fs::Fs;
use futures::{future::BoxFuture, SinkExt};
use gpui::{AppContext, AsyncAppContext, EntityId, EventEmitter, Model, ModelContext, WeakModel};
use futures::{
future::{BoxFuture, Shared},
FutureExt, SinkExt,
};
use gpui::{
AppContext, AsyncAppContext, EntityId, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use postage::oneshot;
use rpc::{
proto::{self, AnyProtoClient},
@ -15,7 +22,6 @@ use rpc::{
};
use smol::{
channel::{Receiver, Sender},
future::FutureExt,
stream::StreamExt,
};
use text::ReplicaId;
@ -31,9 +37,16 @@ struct MatchingEntry {
}
pub struct WorktreeStore {
next_entry_id: Arc<AtomicUsize>,
upstream_client: Option<AnyProtoClient>,
dev_server_project_id: Option<DevServerProjectId>,
is_shared: bool,
worktrees: Vec<WorktreeHandle>,
worktrees_reordered: bool,
#[allow(clippy::type_complexity)]
loading_worktrees:
HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
fs: Arc<dyn Fs>,
}
pub enum WorktreeStoreEvent {
@ -45,14 +58,35 @@ pub enum WorktreeStoreEvent {
impl EventEmitter<WorktreeStoreEvent> for WorktreeStore {}
impl WorktreeStore {
pub fn new(retain_worktrees: bool) -> Self {
pub fn init(client: &Arc<Client>) {
client.add_model_request_handler(WorktreeStore::handle_create_project_entry);
client.add_model_request_handler(WorktreeStore::handle_rename_project_entry);
client.add_model_request_handler(WorktreeStore::handle_copy_project_entry);
client.add_model_request_handler(WorktreeStore::handle_delete_project_entry);
client.add_model_request_handler(WorktreeStore::handle_expand_project_entry);
}
pub fn new(retain_worktrees: bool, fs: Arc<dyn Fs>) -> Self {
Self {
next_entry_id: Default::default(),
loading_worktrees: Default::default(),
upstream_client: None,
dev_server_project_id: None,
is_shared: retain_worktrees,
worktrees: Vec::new(),
worktrees_reordered: false,
fs,
}
}
pub fn set_upstream_client(&mut self, client: AnyProtoClient) {
self.upstream_client = Some(client);
}
pub fn set_dev_server_project_id(&mut self, id: DevServerProjectId) {
self.dev_server_project_id = Some(id);
}
/// Iterates through all worktrees, including ones that don't appear in the project panel
pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
self.worktrees
@ -83,6 +117,19 @@ impl WorktreeStore {
.find(|worktree| worktree.read(cx).contains_entry(entry_id))
}
pub fn find_worktree(
&self,
abs_path: &Path,
cx: &AppContext,
) -> Option<(Model<Worktree>, PathBuf)> {
for tree in self.worktrees() {
if let Ok(relative_path) = abs_path.strip_prefix(tree.read(cx).abs_path()) {
return Some((tree.clone(), relative_path.into()));
}
}
None
}
pub fn entry_for_id<'a>(
&'a self,
entry_id: ProjectEntryId,
@ -92,6 +139,157 @@ impl WorktreeStore {
.find_map(|worktree| worktree.read(cx).entry_for_id(entry_id))
}
pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
self.worktree_for_id(path.worktree_id, cx)?
.read(cx)
.entry_for_path(&path.path)
.cloned()
}
pub fn create_worktree(
&mut self,
abs_path: impl AsRef<Path>,
visible: bool,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Worktree>>> {
let path: Arc<Path> = abs_path.as_ref().into();
if !self.loading_worktrees.contains_key(&path) {
let task = if let Some(client) = self.upstream_client.clone() {
if let Some(dev_server_project_id) = self.dev_server_project_id {
self.create_dev_server_worktree(client, dev_server_project_id, abs_path, cx)
} else {
self.create_ssh_worktree(client, abs_path, visible, cx)
}
} else {
self.create_local_worktree(abs_path, visible, cx)
};
self.loading_worktrees.insert(path.clone(), task.shared());
}
let task = self.loading_worktrees.get(&path).unwrap().clone();
cx.background_executor().spawn(async move {
let result = match task.await {
Ok(worktree) => Ok(worktree),
Err(err) => Err(anyhow!("{}", err)),
};
result
})
}
fn create_ssh_worktree(
&mut self,
client: AnyProtoClient,
abs_path: impl AsRef<Path>,
visible: bool,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
let abs_path = abs_path.as_ref();
let root_name = abs_path.file_name().unwrap().to_string_lossy().to_string();
let path = abs_path.to_string_lossy().to_string();
cx.spawn(|this, mut cx| async move {
let response = client
.request(proto::AddWorktree { path: path.clone() })
.await?;
let worktree = cx.update(|cx| {
Worktree::remote(
0,
0,
proto::WorktreeMetadata {
id: response.worktree_id,
root_name,
visible,
abs_path: path,
},
client,
cx,
)
})?;
this.update(&mut cx, |this, cx| this.add(&worktree, cx))?;
Ok(worktree)
})
}
fn create_local_worktree(
&mut self,
abs_path: impl AsRef<Path>,
visible: bool,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
let fs = self.fs.clone();
let next_entry_id = self.next_entry_id.clone();
let path: Arc<Path> = abs_path.as_ref().into();
cx.spawn(move |this, mut cx| async move {
let worktree = Worktree::local(path.clone(), visible, fs, next_entry_id, &mut cx).await;
this.update(&mut cx, |project, _| {
project.loading_worktrees.remove(&path);
})?;
let worktree = worktree?;
this.update(&mut cx, |this, cx| this.add(&worktree, cx))?;
if visible {
cx.update(|cx| {
cx.add_recent_document(&path);
})
.log_err();
}
Ok(worktree)
})
}
// Creates a worktree hosted by a remote dev server: asks the server to
// update its project's path list, then waits for the new worktree to
// appear locally via normal project change notifications.
fn create_dev_server_worktree(
&mut self,
client: AnyProtoClient,
dev_server_project_id: DevServerProjectId,
abs_path: impl AsRef<Path>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
let path: Arc<Path> = abs_path.as_ref().into();
// The server expects the complete path set, so send every currently
// visible worktree path plus the new one.
let mut paths: Vec<String> = self
.visible_worktrees(cx)
.map(|worktree| worktree.read(cx).abs_path().to_string_lossy().to_string())
.collect();
paths.push(path.to_string_lossy().to_string());
// The request future is constructed here but only awaited inside the
// spawned task, after the observer below has been registered.
let request = client.request(proto::UpdateDevServerProject {
dev_server_project_id: dev_server_project_id.0,
paths,
});
let abs_path = abs_path.as_ref().to_path_buf();
cx.spawn(move |project, mut cx| async move {
// One-shot channel resolved by the observer once the worktree shows up.
let (tx, rx) = futures::channel::oneshot::channel();
// RefCell lets the (re-invocable) observer closure consume the
// sender exactly once.
let tx = RefCell::new(Some(tx));
let Some(project) = project.upgrade() else {
return Err(anyhow!("project dropped"))?;
};
// Watch the project and fire the channel when a worktree matching
// `abs_path` becomes findable. Registered before awaiting the request
// so the notification cannot be missed.
let observer = cx.update(|cx| {
cx.observe(&project, move |project, cx| {
let abs_path = abs_path.clone();
project.update(cx, |project, cx| {
if let Some((worktree, _)) = project.find_worktree(&abs_path, cx) {
if let Some(tx) = tx.borrow_mut().take() {
tx.send(worktree).ok();
}
}
})
})
})?;
request.await?;
// rx errs only if the sender was dropped without firing (e.g. the
// project went away before the worktree appeared).
let worktree = rx.await.map_err(|e| anyhow!(e))?;
// Explicitly drop the subscription to stop further observation callbacks.
drop(observer);
// Clear the pending entry so future calls for this path start fresh.
project.update(&mut cx, |project, _| {
project.loading_worktrees.remove(&path);
})?;
Ok(worktree)
})
}
pub fn add(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
let push_strong_handle = self.is_shared || worktree.read(cx).is_visible();
let handle = if push_strong_handle {

View File

@ -67,7 +67,7 @@ pub trait ProtoClient: Send + Sync {
request_type: &'static str,
) -> BoxFuture<'static, anyhow::Result<Envelope>>;
fn send(&self, envelope: Envelope) -> anyhow::Result<()>;
fn send(&self, envelope: Envelope, message_type: &'static str) -> anyhow::Result<()>;
}
#[derive(Clone)]
@ -101,11 +101,7 @@ impl AnyProtoClient {
pub fn send<T: EnvelopedMessage>(&self, request: T) -> anyhow::Result<()> {
let envelope = request.into_envelope(0, None, None);
self.0.send(envelope)
}
pub fn send_dynamic(&self, message: Envelope) -> anyhow::Result<()> {
self.0.send(message)
self.0.send(envelope, T::NAME)
}
}

View File

@ -485,7 +485,7 @@ impl ProtoClient for SshSession {
self.request_dynamic(envelope, request_type).boxed()
}
fn send(&self, envelope: proto::Envelope) -> Result<()> {
fn send(&self, envelope: proto::Envelope, _message_type: &'static str) -> Result<()> {
self.send_dynamic(envelope)
}
}

View File

@ -35,7 +35,6 @@ serde.workspace = true
serde_json.workspace = true
shellexpand.workspace = true
smol.workspace = true
util.workspace = true
worktree.workspace = true
[dev-dependencies]

View File

@ -2,10 +2,8 @@ use anyhow::{anyhow, Result};
use fs::Fs;
use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext};
use project::{
buffer_store::{BufferStore, BufferStoreEvent},
search::SearchQuery,
worktree_store::WorktreeStore,
ProjectPath, WorktreeId, WorktreeSettings,
buffer_store::BufferStore, search::SearchQuery, worktree_store::WorktreeStore, ProjectPath,
WorktreeId, WorktreeSettings,
};
use remote::SshSession;
use rpc::{
@ -18,7 +16,6 @@ use std::{
path::{Path, PathBuf},
sync::{atomic::AtomicUsize, Arc},
};
use util::ResultExt as _;
use worktree::Worktree;
const PEER_ID: PeerId = PeerId { owner_id: 0, id: 0 };
@ -41,11 +38,12 @@ impl HeadlessProject {
pub fn new(session: Arc<SshSession>, fs: Arc<dyn Fs>, cx: &mut ModelContext<Self>) -> Self {
let this = cx.weak_model();
let worktree_store = cx.new_model(|_| WorktreeStore::new(true));
let buffer_store =
cx.new_model(|cx| BufferStore::new(worktree_store.clone(), Some(PROJECT_ID), cx));
cx.subscribe(&buffer_store, Self::on_buffer_store_event)
.detach();
let worktree_store = cx.new_model(|_| WorktreeStore::new(true, fs.clone()));
let buffer_store = cx.new_model(|cx| {
let mut buffer_store = BufferStore::new(worktree_store.clone(), Some(PROJECT_ID), cx);
buffer_store.shared(PROJECT_ID, session.clone().into(), cx);
buffer_store
});
session.add_request_handler(this.clone(), Self::handle_list_remote_directory);
session.add_request_handler(this.clone(), Self::handle_add_worktree);
@ -128,7 +126,7 @@ impl HeadlessProject {
mut cx: AsyncAppContext,
) -> Result<proto::OpenBufferResponse> {
let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
let (buffer_store, buffer, session) = this.update(&mut cx, |this, cx| {
let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
let buffer_store = this.buffer_store.clone();
let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_buffer(
@ -139,14 +137,14 @@ impl HeadlessProject {
cx,
)
});
anyhow::Ok((buffer_store, buffer, this.session.clone()))
anyhow::Ok((buffer_store, buffer))
})??;
let buffer = buffer.await?;
let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
buffer_store.update(&mut cx, |buffer_store, cx| {
buffer_store
.create_buffer_for_peer(&buffer, PEER_ID, PROJECT_ID, session, cx)
.create_buffer_for_peer(&buffer, PEER_ID, cx)
.detach_and_log_err(cx);
})?;
@ -176,22 +174,14 @@ impl HeadlessProject {
buffer_ids: Vec::new(),
};
let (buffer_store, client) = this.update(&mut cx, |this, _| {
(this.buffer_store.clone(), this.session.clone())
})?;
let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?;
while let Some(buffer) = results.next().await {
let buffer_id = buffer.update(&mut cx, |this, _| this.remote_id())?;
response.buffer_ids.push(buffer_id.to_proto());
buffer_store
.update(&mut cx, |buffer_store, cx| {
buffer_store.create_buffer_for_peer(
&buffer,
PEER_ID,
PROJECT_ID,
client.clone(),
cx,
)
buffer_store.create_buffer_for_peer(&buffer, PEER_ID, cx)
})?
.await?;
}
@ -216,20 +206,4 @@ impl HeadlessProject {
}
Ok(proto::ListRemoteDirectoryResponse { entries })
}
pub fn on_buffer_store_event(
&mut self,
_: Model<BufferStore>,
event: &BufferStoreEvent,
_: &mut ModelContext<Self>,
) {
match event {
BufferStoreEvent::MessageToReplicas(message) => {
self.session
.send_dynamic(message.as_ref().clone())
.log_err();
}
_ => {}
}
}
}