Antonio Scandurra 2023-10-23 16:23:38 +02:00
parent cc445f7cef
commit efbf0c828d
8 changed files with 336 additions and 289 deletions

Cargo.lock (generated)

@ -5964,7 +5964,7 @@ dependencies = [
"client2",
"clock",
"collections",
"copilot",
"copilot2",
"ctor",
"db2",
"env_logger 0.9.3",


@ -4,7 +4,7 @@ use derive_more::{Deref, DerefMut};
use parking_lot::{RwLock, RwLockUpgradableReadGuard};
use slotmap::{SecondaryMap, SlotMap};
use std::{
any::{Any, TypeId},
any::{type_name, Any, TypeId},
fmt::{self, Display},
hash::{Hash, Hasher},
marker::PhantomData,
@ -17,6 +17,12 @@ use std::{
slotmap::new_key_type! { pub struct EntityId; }
impl EntityId {
pub fn as_u64(self) -> u64 {
self.0.as_ffi()
}
}
impl Display for EntityId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.as_u64())
@ -234,6 +240,20 @@ where
}
}
impl Hash for AnyHandle {
fn hash<H: Hasher>(&self, state: &mut H) {
self.entity_id.hash(state);
}
}
impl PartialEq for AnyHandle {
fn eq(&self, other: &Self) -> bool {
self.entity_id == other.entity_id
}
}
impl Eq for AnyHandle {}
#[derive(Deref, DerefMut)]
pub struct Handle<T: Send + Sync> {
#[deref]
@ -284,6 +304,31 @@ impl<T: Send + Sync> Clone for Handle<T> {
}
}
impl<T: 'static + Send + Sync> std::fmt::Debug for Handle<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"Handle {{ entity_id: {:?}, entity_type: {:?} }}",
self.any_handle.entity_id,
type_name::<T>()
)
}
}
impl<T: Send + Sync + 'static> Hash for Handle<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.any_handle.hash(state);
}
}
impl<T: Send + Sync + 'static> PartialEq for Handle<T> {
fn eq(&self, other: &Self) -> bool {
self.any_handle == other.any_handle
}
}
impl<T: Send + Sync + 'static> Eq for Handle<T> {}
#[derive(Clone)]
pub struct AnyWeakHandle {
pub(crate) entity_id: EntityId,

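The Hash, PartialEq, and Eq impls added above are keyed purely on entity_id, so two handles to the same entity compare equal and handles can now be stored directly in hash-based collections. A minimal sketch of what that enables, assuming handles obtained elsewhere (the collect_unique_buffers helper is illustrative, not part of this commit):

use std::collections::HashSet;

// Sketch only: Handle<Buffer> comes from gpui2 as above; Buffer is any
// Send + Sync entity type. Because Hash/Eq follow entity_id, clones of the
// same handle collapse into a single set entry.
fn collect_unique_buffers(handles: Vec<Handle<Buffer>>) -> HashSet<Handle<Buffer>> {
    handles.into_iter().collect()
}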

@ -24,6 +24,10 @@ impl<'a, T: Send + Sync + 'static> ModelContext<'a, T> {
}
}
pub fn entity_id(&self) -> EntityId {
self.entity_id
}
pub fn handle(&self) -> WeakHandle<T> {
self.app.entities.weak_handle(self.entity_id)
}

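The new ModelContext::entity_id accessor above exposes the model's own EntityId, which now offers as_u64() and a Display impl. A hedged sketch of how a model might use it; the Counter type is hypothetical:

// Sketch only: Counter is an illustrative model type, not part of this commit.
struct Counter;

impl Counter {
    fn describe(&self, cx: &mut ModelContext<Self>) -> String {
        // EntityId implements Display, so it can be formatted or logged directly.
        format!("counter entity {}", cx.entity_id())
    }
}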

@ -20,7 +20,7 @@ test-support = [
[dependencies]
text = { path = "../text" }
copilot = { path = "../copilot" }
copilot2 = { path = "../copilot2" }
client2 = { path = "../client2" }
clock = { path = "../clock" }
collections = { path = "../collections" }


@ -185,7 +185,7 @@ impl LspCommand for PrepareRename {
_: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Option<Range<Anchor>>> {
buffer.read_with(&cx, |buffer, _| {
buffer.update(&mut cx, |buffer, _| {
if let Some(
lsp2::PrepareRenameResponse::Range(range)
| lsp2::PrepareRenameResponse::RangeWithPlaceholder { range, .. },
@ -199,7 +199,7 @@ impl LspCommand for PrepareRename {
}
}
Ok(None)
})
})?
}
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PrepareRename {
@ -226,11 +226,11 @@ impl LspCommand for PrepareRename {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
})
}
@ -264,7 +264,7 @@ impl LspCommand for PrepareRename {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
let start = message.start.and_then(deserialize_anchor);
let end = message.end.and_then(deserialize_anchor);
@ -354,10 +354,10 @@ impl LspCommand for PerformRename {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
new_name: message.new_name,
push_to_history: false,
})
@ -389,7 +389,7 @@ impl LspCommand for PerformRename {
project
.update(&mut cx, |project, cx| {
project.deserialize_project_transaction(message, self.push_to_history, cx)
})
})?
.await
}
@ -458,10 +458,10 @@ impl LspCommand for GetDefinition {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
})
}
@ -559,10 +559,10 @@ impl LspCommand for GetTypeDefinition {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
})
}
@ -599,11 +599,11 @@ fn language_server_for_buffer(
cx: &mut AsyncAppContext,
) -> Result<(Arc<CachedLspAdapter>, Arc<LanguageServer>)> {
project
.read_with(cx, |project, cx| {
.update(cx, |project, cx| {
project
.language_server_for_buffer(buffer.read(cx), server_id, cx)
.map(|(adapter, server)| (adapter.clone(), server.clone()))
})
})?
.ok_or_else(|| anyhow!("no language server found for buffer"))
}
@ -620,7 +620,7 @@ async fn location_links_from_proto(
let buffer = project
.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(origin.buffer_id, cx)
})
})?
.await?;
let start = origin
.start
@ -631,7 +631,7 @@ async fn location_links_from_proto(
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing origin end"))?;
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
Some(Location {
buffer,
@ -645,7 +645,7 @@ async fn location_links_from_proto(
let buffer = project
.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(target.buffer_id, cx)
})
})?
.await?;
let start = target
.start
@ -656,7 +656,7 @@ async fn location_links_from_proto(
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target end"))?;
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
let target = Location {
buffer,
@ -714,12 +714,11 @@ async fn location_links_from_lsp(
lsp_adapter.name.clone(),
cx,
)
})
})?
.await?;
cx.read(|cx| {
buffer.update(&mut cx, |origin_buffer, cx| {
let origin_location = origin_range.map(|origin_range| {
let origin_buffer = buffer.read(cx);
let origin_start =
origin_buffer.clip_point_utf16(point_from_lsp(origin_range.start), Bias::Left);
let origin_end =
@ -746,7 +745,7 @@ async fn location_links_from_lsp(
origin: origin_location,
target: target_location,
})
});
})?;
}
Ok(definitions)
}
@ -834,11 +833,10 @@ impl LspCommand for GetReferences {
lsp_adapter.name.clone(),
cx,
)
})
})?
.await?;
cx.read(|cx| {
let target_buffer = target_buffer_handle.read(cx);
target_buffer_handle.update(&mut cx, |target_buffer, cx| {
let target_start = target_buffer
.clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left);
let target_end = target_buffer
@ -848,7 +846,7 @@ impl LspCommand for GetReferences {
range: target_buffer.anchor_after(target_start)
..target_buffer.anchor_before(target_end),
});
});
})?;
}
}
@ -879,10 +877,10 @@ impl LspCommand for GetReferences {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
})
}
@ -919,7 +917,7 @@ impl LspCommand for GetReferences {
let target_buffer = project
.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(location.buffer_id, cx)
})
})?
.await?;
let start = location
.start
@ -930,7 +928,7 @@ impl LspCommand for GetReferences {
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target end"))?;
target_buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
locations.push(Location {
buffer: target_buffer,
@ -982,10 +980,10 @@ impl LspCommand for GetDocumentHighlights {
_: LanguageServerId,
cx: AsyncAppContext,
) -> Result<Vec<DocumentHighlight>> {
buffer.read_with(&cx, |buffer, _| {
buffer.update(&mut cx, |buffer, _| {
let mut lsp_highlights = lsp_highlights.unwrap_or_default();
lsp_highlights.sort_unstable_by_key(|h| (h.range.start, Reverse(h.range.end)));
Ok(lsp_highlights
lsp_highlights
.into_iter()
.map(|lsp_highlight| {
let start = buffer
@ -999,7 +997,7 @@ impl LspCommand for GetDocumentHighlights {
.unwrap_or(lsp2::DocumentHighlightKind::READ),
}
})
.collect())
.collect()
})
}
@ -1027,10 +1025,10 @@ impl LspCommand for GetDocumentHighlights {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
})
}
@ -1075,7 +1073,7 @@ impl LspCommand for GetDocumentHighlights {
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target end"))?;
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;
let kind = match proto::document_highlight::Kind::from_i32(highlight.kind) {
Some(proto::document_highlight::Kind::Text) => DocumentHighlightKind::TEXT,
@ -1126,71 +1124,70 @@ impl LspCommand for GetHover {
_: Handle<Project>,
buffer: Handle<Buffer>,
_: LanguageServerId,
cx: AsyncAppContext,
mut cx: AsyncAppContext,
) -> Result<Self::Response> {
Ok(message.and_then(|hover| {
let (language, range) = cx.read(|cx| {
let buffer = buffer.read(cx);
(
buffer.language().cloned(),
hover.range.map(|range| {
let token_start =
buffer.clip_point_utf16(point_from_lsp(range.start), Bias::Left);
let token_end =
buffer.clip_point_utf16(point_from_lsp(range.end), Bias::Left);
buffer.anchor_after(token_start)..buffer.anchor_before(token_end)
}),
)
});
let Some(hover) = message else {
return Ok(None);
};
fn hover_blocks_from_marked_string(
marked_string: lsp2::MarkedString,
) -> Option<HoverBlock> {
let block = match marked_string {
lsp2::MarkedString::String(content) => HoverBlock {
text: content,
kind: HoverBlockKind::Markdown,
},
lsp2::MarkedString::LanguageString(lsp2::LanguageString {
language,
value,
}) => HoverBlock {
let (language, range) = buffer.update(&mut cx, |buffer, cx| {
(
buffer.language().cloned(),
hover.range.map(|range| {
let token_start =
buffer.clip_point_utf16(point_from_lsp(range.start), Bias::Left);
let token_end = buffer.clip_point_utf16(point_from_lsp(range.end), Bias::Left);
buffer.anchor_after(token_start)..buffer.anchor_before(token_end)
}),
)
})?;
fn hover_blocks_from_marked_string(
marked_string: lsp2::MarkedString,
) -> Option<HoverBlock> {
let block = match marked_string {
lsp2::MarkedString::String(content) => HoverBlock {
text: content,
kind: HoverBlockKind::Markdown,
},
lsp2::MarkedString::LanguageString(lsp2::LanguageString { language, value }) => {
HoverBlock {
text: value,
kind: HoverBlockKind::Code { language },
},
};
if block.text.is_empty() {
None
} else {
Some(block)
}
}
};
if block.text.is_empty() {
None
} else {
Some(block)
}
}
let contents = cx.read(|_| match hover.contents {
lsp2::HoverContents::Scalar(marked_string) => {
hover_blocks_from_marked_string(marked_string)
.into_iter()
.collect()
}
lsp2::HoverContents::Array(marked_strings) => marked_strings
let contents = match hover.contents {
lsp2::HoverContents::Scalar(marked_string) => {
hover_blocks_from_marked_string(marked_string)
.into_iter()
.filter_map(hover_blocks_from_marked_string)
.collect(),
lsp2::HoverContents::Markup(markup_content) => vec![HoverBlock {
text: markup_content.value,
kind: if markup_content.kind == lsp2::MarkupKind::Markdown {
HoverBlockKind::Markdown
} else {
HoverBlockKind::PlainText
},
}],
});
.collect()
}
lsp2::HoverContents::Array(marked_strings) => marked_strings
.into_iter()
.filter_map(hover_blocks_from_marked_string)
.collect(),
lsp2::HoverContents::Markup(markup_content) => vec![HoverBlock {
text: markup_content.value,
kind: if markup_content.kind == lsp2::MarkupKind::Markdown {
HoverBlockKind::Markdown
} else {
HoverBlockKind::PlainText
},
}],
};
Some(Hover {
contents,
range,
language,
})
Ok(Some(Hover {
contents,
range,
language,
}))
}
@ -1218,10 +1215,10 @@ impl LspCommand for GetHover {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
})
}
@ -1295,7 +1292,7 @@ impl LspCommand for GetHover {
return Ok(None);
}
let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
let language = buffer.update(&mut cx, |buffer, _| buffer.language().cloned())?;
let range = if let (Some(start), Some(end)) = (message.start, message.end) {
language2::proto::deserialize_anchor(start)
.and_then(|start| language2::proto::deserialize_anchor(end).map(|end| start..end))
@ -1362,7 +1359,7 @@ impl LspCommand for GetCompletions {
Default::default()
};
let completions = buffer.read_with(&cx, |buffer, _| {
let completions = buffer.update(&mut cx, |buffer, _| {
let language = buffer.language().cloned();
let snapshot = buffer.snapshot();
let clipped_position = buffer.clip_point_utf16(Unclipped(self.position), Bias::Left);
@ -1468,7 +1465,7 @@ impl LspCommand for GetCompletions {
}
})
})
});
})?;
Ok(future::join_all(completions).await)
}
@ -1491,17 +1488,17 @@ impl LspCommand for GetCompletions {
) -> Result<Self> {
let version = deserialize_version(&message.version);
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_version(version))
.update(&mut cx, |buffer, _| buffer.wait_for_version(version))?
.await?;
let position = message
.position
.and_then(language2::proto::deserialize_anchor)
.map(|p| {
buffer.read_with(&cx, |buffer, _| {
buffer.update(&mut cx, |buffer, _| {
buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
})
})
.ok_or_else(|| anyhow!("invalid position"))?;
.ok_or_else(|| anyhow!("invalid position"))??;
Ok(Self { position })
}
@ -1531,10 +1528,10 @@ impl LspCommand for GetCompletions {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
let language = buffer.update(&mut cx, |buffer, _| buffer.language().cloned())?;
let completions = message.completions.into_iter().map(|completion| {
language2::proto::deserialize_completion(completion, language.clone())
});
@ -1639,7 +1636,7 @@ impl LspCommand for GetCodeActions {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
Ok(Self { range: start..end })
@ -1671,7 +1668,7 @@ impl LspCommand for GetCodeActions {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
message
.actions
@ -1775,15 +1772,15 @@ impl LspCommand for OnTypeFormatting {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
let tab_size = buffer.read_with(&cx, |buffer, cx| {
let tab_size = buffer.update(&mut cx, |buffer, cx| {
language_settings(buffer.language(), buffer.file(), cx).tab_size
});
})?;
Ok(Self {
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
trigger: message.trigger.clone(),
options: lsp_formatting_options(tab_size.get()).into(),
push_to_history: false,
@ -1824,7 +1821,7 @@ impl LspCommand for OnTypeFormatting {
}
impl InlayHints {
pub async fn lsp2_to_project_hint(
pub async fn lsp_to_project_hint(
lsp_hint: lsp2::InlayHint,
buffer_handle: &Handle<Buffer>,
server_id: LanguageServerId,
@ -1838,15 +1835,14 @@ impl InlayHints {
_ => None,
});
let position = cx.update(|cx| {
let buffer = buffer_handle.read(cx);
let position = buffer_handle.update(cx, |buffer, _| {
let position = buffer.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
if kind == Some(InlayHintKind::Parameter) {
buffer.anchor_before(position)
} else {
buffer.anchor_after(position)
}
});
})?;
let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
.await
.context("lsp to project inlay hint conversion")?;
@ -1878,7 +1874,7 @@ impl InlayHints {
})
}
async fn lsp2_inlay_label_to_project(
async fn lsp_inlay_label_to_project(
lsp_label: lsp2::InlayHintLabel,
server_id: LanguageServerId,
) -> anyhow::Result<InlayHintLabel> {
@ -2109,7 +2105,7 @@ impl InlayHints {
})
}
pub fn project_to_lsp2_hint(hint: InlayHint, snapshot: &BufferSnapshot) -> lsp2::InlayHint {
pub fn project_to_lsp_hint(hint: InlayHint, snapshot: &BufferSnapshot) -> lsp2::InlayHint {
lsp2::InlayHint {
position: point_to_lsp(hint.position.to_point_utf16(snapshot)),
kind: hint.kind.map(|kind| match kind {
@ -2303,7 +2299,7 @@ impl LspCommand for InlayHints {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
Ok(Self { range: start..end })
@ -2335,7 +2331,7 @@ impl LspCommand for InlayHints {
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})
})?
.await?;
let mut hints = Vec::new();

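The change repeated throughout the file above is that reads which previously went through read_with(&cx, ...) now go through update(&mut cx, ...), which returns a Result because the entity or the app may already have been dropped by the time the async code runs; each call therefore gains a trailing ?. A condensed sketch of the before/after shape, assuming the gpui2 Handle and AsyncAppContext types used above (buffer_snapshot_len is an illustrative helper):

use anyhow::Result;

// Sketch only: Handle, Buffer, and AsyncAppContext are the gpui2/language2
// types used in the diff above.
async fn buffer_snapshot_len(buffer: Handle<Buffer>, mut cx: AsyncAppContext) -> Result<usize> {
    // Before: let len = buffer.read_with(&cx, |buffer, _| buffer.len());
    // After: the accessor is fallible, so the error is propagated with `?`.
    let len = buffer.update(&mut cx, |buffer, _| buffer.len())?;
    Ok(len)
}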

@ -10,11 +10,11 @@ mod project_tests;
#[cfg(test)]
mod worktree_tests;
use anyhow::{anyhow, Context, Result};
use anyhow::{anyhow, Context as _, Result};
use client2::{proto, Client, Collaborator, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use copilot::Copilot;
use copilot2::Copilot;
use futures::{
channel::{
mpsc::{self, UnboundedReceiver},
@ -26,7 +26,8 @@ use futures::{
};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui2::{
AnyHandle, AppContext, AsyncAppContext, EventEmitter, Handle, ModelContext, Task, WeakHandle,
AnyHandle, AppContext, AsyncAppContext, EventEmitter, Executor, Handle, ModelContext, Task,
WeakHandle,
};
use itertools::Itertools;
use language2::{
@ -195,8 +196,8 @@ impl DelayedDebounced {
self.cancel_channel = Some(sender);
let previous_task = self.task.take();
self.task = Some(cx.spawn(|workspace, mut cx| async move {
let mut timer = cx.background().timer(delay).fuse();
self.task = Some(cx.executor().spawn(|workspace, mut cx| async move {
let mut timer = cx.executor().timer(delay).fuse();
if let Some(previous_task) = previous_task {
previous_task.await;
}
@ -206,9 +207,9 @@ impl DelayedDebounced {
_ = timer => {}
}
workspace
.update(&mut cx, |workspace, cx| (func)(workspace, cx))
.await;
if let Ok(task) = workspace.update(&mut cx, |workspace, cx| (func)(workspace, cx)) {
task.await;
}
}));
}
}
@ -646,7 +647,7 @@ impl Project {
opened_buffer: watch::channel(),
client_subscriptions: Vec::new(),
_subscriptions: vec![
cx.observe_global::<SettingsStore, _>(Self::on_settings_changed),
cx.observe_global::<SettingsStore>(Self::on_settings_changed),
cx.on_release(Self::release),
cx.on_app_quit(Self::shutdown_language_servers),
],
@ -673,7 +674,7 @@ impl Project {
},
copilot_lsp_subscription,
copilot_log_subscription: None,
current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp2.clone(),
current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp.clone(),
node: Some(node),
prettier_instances: HashMap::default(),
}
@ -696,7 +697,7 @@ impl Project {
project_id: remote_id,
})
.await?;
let this = cx.add_model(|cx| {
let this = cx.entity(|cx| {
let replica_id = response.payload.replica_id as ReplicaId;
let mut worktrees = Vec::new();
@ -775,7 +776,7 @@ impl Project {
},
copilot_lsp_subscription,
copilot_log_subscription: None,
current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp2.clone(),
current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp.clone(),
node: None,
prettier_instances: HashMap::default(),
};
@ -793,7 +794,7 @@ impl Project {
.map(|peer| peer.user_id)
.collect();
user_store
.update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
.update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
.await?;
this.update(&mut cx, |this, cx| {
@ -914,7 +915,7 @@ impl Project {
let mut language_servers_to_restart = Vec::new();
let languages = self.languages.to_vec();
let new_lsp_settings = settings2::get::<ProjectSettings>(cx).lsp2.clone();
let new_lsp_settings = settings2::get::<ProjectSettings>(cx).lsp.clone();
let current_lsp_settings = &self.current_lsp_settings;
for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
let language = languages.iter().find_map(|l| {
@ -1153,7 +1154,7 @@ impl Project {
} else {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move {
Some(cx.spawn(|_, mut cx| async move {
let response = client
.request(proto::CreateProjectEntry {
worktree_id: project_path.worktree_id.to_proto(),
@ -1197,7 +1198,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move {
Some(cx.spawn(|_, mut cx| async move {
let response = client
.request(proto::CopyProjectEntry {
project_id,
@ -1240,7 +1241,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move {
Some(cx.spawn(|_, mut cx| async move {
let response = client
.request(proto::RenameProjectEntry {
project_id,
@ -1258,7 +1259,7 @@ impl Project {
response.worktree_scan_id as usize,
cx,
)
})
})?
.await
}))
}
@ -1280,7 +1281,7 @@ impl Project {
} else {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn_weak(|_, mut cx| async move {
Some(cx.spawn(|_, mut cx| async move {
let response = client
.request(proto::DeleteProjectEntry {
project_id,
@ -1317,7 +1318,7 @@ impl Project {
project_id: self.remote_id().unwrap(),
entry_id: entry_id.to_proto(),
});
Some(cx.spawn_weak(|_, mut cx| async move {
Some(cx.spawn(|_, mut cx| async move {
let response = request.await?;
if let Some(worktree) = worktree.upgrade() {
worktree
@ -1341,7 +1342,7 @@ impl Project {
self.client_subscriptions.push(
self.client
.subscribe_to_entity(project_id)?
.set_model(&cx.handle(), &mut cx.to_async()),
.set_model(&cx.handle().upgrade(), &mut cx.to_async()),
);
for open_buffer in self.opened_buffers.values_mut() {
@ -1382,7 +1383,7 @@ impl Project {
let store = cx.global::<SettingsStore>();
for worktree in self.worktrees(cx) {
let worktree_id = worktree.read(cx).id().to_proto();
for (path, content) in store.local_settings(worktree.id()) {
for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
self.client
.send(proto::UpdateWorktreeSettings {
project_id,
@ -1506,7 +1507,7 @@ impl Project {
message_id: u32,
cx: &mut ModelContext<Self>,
) -> Result<()> {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
cx.update_global::<SettingsStore, _>(|store, cx| {
for worktree in &self.worktrees {
store
.clear_local_settings(worktree.handle_id(), cx)
@ -2432,7 +2433,7 @@ impl Project {
Duration::from_secs(1);
let task = cx.spawn_weak(|this, mut cx| async move {
cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
cx.executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
this.disk_based_diagnostics_finished(
@ -2786,7 +2787,7 @@ impl Project {
};
let project_settings = settings2::get::<ProjectSettings>(cx);
let lsp = project_settings.lsp2.get(&adapter.name.0);
let lsp = project_settings.lsp.get(&adapter.name.0);
let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
let mut initialization_options = adapter.initialization_options.clone();
@ -3429,7 +3430,7 @@ impl Project {
});
const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
let mut timeout = cx.background().timer(PROCESS_TIMEOUT).fuse();
let mut timeout = cx.executor().timer(PROCESS_TIMEOUT).fuse();
let mut errored = false;
if let Some(mut process) = process {
@ -4013,7 +4014,7 @@ impl Project {
project_id,
buffer_ids: remote_buffers
.iter()
.map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
.map(|buffer| buffer.update(&mut cx, |buffer, _| buffer.remote_id()))
.collect(),
})
.await?
@ -4022,13 +4023,13 @@ impl Project {
project_transaction = this
.update(&mut cx, |this, cx| {
this.deserialize_project_transaction(response, push_to_history, cx)
})
})?
.await?;
}
for buffer in local_buffers {
let transaction = buffer
.update(&mut cx, |buffer, cx| buffer.reload(cx))
.update(&mut cx, |buffer, cx| buffer.reload(cx))?
.await?;
buffer.update(&mut cx, |buffer, cx| {
if let Some(transaction) = transaction {
@ -4091,9 +4092,9 @@ impl Project {
let mut project_transaction = ProjectTransaction::default();
for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
let settings = buffer.read_with(&cx, |buffer, cx| {
let settings = buffer.update(&mut cx, |buffer, cx| {
language_settings(buffer.language(), buffer.file(), cx).clone()
});
})?;
let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
let ensure_final_newline = settings.ensure_final_newline_on_save;
@ -4105,7 +4106,7 @@ impl Project {
let trailing_whitespace_diff = if remove_trailing_whitespace {
Some(
buffer
.read_with(&cx, |b, cx| b.remove_trailing_whitespace(cx))
.update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
.await,
)
} else {
@ -4182,13 +4183,13 @@ impl Project {
if let Some(prettier_task) = this
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx)
}).await {
})?.await {
match prettier_task.await
{
Ok(prettier) => {
let buffer_path = buffer.read_with(&cx, |buffer, cx| {
let buffer_path = buffer.update(&mut cx, |buffer, cx| {
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
});
})?;
format_operation = Some(FormatOperation::Prettier(
prettier
.format(buffer, buffer_path, &cx)
@ -4225,7 +4226,7 @@ impl Project {
match prettier_task.await
{
Ok(prettier) => {
let buffer_path = buffer.read_with(&cx, |buffer, cx| {
let buffer_path = buffer.update(&mut cx, |buffer, cx| {
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
});
format_operation = Some(FormatOperation::Prettier(
@ -4298,8 +4299,10 @@ impl Project {
trigger: trigger as i32,
buffer_ids: buffers
.iter()
.map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
.collect(),
.map(|buffer| {
buffer.update(&mut cx, |buffer, _| buffer.remote_id())
})
.collect::<Result<_>>()?,
})
.await?
.transaction
@ -4307,7 +4310,7 @@ impl Project {
project_transaction = this
.update(&mut cx, |this, cx| {
this.deserialize_project_transaction(response, push_to_history, cx)
})
})?
.await?;
}
Ok(project_transaction)
@ -4316,7 +4319,7 @@ impl Project {
}
async fn format_via_lsp(
this: &Handle<Self>,
this: &WeakHandle<Self>,
buffer: &Handle<Buffer>,
abs_path: &Path,
language_server: &Arc<LanguageServer>,
@ -4341,7 +4344,7 @@ impl Project {
.await?
} else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
let buffer_start = lsp2::Position::new(0, 0);
let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16()));
let buffer_end = buffer.update(&mut cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
language_server
.request::<lsp2::request::RangeFormatting>(lsp2::DocumentRangeFormattingParams {
@ -4358,7 +4361,7 @@ impl Project {
if let Some(lsp_edits) = lsp_edits {
this.update(cx, |this, cx| {
this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
})
})?
.await
} else {
Ok(Vec::new())
@ -4372,7 +4375,7 @@ impl Project {
arguments: &[String],
cx: &mut AsyncAppContext,
) -> Result<Option<Diff>> {
let working_dir_path = buffer.read_with(cx, |buffer, cx| {
let working_dir_path = buffer.update(&mut cx, |buffer, cx| {
let file = File::from_dyn(buffer.file())?;
let worktree = file.worktree.read(cx).as_local()?;
let mut worktree_path = worktree.abs_path().to_path_buf();
@ -4397,7 +4400,7 @@ impl Project {
.stdin
.as_mut()
.ok_or_else(|| anyhow!("failed to acquire stdin"))?;
let text = buffer.read_with(cx, |buffer, _| buffer.as_rope().clone());
let text = buffer.update(&mut cx, |buffer, _| buffer.as_rope().clone());
for chunk in text.chunks() {
stdin.write_all(chunk.as_bytes()).await?;
}
@ -4797,7 +4800,7 @@ impl Project {
.unwrap_or(false);
let additional_text_edits = if can_resolve {
lang_server
.request::<lsp2::request::ResolveCompletionItem>(completion.lsp2_completion)
.request::<lsp2::request::ResolveCompletionItem>(completion.lsp_completion)
.await?
.additional_text_edits
} else {
@ -4925,8 +4928,8 @@ impl Project {
.and_then(|d| d.get_mut("range"))
{
*lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
action.lsp2_action = lang_server
.request::<lsp2::request::CodeActionResolveRequest>(action.lsp2_action)
action.lsp_action = lang_server
.request::<lsp2::request::CodeActionResolveRequest>(action.lsp_action)
.await?;
} else {
let actions = this
@ -5537,7 +5540,7 @@ impl Project {
})
.collect::<Vec<_>>();
let background = cx.background().clone();
let background = cx.executor().clone();
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
if path_count == 0 {
let (_, rx) = smol::channel::bounded(1024);
@ -5560,11 +5563,11 @@ impl Project {
}
})
.collect();
cx.background()
cx.executor()
.spawn(Self::background_search(
unnamed_files,
opened_buffers,
cx.background().clone(),
cx.executor().clone(),
self.fs.clone(),
workers,
query.clone(),
@ -5575,9 +5578,9 @@ impl Project {
.detach();
let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
let background = cx.background().clone();
let background = cx.executor().clone();
let (result_tx, result_rx) = smol::channel::bounded(1024);
cx.background()
cx.executor()
.spawn(async move {
let Ok(buffers) = buffers.await else {
return;
@ -5685,7 +5688,7 @@ impl Project {
async fn background_search(
unnamed_buffers: Vec<Handle<Buffer>>,
opened_buffers: HashMap<Arc<Path>, (Handle<Buffer>, BufferSnapshot)>,
executor: Arc<Background>,
executor: Executor,
fs: Arc<dyn Fs>,
workers: usize,
query: SearchQuery,
@ -6459,7 +6462,7 @@ impl Project {
})
.collect::<Vec<_>>();
cx.background()
cx.executor()
.spawn(async move {
for task_result in future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_task)| {
async move {
@ -6599,7 +6602,7 @@ impl Project {
this.disconnected_from_host(cx);
}
Ok(())
})
})?
}
async fn handle_add_collaborator(
@ -6673,7 +6676,7 @@ impl Project {
});
cx.notify();
Ok(())
})
})?
}
async fn handle_remove_collaborator(
@ -6702,7 +6705,7 @@ impl Project {
cx.emit(Event::CollaboratorLeft(peer_id));
cx.notify();
Ok(())
})
})?
}
async fn handle_update_project(
@ -6717,7 +6720,7 @@ impl Project {
this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
}
Ok(())
})
})?
}
async fn handle_update_worktree(
@ -6735,7 +6738,7 @@ impl Project {
});
}
Ok(())
})
})?
}
async fn handle_update_worktree_settings(
@ -6759,7 +6762,7 @@ impl Project {
});
}
Ok(())
})
})?
}
async fn handle_create_project_entry(
@ -6773,13 +6776,13 @@ impl Project {
this.worktree_for_id(worktree_id, cx)
.ok_or_else(|| anyhow!("worktree not found"))
})?;
let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id());
let entry = worktree
.update(&mut cx, |worktree, cx| {
let worktree = worktree.as_local_mut().unwrap();
let path = PathBuf::from(envelope.payload.path);
worktree.create_entry(path, envelope.payload.is_directory, cx)
})
})?
.await?;
Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()),
@ -6794,11 +6797,11 @@ impl Project {
mut cx: AsyncAppContext,
) -> Result<proto::ProjectEntryResponse> {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this.read_with(&cx, |this, cx| {
let worktree = this.update(&mut cx, |this, cx| {
this.worktree_for_entry(entry_id, cx)
.ok_or_else(|| anyhow!("worktree not found"))
})?;
let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
})??;
let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
let entry = worktree
.update(&mut cx, |worktree, cx| {
let new_path = PathBuf::from(envelope.payload.new_path);
@ -6807,7 +6810,7 @@ impl Project {
.unwrap()
.rename_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry"))
})?
})??
.await?;
Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()),
@ -6822,11 +6825,11 @@ impl Project {
mut cx: AsyncAppContext,
) -> Result<proto::ProjectEntryResponse> {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this.read_with(&cx, |this, cx| {
let worktree = this.update(&mut cx, |this, cx| {
this.worktree_for_entry(entry_id, cx)
.ok_or_else(|| anyhow!("worktree not found"))
})?;
let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
})??;
let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
let entry = worktree
.update(&mut cx, |worktree, cx| {
let new_path = PathBuf::from(envelope.payload.new_path);
@ -6835,7 +6838,7 @@ impl Project {
.unwrap()
.copy_entry(entry_id, new_path, cx)
.ok_or_else(|| anyhow!("invalid entry"))
})?
})??
.await?;
Ok(proto::ProjectEntryResponse {
entry: Some((&entry).into()),
@ -6853,10 +6856,10 @@ impl Project {
this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)));
let worktree = this.read_with(&cx, |this, cx| {
let worktree = this.update(&mut cx, |this, cx| {
this.worktree_for_entry(entry_id, cx)
.ok_or_else(|| anyhow!("worktree not found"))
})?;
})??;
let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
worktree
.update(&mut cx, |worktree, cx| {
@ -6881,7 +6884,7 @@ impl Project {
) -> Result<proto::ExpandProjectEntryResponse> {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this
.read_with(&cx, |this, cx| this.worktree_for_entry(entry_id, cx))
.update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
.ok_or_else(|| anyhow!("invalid request"))?;
worktree
.update(&mut cx, |worktree, cx| {
@ -6890,7 +6893,7 @@ impl Project {
.unwrap()
.expand_entry(entry_id, cx)
.ok_or_else(|| anyhow!("invalid entry"))
})?
})??
.await?;
let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id()) as u64;
Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
@ -6923,7 +6926,7 @@ impl Project {
}
}
Ok(())
})
})?
}
async fn handle_start_language_server(
@ -7005,7 +7008,7 @@ impl Project {
}
Ok(())
})
})?
}
async fn handle_update_buffer(
@ -7041,7 +7044,7 @@ impl Project {
}
}
Ok(proto::Ack {})
})
})?
}
async fn handle_create_buffer_for_peer(
@ -7101,7 +7104,7 @@ impl Project {
}
Ok(())
})
})?
}
async fn handle_update_diff_base(
@ -7127,7 +7130,7 @@ impl Project {
buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
}
Ok(())
})
})?
}
async fn handle_update_buffer_file(
@ -7162,7 +7165,7 @@ impl Project {
this.detect_language_for_buffer(&buffer, cx);
}
Ok(())
})
})?
}
async fn handle_save_buffer(
@ -7180,17 +7183,17 @@ impl Project {
.and_then(|buffer| buffer.upgrade())
.ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
anyhow::Ok((project_id, buffer))
})?;
})??;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&envelope.payload.version))
})
})?
.await?;
let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
.await?;
Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
project_id,
buffer_id,
version: serialize_version(buffer.saved_version()),
@ -7198,7 +7201,7 @@ impl Project {
fingerprint: language2::proto::serialize_fingerprint(
buffer.saved_version_fingerprint(),
),
}))
})?)
}
async fn handle_reload_buffers(
@ -7219,12 +7222,12 @@ impl Project {
);
}
Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
})?;
})??;
let project_transaction = reload.await?;
let project_transaction = this.update(&mut cx, |this, cx| {
this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
});
})?;
Ok(proto::ReloadBuffersResponse {
transaction: Some(project_transaction),
})
@ -7298,7 +7301,7 @@ impl Project {
})
.log_err();
cx.background()
cx.executor()
.spawn(
async move {
let operations = operations.await;
@ -7917,7 +7920,7 @@ impl Project {
// Any incomplete buffers have open requests waiting. Request that the host sends
// creates these buffers for us again to unblock any waiting futures.
for id in incomplete_buffer_ids {
cx.background()
cx.executor()
.spawn(client.request(proto::OpenBufferById { project_id, id }))
.detach();
}
@ -8082,7 +8085,7 @@ impl Project {
});
}
Ok(())
})
})?
}
async fn handle_buffer_reloaded(
@ -8116,7 +8119,7 @@ impl Project {
if let Some(buffer) = buffer {
buffer.update(cx, |buffer, cx| {
buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
});
})?;
}
Ok(())
})
@ -8132,7 +8135,7 @@ impl Project {
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
cx.background().spawn(async move {
cx.executor().spawn(async move {
let snapshot = snapshot?;
let mut lsp_edits = lsp_edits
.into_iter()
@ -8375,9 +8378,9 @@ impl Project {
return Task::ready(None);
};
cx.spawn(|this, mut cx| async move {
let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs));
let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs))?;
let prettier_dir = match cx
.background()
.executor()
.spawn(Prettier::locate(
worktree_path.zip(buffer_path).map(
|(worktree_root_path, starting_path)| LocateStart {
@ -8535,7 +8538,7 @@ impl Project {
.cloned();
let fs = Arc::clone(&self.fs);
cx.background()
cx.executor()
.spawn(async move {
let prettier_wrapper_path = default_prettier_dir.join(PRETTIER_SERVER_FILE);
// method creates parent directory if it doesn't exist
@ -8583,15 +8586,15 @@ fn subscribe_for_copilot_events(
cx.subscribe(
copilot,
|project, copilot, copilot_event, cx| match copilot_event {
copilot::Event::CopilotLanguageServerStarted => {
copilot2::Event::CopilotLanguageServerStarted => {
match copilot.read(cx).language_server() {
Some((name, copilot_server)) => {
// Another event wants to re-add the server that was already added and subscribed to, avoid doing it again.
if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
if !copilot_server.has_notification_handler::<copilot2::request::LogMessage>() {
let new_server_id = copilot_server.server_id();
let weak_project = cx.weak_handle();
let copilot_log_subscription = copilot_server
.on_notification::<copilot::request::LogMessage, _>(
.on_notification::<copilot2::request::LogMessage, _>(
move |params, mut cx| {
if let Some(project) = weak_project.upgrade(&mut cx) {
project.update(&mut cx, |_, cx| {

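Two mechanical migrations dominate the file above: copilot/lsp2-suffixed identifiers are renamed to their copilot2/lsp counterparts, and cx.background()/cx.spawn_weak are replaced by cx.executor()/cx.spawn. A hedged sketch of the executor pattern, assuming gpui2's Executor as imported above (spawn_debounced and the DEBOUNCE value are illustrative):

use std::time::Duration;

// Sketch only: shows cx.executor() standing in for the old cx.background().
fn spawn_debounced(cx: &mut ModelContext<Project>) -> Task<()> {
    const DEBOUNCE: Duration = Duration::from_millis(250); // illustrative value
    let executor = cx.executor().clone();
    cx.executor().spawn(async move {
        // Previously written as cx.background().timer(DEBOUNCE).await.
        executor.timer(DEBOUNCE).await;
        // ...debounced work would go here...
    })
}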

@ -2,7 +2,7 @@ use crate::{
copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
};
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context, Result};
use anyhow::{anyhow, Context as _, Result};
use client2::{proto, Client};
use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque};
@ -21,7 +21,9 @@ use futures::{
};
use fuzzy2::CharBag;
use git::{DOT_GIT, GITIGNORE};
use gpui2::{AppContext, AsyncAppContext, EventEmitter, Executor, Handle, ModelContext, Task};
use gpui2::{
AppContext, AsyncAppContext, Context, EventEmitter, Executor, Handle, ModelContext, Task,
};
use language2::{
proto::{
deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
@ -299,7 +301,7 @@ impl Worktree {
.await
.context("failed to stat worktree path")?;
Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
cx.entity(move |cx: &mut ModelContext<Worktree>| {
let root_name = abs_path
.file_name()
.map_or(String::new(), |f| f.to_string_lossy().to_string());
@ -308,7 +310,7 @@ impl Worktree {
ignores_by_parent_abs_path: Default::default(),
git_repositories: Default::default(),
snapshot: Snapshot {
id: WorktreeId::from_usize(cx.model_id()),
id: WorktreeId::from_usize(cx.entity_id()),
abs_path: abs_path.clone(),
root_name: root_name.clone(),
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
@ -336,8 +338,8 @@ impl Worktree {
let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
cx.spawn_weak(|this, mut cx| async move {
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
cx.spawn(|this, mut cx| async move {
while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
this.update(&mut cx, |this, cx| {
let this = this.as_local_mut().unwrap();
match state {
@ -361,10 +363,10 @@ impl Worktree {
})
.detach();
let background_scanner_task = cx.background().spawn({
let background_scanner_task = cx.executor().spawn({
let fs = fs.clone();
let snapshot = snapshot.clone();
let background = cx.background().clone();
let background = cx.executor().clone();
async move {
let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
BackgroundScanner::new(
@ -394,10 +396,9 @@ impl Worktree {
fs,
visible,
})
}))
})
}
// abcdefghi
pub fn remote(
project_remote_id: u64,
replica_id: ReplicaId,
@ -426,7 +427,7 @@ impl Worktree {
let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
cx.background()
cx.executor()
.spawn({
let background_snapshot = background_snapshot.clone();
async move {
@ -442,27 +443,24 @@ impl Worktree {
})
.detach();
cx.spawn_weak(|this, mut cx| async move {
cx.spawn(|this, mut cx| async move {
while (snapshot_updated_rx.recv().await).is_some() {
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
let this = this.as_remote_mut().unwrap();
this.snapshot = this.background_snapshot.lock().clone();
cx.emit(Event::UpdatedEntries(Arc::from([])));
cx.notify();
while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
if this.observed_snapshot(*scan_id) {
let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
let _ = tx.send(());
} else {
break;
}
this.update(&mut cx, |this, cx| {
let this = this.as_remote_mut().unwrap();
this.snapshot = this.background_snapshot.lock().clone();
cx.emit(Event::UpdatedEntries(Arc::from([])));
cx.notify();
while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
if this.observed_snapshot(*scan_id) {
let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
let _ = tx.send(());
} else {
break;
}
});
} else {
break;
}
}
})?;
}
anyhow::Ok(())
})
.detach();
@ -598,13 +596,13 @@ impl LocalWorktree {
let path = Arc::from(path);
cx.spawn(move |this, mut cx| async move {
let (file, contents, diff_base) = this
.update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
.update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))?
.await?;
let text_buffer = cx
.background()
.executor()
.spawn(async move { text::Buffer::new(0, id, contents) })
.await;
Ok(cx.add_model(|_| Buffer::build(text_buffer, diff_base, Some(Arc::new(file)))))
cx.entity(|_| Buffer::build(text_buffer, diff_base, Some(Arc::new(file))))
})
}
@ -878,18 +876,18 @@ impl LocalWorktree {
let fs = self.fs.clone();
let entry = self.refresh_entry(path.clone(), None, cx);
cx.spawn(|this, cx| async move {
cx.spawn(|this, mut cx| async move {
let text = fs.load(&abs_path).await?;
let entry = entry.await?;
let mut index_task = None;
let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot());
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
if let Some(repo) = snapshot.repository_for_path(&path) {
let repo_path = repo.work_directory.relativize(&snapshot, &path).unwrap();
if let Some(repo) = snapshot.git_repositories.get(&*repo.work_directory) {
let repo = repo.repo_ptr.clone();
index_task = Some(
cx.background()
cx.executor()
.spawn(async move { repo.lock().load_index_text(&repo_path) }),
);
}
@ -901,10 +899,13 @@ impl LocalWorktree {
None
};
let worktree = this
.upgrade()
.ok_or_else(|| anyhow!("worktree was dropped"))?;
Ok((
File {
entry_id: entry.id,
worktree: this,
worktree,
path: entry.path,
mtime: entry.mtime,
is_local: true,
@ -923,7 +924,6 @@ impl LocalWorktree {
has_changed_file: bool,
cx: &mut ModelContext<Worktree>,
) -> Task<Result<()>> {
let handle = cx.handle();
let buffer = buffer_handle.read(cx);
let rpc = self.client.clone();
@ -935,13 +935,14 @@ impl LocalWorktree {
let version = buffer.version();
let save = self.write_file(path, text, buffer.line_ending(), cx);
cx.as_mut().spawn(|mut cx| async move {
cx.spawn(|this, mut cx| async move {
let entry = save.await?;
let this = this.upgrade().context("worktree dropped")?;
if has_changed_file {
let new_file = Arc::new(File {
entry_id: entry.id,
worktree: handle,
worktree: this,
path: entry.path,
mtime: entry.mtime,
is_local: true,
@ -1005,7 +1006,7 @@ impl LocalWorktree {
let lowest_ancestor = self.lowest_ancestor(&path);
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
let write = cx.background().spawn(async move {
let write = cx.executor().spawn(async move {
if is_dir {
fs.create_dir(&abs_path).await
} else {
@ -1035,7 +1036,7 @@ impl LocalWorktree {
this.as_local_mut().unwrap().refresh_entry(path, None, cx),
refreshes,
)
});
})?;
for refresh in refreshes {
refresh.await.log_err();
}
@ -1055,14 +1056,14 @@ impl LocalWorktree {
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
let write = cx
.background()
.executor()
.spawn(async move { fs.save(&abs_path, &text, line_ending).await });
cx.spawn(|this, mut cx| async move {
write.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut().unwrap().refresh_entry(path, None, cx)
})
})?
.await
})
}
@ -1076,7 +1077,7 @@ impl LocalWorktree {
let abs_path = self.absolutize(&entry.path);
let fs = self.fs.clone();
let delete = cx.background().spawn(async move {
let delete = cx.executor().spawn(async move {
if entry.is_file() {
fs.remove_file(&abs_path, Default::default()).await?;
} else {
@ -1098,7 +1099,7 @@ impl LocalWorktree {
this.as_local_mut()
.unwrap()
.refresh_entries_for_paths(vec![path])
})
})?
.recv()
.await;
Ok(())
@ -1116,7 +1117,7 @@ impl LocalWorktree {
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
let fs = self.fs.clone();
let rename = cx.background().spawn(async move {
let rename = cx.executor().spawn(async move {
fs.rename(&abs_old_path, &abs_new_path, Default::default())
.await
});
@ -1127,7 +1128,7 @@ impl LocalWorktree {
this.as_local_mut()
.unwrap()
.refresh_entry(new_path.clone(), Some(old_path), cx)
})
})?
.await
}))
}
@ -1143,7 +1144,7 @@ impl LocalWorktree {
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
let fs = self.fs.clone();
let copy = cx.background().spawn(async move {
let copy = cx.executor().spawn(async move {
copy_recursive(
fs.as_ref(),
&abs_old_path,
@ -1159,7 +1160,7 @@ impl LocalWorktree {
this.as_local_mut()
.unwrap()
.refresh_entry(new_path.clone(), None, cx)
})
})?
.await
}))
}
@ -1171,7 +1172,7 @@ impl LocalWorktree {
) -> Option<Task<Result<()>>> {
let path = self.entry_for_id(entry_id)?.path.clone();
let mut refresh = self.refresh_entries_for_paths(vec![path]);
Some(cx.background().spawn(async move {
Some(cx.executor().spawn(async move {
refresh.next().await;
Ok(())
}))
@ -1204,15 +1205,13 @@ impl LocalWorktree {
vec![path.clone()]
};
let mut refresh = self.refresh_entries_for_paths(paths);
cx.spawn_weak(move |this, mut cx| async move {
cx.spawn(move |this, mut cx| async move {
refresh.recv().await;
this.upgrade(&cx)
.ok_or_else(|| anyhow!("worktree was dropped"))?
.update(&mut cx, |this, _| {
this.entry_for_path(path)
.cloned()
.ok_or_else(|| anyhow!("failed to read path after update"))
})
this.update(&mut cx, |this, _| {
this.entry_for_path(path)
.cloned()
.ok_or_else(|| anyhow!("failed to read path after update"))
})?
})
}
@ -1246,8 +1245,8 @@ impl LocalWorktree {
.unbounded_send((self.snapshot(), Arc::from([]), Arc::from([])))
.ok();
let worktree_id = cx.model_id() as u64;
let _maintain_remote_snapshot = cx.background().spawn(async move {
let worktree_id = cx.entity_id().as_u64();
let _maintain_remote_snapshot = cx.executor().spawn(async move {
let mut is_first = true;
while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
let update;
@ -1294,7 +1293,7 @@ impl LocalWorktree {
for (&server_id, summary) in summaries {
if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
project_id,
worktree_id: cx.model_id() as u64,
worktree_id: cx.entity_id().as_u64(),
summary: Some(summary.to_proto(server_id, &path)),
}) {
return Task::ready(Err(e));
@ -1305,7 +1304,7 @@ impl LocalWorktree {
let rx = self.observe_updates(project_id, cx, move |update| {
client.request(update).map(|result| result.is_ok())
});
cx.foreground()
cx.executor()
.spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) })
}
@ -1339,7 +1338,7 @@ impl RemoteWorktree {
let version = buffer.version();
let rpc = self.client.clone();
let project_id = self.project_id;
cx.as_mut().spawn(|mut cx| async move {
cx.spawn(|_, mut cx| async move {
let response = rpc
.request(proto::SaveBuffer {
project_id,
@ -1356,7 +1355,7 @@ impl RemoteWorktree {
buffer_handle.update(&mut cx, |buffer, cx| {
buffer.did_save(version.clone(), fingerprint, mtime, cx);
});
})?;
Ok(())
})
@ -1436,7 +1435,7 @@ impl RemoteWorktree {
let entry = snapshot.insert_entry(entry);
worktree.snapshot = snapshot.clone();
entry
})
})?
})
}
@ -2634,7 +2633,7 @@ impl language2::File for File {
}
fn worktree_id(&self) -> usize {
self.worktree.id()
self.worktree.entity_id().as_u64() as usize
}
fn is_deleted(&self) -> bool {
@ -2647,7 +2646,7 @@ impl language2::File for File {
fn to_proto(&self) -> rpc::proto::File {
rpc::proto::File {
worktree_id: self.worktree.id() as u64,
worktree_id: self.worktree.entity_id().as_u64(),
entry_id: self.entry_id.to_proto(),
path: self.path.to_string_lossy().into(),
mtime: Some(self.mtime.into()),
@ -2670,8 +2669,7 @@ impl language2::LocalFile for File {
let worktree = self.worktree.read(cx).as_local().unwrap();
let abs_path = worktree.absolutize(&self.path);
let fs = worktree.fs.clone();
cx.background()
.spawn(async move { fs.load(&abs_path).await })
cx.executor().spawn(async move { fs.load(&abs_path).await })
}
fn buffer_reloaded(

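The worktree changes above follow the same shape: cx.add_model becomes cx.entity, cx.model_id() becomes cx.entity_id().as_u64(), and cx.spawn_weak plus manual upgrade(&cx) checks collapse into cx.spawn with a weak handle whose update call returns a Result. A minimal sketch of that spawn pattern, assuming the gpui2 types used above (notify_later is an illustrative helper):

use anyhow::Result;

// Sketch only: the closure receives a weak handle, and update() fails cleanly
// if the worktree was dropped, replacing the explicit upgrade check.
fn notify_later(cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
    cx.spawn(|this, mut cx| async move {
        this.update(&mut cx, |_worktree, cx| {
            // ...refresh state here (illustrative)...
            cx.notify();
        })?;
        Ok(())
    })
}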

@ -13,6 +13,7 @@ use fs::RealFs;
use futures::{channel::mpsc, SinkExt, StreamExt};
use gpui2::{App, AppContext, AssetSource, AsyncAppContext, SemanticVersion, Task};
use isahc::{prelude::Configurable, Request};
use language2::LanguageRegistry;
use log::LevelFilter;
use parking_lot::Mutex;
@ -73,7 +74,7 @@ fn main() {
let _user_keymap_file_rx =
watch_config_file(&app.executor(), fs.clone(), paths::KEYMAP.clone());
let _login_shell_env_loaded = if stdout_is_a_pty() {
let login_shell_env_loaded = if stdout_is_a_pty() {
Task::ready(())
} else {
app.executor().spawn(async {
@ -114,7 +115,7 @@ fn main() {
// languages.set_executor(cx.background().clone());
// languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone());
// let languages = Arc::new(languages);
// let node_runtime = RealNodeRuntime::new(http.clone());
let node_runtime = RealNodeRuntime::new(http.clone());
// languages::init(languages.clone(), node_runtime.clone(), cx);
// let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));