mirror of https://github.com/zed-industries/zed.git
synced 2025-01-06 11:06:58 +03:00

Merge branch 'main' into terminal-element

This commit is contained in:
commit 8d57d6ca6f

117  Cargo.lock  (generated)
@ -1222,7 +1222,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-broadcast",
"async-trait",
"audio2",
"client2",
"collections",
@ -1242,9 +1241,7 @@ dependencies = [
"serde_json",
"settings2",
"smallvec",
"ui2",
"util",
"workspace2",
]

[[package]]
@ -2115,7 +2112,7 @@ dependencies = [
"lsp2",
"node_runtime",
"parking_lot 0.11.2",
"rpc",
"rpc2",
"serde",
"serde_derive",
"settings2",
@ -2143,6 +2140,25 @@ dependencies = [
"workspace",
]

[[package]]
name = "copilot_button2"
version = "0.1.0"
dependencies = [
"anyhow",
"copilot2",
"editor2",
"fs2",
"futures 0.3.28",
"gpui2",
"language2",
"settings2",
"smol",
"theme2",
"util",
"workspace2",
"zed_actions2",
]

[[package]]
name = "core-foundation"
version = "0.9.3"
@ -4791,6 +4807,24 @@ dependencies = [
"workspace",
]

[[package]]
name = "language_selector2"
version = "0.1.0"
dependencies = [
"anyhow",
"editor2",
"fuzzy2",
"gpui2",
"language2",
"picker2",
"project2",
"settings2",
"theme2",
"ui2",
"util",
"workspace2",
]

[[package]]
name = "language_tools"
version = "0.1.0"
@ -6127,6 +6161,26 @@ dependencies = [
"workspace",
]

[[package]]
name = "outline2"
version = "0.1.0"
dependencies = [
"editor2",
"fuzzy2",
"gpui2",
"language2",
"ordered-float 2.10.0",
"picker2",
"postage",
"settings2",
"smol",
"text2",
"theme2",
"ui2",
"util",
"workspace2",
]

[[package]]
name = "overload"
version = "0.1.1"
@ -8178,6 +8232,57 @@ dependencies = [
"workspace",
]

[[package]]
name = "semantic_index2"
version = "0.1.0"
dependencies = [
"ai2",
"anyhow",
"async-trait",
"client2",
"collections",
"ctor",
"env_logger 0.9.3",
"futures 0.3.28",
"globset",
"gpui2",
"language2",
"lazy_static",
"log",
"ndarray",
"node_runtime",
"ordered-float 2.10.0",
"parking_lot 0.11.2",
"postage",
"pretty_assertions",
"project2",
"rand 0.8.5",
"rpc2",
"rusqlite",
"rust-embed",
"schemars",
"serde",
"serde_json",
"settings2",
"sha1",
"smol",
"tempdir",
"tiktoken-rs",
"tree-sitter",
"tree-sitter-cpp",
"tree-sitter-elixir",
"tree-sitter-json 0.20.0",
"tree-sitter-lua",
"tree-sitter-php",
"tree-sitter-ruby",
"tree-sitter-rust",
"tree-sitter-toml",
"tree-sitter-typescript",
"unindent",
"util",
"workspace2",
]

[[package]]
name = "semver"
version = "1.0.18"
@ -11477,7 +11582,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-recursion 1.0.5",
"async-trait",
"bincode",
"call2",
"client2",
@ -11753,6 +11857,7 @@ dependencies = [
"collections",
"command_palette2",
"copilot2",
"copilot_button2",
"ctor",
"db2",
"diagnostics2",
@ -11772,6 +11877,7 @@ dependencies = [
"isahc",
"journal2",
"language2",
"language_selector2",
"lazy_static",
"libc",
"log",
@ -11779,6 +11885,7 @@ dependencies = [
"menu2",
"node_runtime",
"num_cpus",
"outline2",
"parking_lot 0.11.2",
"postage",
"project2",

@ -61,6 +61,7 @@ members = [
"crates/language",
"crates/language2",
"crates/language_selector",
"crates/language_selector2",
"crates/language_tools",
"crates/live_kit_client",
"crates/live_kit_server",
@ -75,6 +76,7 @@ members = [
"crates/notifications",
"crates/notifications2",
"crates/outline",
"crates/outline2",
"crates/picker",
"crates/picker2",
"crates/plugin",
@ -93,6 +95,8 @@ members = [
"crates/rpc2",
"crates/search",
"crates/search2",
"crates/semantic_index",
"crates/semantic_index2",
"crates/settings",
"crates/settings2",
"crates/snippet",
@ -112,7 +116,6 @@ members = [
"crates/theme_selector2",
"crates/ui2",
"crates/util",
"crates/semantic_index",
"crates/story",
"crates/vim",
"crates/vcs_menu",

1  assets/icons/copy.svg  (Normal file)
@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-copy"><rect width="14" height="14" x="8" y="8" rx="2" ry="2"/><path d="M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2"/></svg>

@ -7,7 +7,7 @@ pub enum ProviderCredential {
NotNeeded,
}

pub trait CredentialProvider {
pub trait CredentialProvider: Send + Sync {
fn has_credentials(&self) -> bool;
fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential;
fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential);

@ -35,7 +35,7 @@ pub struct OpenAIEmbeddingProvider {
model: OpenAILanguageModel,
credential: Arc<RwLock<ProviderCredential>>,
pub client: Arc<dyn HttpClient>,
pub executor: Arc<BackgroundExecutor>,
pub executor: BackgroundExecutor,
rate_limit_count_rx: watch::Receiver<Option<Instant>>,
rate_limit_count_tx: Arc<Mutex<watch::Sender<Option<Instant>>>>,
}
@ -66,7 +66,7 @@ struct OpenAIEmbeddingUsage {
}

impl OpenAIEmbeddingProvider {
pub fn new(client: Arc<dyn HttpClient>, executor: Arc<BackgroundExecutor>) -> Self {
pub fn new(client: Arc<dyn HttpClient>, executor: BackgroundExecutor) -> Self {
let (rate_limit_count_tx, rate_limit_count_rx) = watch::channel_with(None);
let rate_limit_count_tx = Arc::new(Mutex::new(rate_limit_count_tx));

@ -31,9 +31,7 @@ media = { path = "../media" }
project = { package = "project2", path = "../project2" }
settings = { package = "settings2", path = "../settings2" }
util = { path = "../util" }
ui = {package = "ui2", path = "../ui2"}
workspace = {package = "workspace2", path = "../workspace2"}
async-trait.workspace = true

anyhow.workspace = true
async-broadcast = "0.4"
futures.workspace = true

@ -1,32 +1,25 @@
pub mod call_settings;
pub mod participant;
pub mod room;
mod shared_screen;

use anyhow::{anyhow, Result};
use async_trait::async_trait;
use audio::Audio;
use call_settings::CallSettings;
use client::{
proto::{self, PeerId},
Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE,
};
use client::{proto, Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
use collections::HashSet;
use futures::{channel::oneshot, future::Shared, Future, FutureExt};
use gpui::{
AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, PromptLevel,
Subscription, Task, View, ViewContext, VisualContext, WeakModel, WindowHandle,
AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task,
WeakModel,
};
pub use participant::ParticipantLocation;
use postage::watch;
use project::Project;
use room::Event;
pub use room::Room;
use settings::Settings;
use shared_screen::SharedScreen;
use std::sync::Arc;
use util::ResultExt;
use workspace::{item::ItemHandle, CallHandler, Pane, Workspace};

pub use participant::ParticipantLocation;
pub use room::Room;

pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
CallSettings::register(cx);
@ -334,55 +327,12 @@ impl ActiveCall {
pub fn join_channel(
&mut self,
channel_id: u64,
requesting_window: Option<WindowHandle<Workspace>>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Option<Model<Room>>>> {
if let Some(room) = self.room().cloned() {
if room.read(cx).channel_id() == Some(channel_id) {
return cx.spawn(|_, _| async move {
todo!();
// let future = room.update(&mut cx, |room, cx| {
// room.most_active_project(cx).map(|(host, project)| {
// room.join_project(project, host, app_state.clone(), cx)
// })
// })

// if let Some(future) = future {
// future.await?;
// }

// Ok(Some(room))
});
}

let should_prompt = room.update(cx, |room, _| {
room.channel_id().is_some()
&& room.is_sharing_project()
&& room.remote_participants().len() > 0
});
if should_prompt && requesting_window.is_some() {
return cx.spawn(|this, mut cx| async move {
let answer = requesting_window.unwrap().update(&mut cx, |_, cx| {
cx.prompt(
PromptLevel::Warning,
"Leaving this call will unshare your current project.\nDo you want to switch channels?",
&["Yes, Join Channel", "Cancel"],
)
})?;
if answer.await? == 1 {
return Ok(None);
}

room.update(&mut cx, |room, cx| room.clear_state(cx))?;

this.update(&mut cx, |this, cx| {
this.join_channel(channel_id, requesting_window, cx)
})?
.await
});
}

if room.read(cx).channel_id().is_some() {
return Task::ready(Ok(Some(room)));
} else {
room.update(cx, |room, cx| room.clear_state(cx));
}
}
@ -555,197 +505,6 @@ pub fn report_call_event_for_channel(
)
}

pub struct Call {
active_call: Option<(Model<ActiveCall>, Vec<Subscription>)>,
}

impl Call {
pub fn new(cx: &mut ViewContext<'_, Workspace>) -> Box<dyn CallHandler> {
let mut active_call = None;
if cx.has_global::<Model<ActiveCall>>() {
let call = cx.global::<Model<ActiveCall>>().clone();
let subscriptions = vec![cx.subscribe(&call, Self::on_active_call_event)];
active_call = Some((call, subscriptions));
}
Box::new(Self { active_call })
}
fn on_active_call_event(
workspace: &mut Workspace,
_: Model<ActiveCall>,
event: &room::Event,
cx: &mut ViewContext<Workspace>,
) {
match event {
room::Event::ParticipantLocationChanged { participant_id }
| room::Event::RemoteVideoTracksChanged { participant_id } => {
workspace.leader_updated(*participant_id, cx);
}
_ => {}
}
}
}

#[async_trait(?Send)]
impl CallHandler for Call {
fn peer_state(
&mut self,
leader_id: PeerId,
project: &Model<Project>,
cx: &mut ViewContext<Workspace>,
) -> Option<(bool, bool)> {
let (call, _) = self.active_call.as_ref()?;
let room = call.read(cx).room()?.read(cx);
let participant = room.remote_participant_for_peer_id(leader_id)?;

let leader_in_this_app;
let leader_in_this_project;
match participant.location {
ParticipantLocation::SharedProject { project_id } => {
leader_in_this_app = true;
leader_in_this_project = Some(project_id) == project.read(cx).remote_id();
}
ParticipantLocation::UnsharedProject => {
leader_in_this_app = true;
leader_in_this_project = false;
}
ParticipantLocation::External => {
leader_in_this_app = false;
leader_in_this_project = false;
}
};

Some((leader_in_this_project, leader_in_this_app))
}

fn shared_screen_for_peer(
&self,
peer_id: PeerId,
pane: &View<Pane>,
cx: &mut ViewContext<Workspace>,
) -> Option<Box<dyn ItemHandle>> {
let (call, _) = self.active_call.as_ref()?;
let room = call.read(cx).room()?.read(cx);
let participant = room.remote_participant_for_peer_id(peer_id)?;
let track = participant.video_tracks.values().next()?.clone();
let user = participant.user.clone();
for item in pane.read(cx).items_of_type::<SharedScreen>() {
if item.read(cx).peer_id == peer_id {
return Some(Box::new(item));
}
}

Some(Box::new(cx.build_view(|cx| {
SharedScreen::new(&track, peer_id, user.clone(), cx)
})))
}
fn room_id(&self, cx: &AppContext) -> Option<u64> {
Some(self.active_call.as_ref()?.0.read(cx).room()?.read(cx).id())
}
fn hang_up(&self, cx: &mut AppContext) -> Task<Result<()>> {
let Some((call, _)) = self.active_call.as_ref() else {
return Task::ready(Err(anyhow!("Cannot exit a call; not in a call")));
};

call.update(cx, |this, cx| this.hang_up(cx))
}
fn active_project(&self, cx: &AppContext) -> Option<WeakModel<Project>> {
ActiveCall::global(cx).read(cx).location().cloned()
}
fn invite(
&mut self,
called_user_id: u64,
initial_project: Option<Model<Project>>,
cx: &mut AppContext,
) -> Task<Result<()>> {
ActiveCall::global(cx).update(cx, |this, cx| {
this.invite(called_user_id, initial_project, cx)
})
}
fn remote_participants(&self, cx: &AppContext) -> Option<Vec<(Arc<User>, PeerId)>> {
self.active_call
.as_ref()
.map(|call| {
call.0.read(cx).room().map(|room| {
room.read(cx)
.remote_participants()
.iter()
.map(|participant| {
(participant.1.user.clone(), participant.1.peer_id.clone())
})
.collect()
})
})
.flatten()
}
fn is_muted(&self, cx: &AppContext) -> Option<bool> {
self.active_call
.as_ref()
.map(|call| {
call.0
.read(cx)
.room()
.map(|room| room.read(cx).is_muted(cx))
})
.flatten()
}
fn toggle_mute(&self, cx: &mut AppContext) {
self.active_call.as_ref().map(|call| {
call.0.update(cx, |this, cx| {
this.room().map(|room| {
let room = room.clone();
cx.spawn(|_, mut cx| async move {
room.update(&mut cx, |this, cx| this.toggle_mute(cx))??
.await
})
.detach_and_log_err(cx);
})
})
});
}
fn toggle_screen_share(&self, cx: &mut AppContext) {
self.active_call.as_ref().map(|call| {
call.0.update(cx, |this, cx| {
this.room().map(|room| {
room.update(cx, |this, cx| {
if this.is_screen_sharing() {
this.unshare_screen(cx).log_err();
} else {
let t = this.share_screen(cx);
cx.spawn(move |_, _| async move {
t.await.log_err();
})
.detach();
}
})
})
})
});
}
fn toggle_deafen(&self, cx: &mut AppContext) {
self.active_call.as_ref().map(|call| {
call.0.update(cx, |this, cx| {
this.room().map(|room| {
room.update(cx, |this, cx| {
this.toggle_deafen(cx).log_err();
})
})
})
});
}
fn is_deafened(&self, cx: &AppContext) -> Option<bool> {
self.active_call
.as_ref()
.map(|call| {
call.0
.read(cx)
.room()
.map(|room| room.read(cx).is_deafened())
})
.flatten()
.flatten()
}
}

#[cfg(test)]
mod test {
use gpui::TestAppContext;

@ -4,7 +4,7 @@ use client::{proto, User};
use collections::HashMap;
use gpui::WeakModel;
pub use live_kit_client::Frame;
pub(crate) use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack};
pub use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack};
use project::Project;
use std::sync::Arc;

@ -4,8 +4,10 @@ use collab_ui::notifications::project_shared_notification::ProjectSharedNotifica
use editor::{Editor, ExcerptRange, MultiBuffer};
use gpui::{executor::Deterministic, geometry::vector::vec2f, TestAppContext, ViewHandle};
use live_kit_client::MacOSDisplay;
use project::project_settings::ProjectSettings;
use rpc::proto::PeerId;
use serde_json::json;
use settings::SettingsStore;
use std::{borrow::Cow, sync::Arc};
use workspace::{
dock::{test::TestPanel, DockPosition},
@ -1602,6 +1604,141 @@ async fn test_following_across_workspaces(
});
}

#[gpui::test]
async fn test_following_into_excluded_file(
deterministic: Arc<Deterministic>,
mut cx_a: &mut TestAppContext,
mut cx_b: &mut TestAppContext,
) {
deterministic.forbid_parking();

let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
for cx in [&mut cx_a, &mut cx_b] {
cx.update(|cx| {
cx.update_global::<SettingsStore, _, _>(|store, cx| {
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
});
});
});
}
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b = cx_b.read(ActiveCall::global);

cx_a.update(editor::init);
cx_b.update(editor::init);

client_a
.fs()
.insert_tree(
"/a",
json!({
".git": {
"COMMIT_EDITMSG": "write your commit message here",
},
"1.txt": "one\none\none",
"2.txt": "two\ntwo\ntwo",
"3.txt": "three\nthree\nthree",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await
.unwrap();

let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
let project_b = client_b.build_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
.unwrap();

let window_a = client_a.build_workspace(&project_a, cx_a);
let workspace_a = window_a.root(cx_a);
let peer_id_a = client_a.peer_id().unwrap();
let window_b = client_b.build_workspace(&project_b, cx_b);
let workspace_b = window_b.root(cx_b);

// Client A opens editors for a regular file and an excluded file.
let editor_for_regular = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, "1.txt"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let editor_for_excluded_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();

// Client A updates their selections in those editors
editor_for_regular.update(cx_a, |editor, cx| {
editor.handle_input("a", cx);
editor.handle_input("b", cx);
editor.handle_input("c", cx);
editor.select_left(&Default::default(), cx);
assert_eq!(editor.selections.ranges(cx), vec![3..2]);
});
editor_for_excluded_a.update(cx_a, |editor, cx| {
editor.select_all(&Default::default(), cx);
editor.handle_input("new commit message", cx);
editor.select_left(&Default::default(), cx);
assert_eq!(editor.selections.ranges(cx), vec![18..17]);
});

// When client B starts following client A, currently visible file is replicated
workspace_b
.update(cx_b, |workspace, cx| {
workspace.follow(peer_id_a, cx).unwrap()
})
.await
.unwrap();

let editor_for_excluded_b = workspace_b.read_with(cx_b, |workspace, cx| {
workspace
.active_item(cx)
.unwrap()
.downcast::<Editor>()
.unwrap()
});
assert_eq!(
cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
Some((worktree_id, ".git/COMMIT_EDITMSG").into())
);
assert_eq!(
editor_for_excluded_b.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
vec![18..17]
);

// Changes from B to the excluded file are replicated in A's editor
editor_for_excluded_b.update(cx_b, |editor, cx| {
editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
});
deterministic.run_until_parked();
editor_for_excluded_a.update(cx_a, |editor, cx| {
assert_eq!(
editor.text(cx),
"new commit messag\nCo-Authored-By: B <b@b.b>"
);
});
}

fn visible_push_notifications(
cx: &mut TestAppContext,
) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> {

@ -2981,11 +2981,10 @@ async fn test_fs_operations(

let entry = project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "c.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "c.txt"), false, cx)
})
.await
.unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
@ -3010,7 +3009,6 @@ async fn test_fs_operations(
.update(cx_b, |project, cx| {
project.rename_entry(entry.id, Path::new("d.txt"), cx)
})
.unwrap()
.await
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@ -3034,11 +3032,10 @@ async fn test_fs_operations(

let dir_entry = project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR"), true, cx)
.unwrap()
project.create_entry((worktree_id, "DIR"), true, cx)
})
.await
.unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
@ -3061,25 +3058,19 @@ async fn test_fs_operations(

project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR/e.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "DIR/e.txt"), false, cx)
})
.await
.unwrap();
project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
.unwrap()
project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
})
.await
.unwrap();
project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
})
.await
.unwrap();
@ -3120,9 +3111,7 @@ async fn test_fs_operations(

project_b
.update(cx_b, |project, cx| {
project
.copy_entry(entry.id, Path::new("f.txt"), cx)
.unwrap()
project.copy_entry(entry.id, Path::new("f.txt"), cx)
})
.await
.unwrap();

@ -665,7 +665,6 @@ impl RandomizedTest for ProjectCollaborationTest {
ensure_project_shared(&project, client, cx).await;
project
.update(cx, |p, cx| p.create_entry(project_path, is_dir, cx))
.unwrap()
.await?;
}

@ -364,8 +364,7 @@ async fn test_joining_channel_ancestor_member(
let active_call_b = cx_b.read(ActiveCall::global);

assert!(active_call_b
.update(cx_b, |active_call, cx| active_call
.join_channel(sub_id, None, cx))
.update(cx_b, |active_call, cx| active_call.join_channel(sub_id, cx))
.await
.is_ok());
}
@ -395,9 +394,7 @@ async fn test_channel_room(
let active_call_b = cx_b.read(ActiveCall::global);

active_call_a
.update(cx_a, |active_call, cx| {
active_call.join_channel(zed_id, None, cx)
})
.update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();

@ -445,9 +442,7 @@ async fn test_channel_room(
});

active_call_b
.update(cx_b, |active_call, cx| {
active_call.join_channel(zed_id, None, cx)
})
.update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();

@ -564,16 +559,12 @@ async fn test_channel_room(
});

active_call_a
.update(cx_a, |active_call, cx| {
active_call.join_channel(zed_id, None, cx)
})
.update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();

active_call_b
.update(cx_b, |active_call, cx| {
active_call.join_channel(zed_id, None, cx)
})
.update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();

@ -617,9 +608,7 @@ async fn test_channel_jumping(executor: BackgroundExecutor, cx_a: &mut TestAppCo
let active_call_a = cx_a.read(ActiveCall::global);

active_call_a
.update(cx_a, |active_call, cx| {
active_call.join_channel(zed_id, None, cx)
})
.update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();

@ -638,7 +627,7 @@ async fn test_channel_jumping(executor: BackgroundExecutor, cx_a: &mut TestAppCo

active_call_a
.update(cx_a, |active_call, cx| {
active_call.join_channel(rust_id, None, cx)
active_call.join_channel(rust_id, cx)
})
.await
.unwrap();
@ -804,7 +793,7 @@ async fn test_call_from_channel(
let active_call_b = cx_b.read(ActiveCall::global);

active_call_a
.update(cx_a, |call, cx| call.join_channel(channel_id, None, cx))
.update(cx_a, |call, cx| call.join_channel(channel_id, cx))
.await
.unwrap();

@ -1297,7 +1286,7 @@ async fn test_guest_access(

// Non-members should not be allowed to join
assert!(active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_a, None, cx))
.update(cx_b, |call, cx| call.join_channel(channel_a, cx))
.await
.is_err());

@ -1319,7 +1308,7 @@ async fn test_guest_access(

// Client B joins channel A as a guest
active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_a, None, cx))
.update(cx_b, |call, cx| call.join_channel(channel_a, cx))
.await
.unwrap();

@ -1352,7 +1341,7 @@ async fn test_guest_access(
assert_channels_list_shape(client_b.channel_store(), cx_b, &[]);

active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_b, None, cx))
.update(cx_b, |call, cx| call.join_channel(channel_b, cx))
.await
.unwrap();

@ -1383,7 +1372,7 @@ async fn test_invite_access(

// should not be allowed to join
assert!(active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_b_id, None, cx))
.update(cx_b, |call, cx| call.join_channel(channel_b_id, cx))
.await
.is_err());

@ -1401,7 +1390,7 @@ async fn test_invite_access(
.unwrap();

active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_b_id, None, cx))
.update(cx_b, |call, cx| call.join_channel(channel_b_id, cx))
.await
.unwrap();

@ -4,10 +4,12 @@
// use call::ActiveCall;
// use collab_ui::notifications::project_shared_notification::ProjectSharedNotification;
// use editor::{Editor, ExcerptRange, MultiBuffer};
// use gpui::{BackgroundExecutor, TestAppContext, View};
// use gpui::{point, BackgroundExecutor, TestAppContext, View, VisualTestContext, WindowContext};
// use live_kit_client::MacOSDisplay;
// use project::project_settings::ProjectSettings;
// use rpc::proto::PeerId;
// use serde_json::json;
// use settings::SettingsStore;
// use std::borrow::Cow;
// use workspace::{
// dock::{test::TestPanel, DockPosition},
@ -24,7 +26,7 @@
// cx_c: &mut TestAppContext,
// cx_d: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// let client_c = server.create_client(cx_c, "user_c").await;
@ -71,12 +73,22 @@
// .unwrap();

// let window_a = client_a.build_workspace(&project_a, cx_a);
// let workspace_a = window_a.root(cx_a);
// let workspace_a = window_a.root(cx_a).unwrap();
// let window_b = client_b.build_workspace(&project_b, cx_b);
// let workspace_b = window_b.root(cx_b);
// let workspace_b = window_b.root(cx_b).unwrap();

// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;
// let mut cx_c = VisualTestContext::from_window(*window_c, cx_c);
// let cx_c = &mut cx_c;
// let mut cx_d = VisualTestContext::from_window(*window_d, cx_d);
// let cx_d = &mut cx_d;

// // Client A opens some editors.
// let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
// let editor_a1 = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@ -132,8 +144,8 @@
// .await
// .unwrap();

// cx_c.foreground().run_until_parked();
// let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| {
// cx_c.executor().run_until_parked();
// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
@ -145,19 +157,19 @@
// Some((worktree_id, "2.txt").into())
// );
// assert_eq!(
// editor_b2.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
// editor_b2.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![2..1]
// );
// assert_eq!(
// editor_b1.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
// editor_b1.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![3..2]
// );

// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// let active_call_c = cx_c.read(ActiveCall::global);
// let project_c = client_c.build_remote_project(project_id, cx_c).await;
// let window_c = client_c.build_workspace(&project_c, cx_c);
// let workspace_c = window_c.root(cx_c);
// let workspace_c = window_c.root(cx_c).unwrap();
// active_call_c
// .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
// .await
@ -172,10 +184,13 @@
// .await
// .unwrap();

// cx_d.foreground().run_until_parked();
// cx_d.executor().run_until_parked();
// let active_call_d = cx_d.read(ActiveCall::global);
// let project_d = client_d.build_remote_project(project_id, cx_d).await;
// let workspace_d = client_d.build_workspace(&project_d, cx_d).root(cx_d);
// let workspace_d = client_d
// .build_workspace(&project_d, cx_d)
// .root(cx_d)
// .unwrap();
// active_call_d
// .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
// .await
@ -183,7 +198,7 @@
// drop(project_d);

// // All clients see that clients B and C are following client A.
// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -198,7 +213,7 @@
// });

// // All clients see that clients B is following client A.
// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -216,7 +231,7 @@
// .unwrap();

// // All clients see that clients B and C are following client A.
// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -240,7 +255,7 @@
// .unwrap();

// // All clients see that D is following C
// cx_d.foreground().run_until_parked();
// cx_d.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -257,7 +272,7 @@
// cx_c.drop_last(workspace_c);

// // Clients A and B see that client B is following A, and client C is not present in the followers.
// cx_c.foreground().run_until_parked();
// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@ -271,12 +286,15 @@
// workspace.activate_item(&editor_a1, cx)
// });
// executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
// workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b1.item_id()
// );
// });

// // When client A opens a multibuffer, client B does so as well.
// let multibuffer_a = cx_a.add_model(|cx| {
// let multibuffer_a = cx_a.build_model(|cx| {
// let buffer_a1 = project_a.update(cx, |project, cx| {
// project
// .get_open_buffer(&(worktree_id, "1.txt").into(), cx)
@ -308,12 +326,12 @@
// });
// let multibuffer_editor_a = workspace_a.update(cx_a, |workspace, cx| {
// let editor =
// cx.add_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
// cx.build_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
// workspace.add_item(Box::new(editor.clone()), cx);
// editor
// });
// executor.run_until_parked();
// let multibuffer_editor_b = workspace_b.read_with(cx_b, |workspace, cx| {
// let multibuffer_editor_b = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
@ -321,8 +339,8 @@
// .unwrap()
// });
// assert_eq!(
// multibuffer_editor_a.read_with(cx_a, |editor, cx| editor.text(cx)),
// multibuffer_editor_b.read_with(cx_b, |editor, cx| editor.text(cx)),
// multibuffer_editor_a.update(cx_a, |editor, cx| editor.text(cx)),
// multibuffer_editor_b.update(cx_b, |editor, cx| editor.text(cx)),
// );

// // When client A navigates back and forth, client B does so as well.
@ -333,8 +351,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
// workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b1.item_id()
// );
// });

// workspace_a
@ -344,8 +365,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b2.id());
// workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b2.item_id()
// );
// });

// workspace_a
@ -355,8 +379,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
// workspace_b.read_with(cx_b, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
// workspace_b.update(cx_b, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_b1.item_id()
// );
// });

// // Changes to client A's editor are reflected on client B.
@ -364,20 +391,20 @@
// editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2]));
// });
// executor.run_until_parked();
// editor_b1.read_with(cx_b, |editor, cx| {
// editor_b1.update(cx_b, |editor, cx| {
// assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]);
// });

// editor_a1.update(cx_a, |editor, cx| editor.set_text("TWO", cx));
// executor.run_until_parked();
// editor_b1.read_with(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));
// editor_b1.update(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));

// editor_a1.update(cx_a, |editor, cx| {
// editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
// editor.set_scroll_position(vec2f(0., 100.), cx);
// editor.set_scroll_position(point(0., 100.), cx);
// });
// executor.run_until_parked();
// editor_b1.read_with(cx_b, |editor, cx| {
// editor_b1.update(cx_b, |editor, cx| {
// assert_eq!(editor.selections.ranges(cx), &[3..3]);
// });

@ -390,11 +417,11 @@
// });
// executor.run_until_parked();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, cx| workspace
// workspace_b.update(cx_b, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
// .id()),
// editor_b1.id()
// .item_id()),
// editor_b1.item_id()
// );

// // Client A starts following client B.
@ -405,15 +432,15 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// Some(peer_id_b)
// );
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, cx| workspace
// workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
// .id()),
// editor_a1.id()
// .item_id()),
// editor_a1.item_id()
// );

// // Client B activates an external window, which causes a new screen-sharing item to be added to the pane.
@ -432,7 +459,7 @@
// .await
// .unwrap();
// executor.run_until_parked();
// let shared_screen = workspace_a.read_with(cx_a, |workspace, cx| {
// let shared_screen = workspace_a.update(cx_a, |workspace, cx| {
// workspace
// .active_item(cx)
// .expect("no active item")
@ -446,8 +473,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
// workspace_a.read_with(cx_a, |workspace, cx| {
// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_a1.id())
// workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().item_id(),
// editor_a1.item_id()
// )
// });

// // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
@ -455,26 +485,26 @@
// workspace.activate_item(&multibuffer_editor_b, cx)
// });
// executor.run_until_parked();
// workspace_a.read_with(cx_a, |workspace, cx| {
// workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().id(),
// multibuffer_editor_a.id()
// workspace.active_item(cx).unwrap().item_id(),
// multibuffer_editor_a.item_id()
// )
// });

// // Client B activates a panel, and the previously-opened screen-sharing item gets activated.
// let panel = window_b.add_view(cx_b, |_| TestPanel::new(DockPosition::Left));
// let panel = window_b.build_view(cx_b, |_| TestPanel::new(DockPosition::Left));
// workspace_b.update(cx_b, |workspace, cx| {
// workspace.add_panel(panel, cx);
// workspace.toggle_panel_focus::<TestPanel>(cx);
// });
// executor.run_until_parked();
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, cx| workspace
// workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
// .id()),
// shared_screen.id()
// .item_id()),
// shared_screen.item_id()
// );

// // Toggling the focus back to the pane causes client A to return to the multibuffer.
@ -482,16 +512,16 @@
// workspace.toggle_panel_focus::<TestPanel>(cx);
// });
// executor.run_until_parked();
// workspace_a.read_with(cx_a, |workspace, cx| {
// workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!(
// workspace.active_item(cx).unwrap().id(),
// multibuffer_editor_a.id()
// workspace.active_item(cx).unwrap().item_id(),
// multibuffer_editor_a.item_id()
// )
// });

// // Client B activates an item that doesn't implement following,
// // so the previously-opened screen-sharing item gets activated.
// let unfollowable_item = window_b.add_view(cx_b, |_| TestItem::new());
// let unfollowable_item = window_b.build_view(cx_b, |_| TestItem::new());
// workspace_b.update(cx_b, |workspace, cx| {
// workspace.active_pane().update(cx, |pane, cx| {
// pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
@ -499,18 +529,18 @@
// });
// executor.run_until_parked();
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, cx| workspace
// workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
// .id()),
// shared_screen.id()
// .item_id()),
// shared_screen.item_id()
// );

// // Following interrupts when client B disconnects.
// client_b.disconnect(&cx_b.to_async());
// executor.advance_clock(RECONNECT_TIMEOUT);
// assert_eq!(
// workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// None
// );
// }
@ -521,7 +551,7 @@
// cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@ -560,13 +590,19 @@
// .await
// .unwrap();

// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());

// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());

// let client_b_id = project_a.read_with(cx_a, |project, _| {
// let client_b_id = project_a.update(cx_a, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });

@ -584,7 +620,7 @@
// .await
// .unwrap();

// let pane_paths = |pane: &ViewHandle<workspace::Pane>, cx: &mut TestAppContext| {
// let pane_paths = |pane: &View<workspace::Pane>, cx: &mut TestAppContext| {
// pane.update(cx, |pane, cx| {
// pane.items()
// .map(|item| {
@ -642,7 +678,7 @@
// cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@ -685,7 +721,10 @@
// .unwrap();

// // Client A opens a file.
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@ -696,7 +735,10 @@
// .unwrap();

// // Client B opens a different file.
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// workspace_b
// .update(cx_b, |workspace, cx| {
// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
@ -1167,7 +1209,7 @@
// cx_b: &mut TestAppContext,
// ) {
// // 2 clients connect to a server.
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@ -1207,8 +1249,17 @@
// .await
// .unwrap();

// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;

// // Client A opens some editors.
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let _editor_a1 = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@ -1219,9 +1270,12 @@
// .unwrap();

// // Client B starts following client A.
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
// let leader_id = project_b.read_with(cx_b, |project, _| {
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();
// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
// let leader_id = project_b.update(cx_b, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
// workspace_b
@ -1231,10 +1285,10 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
// let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| {
// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
@ -1245,7 +1299,7 @@
// // When client B moves, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| editor.move_right(&editor::MoveRight, cx));
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );

@ -1256,14 +1310,14 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );

// // When client B edits, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| editor.insert("X", cx));
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );

@ -1274,16 +1328,16 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );

// // When client B scrolls, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| {
// editor.set_scroll_position(vec2f(0., 3.), cx)
// editor.set_scroll_position(point(0., 3.), cx)
// });
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );

@ -1294,7 +1348,7 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );

@ -1303,13 +1357,13 @@
// workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
// });
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );

// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );

@ -1321,7 +1375,7 @@
// .await
// .unwrap();
// assert_eq!(
// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );
// }
@ -1332,7 +1386,7 @@
// cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@ -1345,20 +1399,26 @@

// client_a.fs().insert_tree("/a", json!({})).await;
// let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let project_id = active_call_a
// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
// .await
// .unwrap();

// let project_b = client_b.build_remote_project(project_id, cx_b).await;
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();

// executor.run_until_parked();
// let client_a_id = project_b.read_with(cx_b, |project, _| {
// let client_a_id = project_b.update(cx_b, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
// let client_b_id = project_a.read_with(cx_a, |project, _| {
// let client_b_id = project_a.update(cx_a, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });

@ -1370,13 +1430,13 @@
// });

// futures::try_join!(a_follow_b, b_follow_a).unwrap();
// workspace_a.read_with(cx_a, |workspace, _| {
// workspace_a.update(cx_a, |workspace, _| {
// assert_eq!(
// workspace.leader_for_pane(workspace.active_pane()),
// Some(client_b_id)
// );
// });
// workspace_b.read_with(cx_b, |workspace, _| {
// workspace_b.update(cx_b, |workspace, _| {
// assert_eq!(
// workspace.leader_for_pane(workspace.active_pane()),
// Some(client_a_id)
@ -1398,7 +1458,7 @@
// // b opens a different file in project 2, a follows b
// // b opens a different file in project 1, a cannot follow b
// // b shares the project, a joins the project and follows b
// let mut server = TestServer::start(&executor).await;
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// cx_a.update(editor::init);
@ -1435,8 +1495,14 @@
// let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await;
// let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await;

// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
// let workspace_a = client_a
// .build_workspace(&project_a, cx_a)
// .root(cx_a)
// .unwrap();
// let workspace_b = client_b
// .build_workspace(&project_b, cx_b)
// .root(cx_b)
// .unwrap();

// cx_a.update(|cx| collab_ui::init(&client_a.app_state, cx));
// cx_b.update(|cx| collab_ui::init(&client_b.app_state, cx));
@ -1455,6 +1521,12 @@
// .await
// .unwrap();

// todo!("could be wrong")
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
// let cx_a = &mut cx_a;
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
// let cx_b = &mut cx_b;

// workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id_a, "w.rs"), None, true, cx)
@ -1476,11 +1548,12 @@
// let workspace_b_project_a = cx_b
// .windows()
// .iter()
// .max_by_key(|window| window.id())
// .max_by_key(|window| window.item_id())
// .unwrap()
// .downcast::<Workspace>()
// .unwrap()
// .root(cx_b);
// .root(cx_b)
// .unwrap();

// // assert that b is following a in project a in w.rs
// workspace_b_project_a.update(cx_b, |workspace, cx| {
@ -1534,7 +1607,7 @@
// workspace.leader_for_pane(workspace.active_pane())
// );
// let item = workspace.active_pane().read(cx).active_item().unwrap();
// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("x.rs"));
// assert_eq!(item.tab_description(0, cx).unwrap(), "x.rs".into());
// });

// // b moves to y.rs in b's project, a is still following but can't yet see
@ -1578,11 +1651,12 @@
// let workspace_a_project_b = cx_a
// .windows()
// .iter()
// .max_by_key(|window| window.id())
// .max_by_key(|window| window.item_id())
// .unwrap()
// .downcast::<Workspace>()
// .unwrap()
// .root(cx_a);
// .root(cx_a)
// .unwrap();

// workspace_a_project_b.update(cx_a, |workspace, cx| {
// assert_eq!(workspace.project().read(cx).remote_id(), Some(project_b_id));
@ -1596,12 +1670,151 @@
// });
// }

// #[gpui::test]
// async fn test_following_into_excluded_file(
// executor: BackgroundExecutor,
// mut cx_a: &mut TestAppContext,
// mut cx_b: &mut TestAppContext,
// ) {
// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// for cx in [&mut cx_a, &mut cx_b] {
// cx.update(|cx| {
// cx.update_global::<SettingsStore, _>(|store, cx| {
// store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
// project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
|
||||
// });
|
||||
// });
|
||||
// });
|
||||
// }
|
||||
// server
|
||||
// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
|
||||
// .await;
|
||||
// let active_call_a = cx_a.read(ActiveCall::global);
|
||||
// let active_call_b = cx_b.read(ActiveCall::global);
|
||||
|
||||
// cx_a.update(editor::init);
|
||||
// cx_b.update(editor::init);
|
||||
|
||||
// client_a
|
||||
// .fs()
|
||||
// .insert_tree(
|
||||
// "/a",
|
||||
// json!({
|
||||
// ".git": {
|
||||
// "COMMIT_EDITMSG": "write your commit message here",
|
||||
// },
|
||||
// "1.txt": "one\none\none",
|
||||
// "2.txt": "two\ntwo\ntwo",
|
||||
// "3.txt": "three\nthree\nthree",
|
||||
// }),
|
||||
// )
|
||||
// .await;
|
||||
// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
// active_call_a
|
||||
// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
|
||||
// .await
|
||||
// .unwrap();
|
||||
|
||||
// let project_id = active_call_a
|
||||
// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
// .await
|
||||
// .unwrap();
|
||||
// let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
// active_call_b
|
||||
// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
|
||||
// .await
|
||||
// .unwrap();
|
||||
|
||||
// let window_a = client_a.build_workspace(&project_a, cx_a);
|
||||
// let workspace_a = window_a.root(cx_a).unwrap();
|
||||
// let peer_id_a = client_a.peer_id().unwrap();
|
||||
// let window_b = client_b.build_workspace(&project_b, cx_b);
|
||||
// let workspace_b = window_b.root(cx_b).unwrap();
|
||||
|
||||
// todo!("could be wrong")
|
||||
// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
|
||||
// let cx_a = &mut cx_a;
|
||||
// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
|
||||
// let cx_b = &mut cx_b;
|
||||
|
||||
// // Client A opens editors for a regular file and an excluded file.
|
||||
// let editor_for_regular = workspace_a
|
||||
// .update(cx_a, |workspace, cx| {
|
||||
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
|
||||
// })
|
||||
// .await
|
||||
// .unwrap()
|
||||
// .downcast::<Editor>()
|
||||
// .unwrap();
|
||||
// let editor_for_excluded_a = workspace_a
|
||||
// .update(cx_a, |workspace, cx| {
|
||||
// workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
|
||||
// })
|
||||
// .await
|
||||
// .unwrap()
|
||||
// .downcast::<Editor>()
|
||||
// .unwrap();
|
||||
|
||||
// // Client A updates their selections in those editors
|
||||
// editor_for_regular.update(cx_a, |editor, cx| {
|
||||
// editor.handle_input("a", cx);
|
||||
// editor.handle_input("b", cx);
|
||||
// editor.handle_input("c", cx);
|
||||
// editor.select_left(&Default::default(), cx);
|
||||
// assert_eq!(editor.selections.ranges(cx), vec![3..2]);
|
||||
// });
|
||||
// editor_for_excluded_a.update(cx_a, |editor, cx| {
|
||||
// editor.select_all(&Default::default(), cx);
|
||||
// editor.handle_input("new commit message", cx);
|
||||
// editor.select_left(&Default::default(), cx);
|
||||
// assert_eq!(editor.selections.ranges(cx), vec![18..17]);
|
||||
// });
|
||||
|
||||
// // When client B starts following client A, currently visible file is replicated
|
||||
// workspace_b
|
||||
// .update(cx_b, |workspace, cx| {
|
||||
// workspace.follow(peer_id_a, cx).unwrap()
|
||||
// })
|
||||
// .await
|
||||
// .unwrap();
|
||||
|
||||
// let editor_for_excluded_b = workspace_b.update(cx_b, |workspace, cx| {
|
||||
// workspace
|
||||
// .active_item(cx)
|
||||
// .unwrap()
|
||||
// .downcast::<Editor>()
|
||||
// .unwrap()
|
||||
// });
|
||||
// assert_eq!(
|
||||
// cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
|
||||
// Some((worktree_id, ".git/COMMIT_EDITMSG").into())
|
||||
// );
|
||||
// assert_eq!(
|
||||
// editor_for_excluded_b.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
|
||||
// vec![18..17]
|
||||
// );
|
||||
|
||||
// // Changes from B to the excluded file are replicated in A's editor
|
||||
// editor_for_excluded_b.update(cx_b, |editor, cx| {
|
||||
// editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
|
||||
// });
|
||||
// executor.run_until_parked();
|
||||
// editor_for_excluded_a.update(cx_a, |editor, cx| {
|
||||
// assert_eq!(
|
||||
// editor.text(cx),
|
||||
// "new commit messag\nCo-Authored-By: B <b@b.b>"
|
||||
// );
|
||||
// });
|
||||
// }
|
||||
|
||||
// fn visible_push_notifications(
|
||||
// cx: &mut TestAppContext,
|
||||
// ) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> {
|
||||
// ) -> Vec<gpui::View<ProjectSharedNotification>> {
|
||||
// let mut ret = Vec::new();
|
||||
// for window in cx.windows() {
|
||||
// window.read_with(cx, |window| {
|
||||
// window.update(cx, |window| {
|
||||
// if let Some(handle) = window
|
||||
// .root_view()
|
||||
// .clone()
|
||||
@ -1645,8 +1858,8 @@
|
||||
// })
|
||||
// }
|
||||
|
||||
// fn pane_summaries(workspace: &ViewHandle<Workspace>, cx: &mut TestAppContext) -> Vec<PaneSummary> {
|
||||
// workspace.read_with(cx, |workspace, cx| {
|
||||
// fn pane_summaries(workspace: &View<Workspace>, cx: &mut WindowContext<'_>) -> Vec<PaneSummary> {
|
||||
// workspace.update(cx, |workspace, cx| {
|
||||
// let active_pane = workspace.active_pane();
|
||||
// workspace
|
||||
// .panes()
|
||||
|
@ -510,10 +510,9 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(

// Simultaneously join channel 1 and then channel 2
active_call_a
.update(cx_a, |call, cx| call.join_channel(channel_1, None, cx))
.update(cx_a, |call, cx| call.join_channel(channel_1, cx))
.detach();
let join_channel_2 =
active_call_a.update(cx_a, |call, cx| call.join_channel(channel_2, None, cx));
let join_channel_2 = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_2, cx));

join_channel_2.await.unwrap();

@ -539,8 +538,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
call.invite(client_c.user_id().unwrap(), None, cx)
});

let join_channel =
active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, None, cx));
let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx));

b_invite.await.unwrap();
c_invite.await.unwrap();
@ -569,8 +567,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
.unwrap();

// Simultaneously join channel 1 and call user B and user C from client A.
let join_channel =
active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, None, cx));
let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx));

let b_invite = active_call_a.update(cx_a, |call, cx| {
call.invite(client_b.user_id().unwrap(), None, cx)
@ -2784,11 +2781,10 @@ async fn test_fs_operations(

let entry = project_b
.update(cx_b, |project, cx| {
project
.create_entry((worktree_id, "c.txt"), false, cx)
.unwrap()
project.create_entry((worktree_id, "c.txt"), false, cx)
})
.await
.unwrap()
.unwrap();

worktree_a.read_with(cx_a, |worktree, _| {
@ -2815,8 +2811,8 @@ async fn test_fs_operations(
.update(cx_b, |project, cx| {
project.rename_entry(entry.id, Path::new("d.txt"), cx)
})
.unwrap()
.await
.unwrap()
.unwrap();

worktree_a.read_with(cx_a, |worktree, _| {
@ -2841,11 +2837,10 @@ async fn test_fs_operations(
|
||||
|
||||
let dir_entry = project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project
|
||||
.create_entry((worktree_id, "DIR"), true, cx)
|
||||
.unwrap()
|
||||
project.create_entry((worktree_id, "DIR"), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
worktree_a.read_with(cx_a, |worktree, _| {
|
||||
@ -2870,27 +2865,24 @@ async fn test_fs_operations(
|
||||
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project
|
||||
.create_entry((worktree_id, "DIR/e.txt"), false, cx)
|
||||
.unwrap()
|
||||
project.create_entry((worktree_id, "DIR/e.txt"), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project
|
||||
.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
|
||||
.unwrap()
|
||||
project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project
|
||||
.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
|
||||
.unwrap()
|
||||
project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
worktree_a.read_with(cx_a, |worktree, _| {
|
||||
@ -2931,11 +2923,10 @@ async fn test_fs_operations(
|
||||
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project
|
||||
.copy_entry(entry.id, Path::new("f.txt"), cx)
|
||||
.unwrap()
|
||||
project.copy_entry(entry.id, Path::new("f.txt"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
worktree_a.read_with(cx_a, |worktree, _| {
|
||||
|
@ -665,7 +665,6 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
ensure_project_shared(&project, client, cx).await;
|
||||
project
|
||||
.update(cx, |p, cx| p.create_entry(project_path, is_dir, cx))
|
||||
.unwrap()
|
||||
.await?;
|
||||
}
|
||||
|
||||
|
@ -221,7 +221,6 @@ impl TestServer {
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _, _| Default::default(),
|
||||
node_runtime: FakeNodeRuntime::new(),
|
||||
call_factory: |_| Box::new(workspace::TestCallHandler),
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
|
@ -3,58 +3,54 @@ use client::{
proto::{self, ChannelRole, ChannelVisibility},
User, UserId, UserStore,
};
use context_menu::{ContextMenu, ContextMenuItem};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
actions,
elements::*,
platform::{CursorStyle, MouseButton},
AppContext, ClipboardItem, Entity, ModelHandle, MouseState, Task, View, ViewContext,
ViewHandle,
actions, div, AppContext, ClipboardItem, DismissEvent, Div, Entity, EventEmitter,
FocusableView, Model, ParentElement, Render, Styled, Task, View, ViewContext, VisualContext,
WeakView,
};
use picker::{Picker, PickerDelegate, PickerEvent};
use picker::{Picker, PickerDelegate};
use std::sync::Arc;
use ui::v_stack;
use util::TryFutureExt;
use workspace::Modal;

actions!(
channel_modal,
[
SelectNextControl,
ToggleMode,
ToggleMemberAdmin,
RemoveMember
]
SelectNextControl,
ToggleMode,
ToggleMemberAdmin,
RemoveMember
);

pub fn init(cx: &mut AppContext) {
|
||||
Picker::<ChannelModalDelegate>::init(cx);
|
||||
cx.add_action(ChannelModal::toggle_mode);
|
||||
cx.add_action(ChannelModal::toggle_member_admin);
|
||||
cx.add_action(ChannelModal::remove_member);
|
||||
cx.add_action(ChannelModal::dismiss);
|
||||
}
|
||||
// pub fn init(cx: &mut AppContext) {
|
||||
// Picker::<ChannelModalDelegate>::init(cx);
|
||||
// cx.add_action(ChannelModal::toggle_mode);
|
||||
// cx.add_action(ChannelModal::toggle_member_admin);
|
||||
// cx.add_action(ChannelModal::remove_member);
|
||||
// cx.add_action(ChannelModal::dismiss);
|
||||
// }
|
||||
|
||||
pub struct ChannelModal {
|
||||
picker: ViewHandle<Picker<ChannelModalDelegate>>,
|
||||
channel_store: ModelHandle<ChannelStore>,
|
||||
picker: View<Picker<ChannelModalDelegate>>,
|
||||
channel_store: Model<ChannelStore>,
|
||||
channel_id: ChannelId,
|
||||
has_focus: bool,
|
||||
}
|
||||
|
||||
impl ChannelModal {
|
||||
pub fn new(
|
||||
user_store: ModelHandle<UserStore>,
|
||||
channel_store: ModelHandle<ChannelStore>,
|
||||
user_store: Model<UserStore>,
|
||||
channel_store: Model<ChannelStore>,
|
||||
channel_id: ChannelId,
|
||||
mode: Mode,
|
||||
members: Vec<ChannelMembership>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
cx.observe(&channel_store, |_, _, cx| cx.notify()).detach();
|
||||
let picker = cx.add_view(|cx| {
|
||||
let channel_modal = cx.view().downgrade();
|
||||
let picker = cx.build_view(|cx| {
|
||||
Picker::new(
|
||||
ChannelModalDelegate {
|
||||
channel_modal,
|
||||
matching_users: Vec::new(),
|
||||
matching_member_indices: Vec::new(),
|
||||
selected_index: 0,
|
||||
@ -64,20 +60,17 @@ impl ChannelModal {
|
||||
match_candidates: Vec::new(),
|
||||
members,
|
||||
mode,
|
||||
context_menu: cx.add_view(|cx| {
|
||||
let mut menu = ContextMenu::new(cx.view_id(), cx);
|
||||
menu.set_position_mode(OverlayPositionMode::Local);
|
||||
menu
|
||||
}),
|
||||
// context_menu: cx.add_view(|cx| {
|
||||
// let mut menu = ContextMenu::new(cx.view_id(), cx);
|
||||
// menu.set_position_mode(OverlayPositionMode::Local);
|
||||
// menu
|
||||
// }),
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.with_theme(|theme| theme.collab_panel.tabbed_modal.picker.clone())
|
||||
});
|
||||
|
||||
cx.subscribe(&picker, |_, _, e, cx| cx.emit(*e)).detach();
|
||||
|
||||
let has_focus = picker.read(cx).has_focus();
|
||||
let has_focus = picker.focus_handle(cx).contains_focused(cx);
|
||||
|
||||
Self {
|
||||
picker,
|
||||
@ -88,7 +81,7 @@ impl ChannelModal {
|
||||
}
|
||||
|
||||
fn toggle_mode(&mut self, _: &ToggleMode, cx: &mut ViewContext<Self>) {
|
||||
let mode = match self.picker.read(cx).delegate().mode {
|
||||
let mode = match self.picker.read(cx).delegate.mode {
|
||||
Mode::ManageMembers => Mode::InviteMembers,
|
||||
Mode::InviteMembers => Mode::ManageMembers,
|
||||
};
|
||||
@ -103,20 +96,20 @@ impl ChannelModal {
|
||||
let mut members = channel_store
|
||||
.update(&mut cx, |channel_store, cx| {
|
||||
channel_store.get_channel_member_details(channel_id, cx)
|
||||
})
|
||||
})?
|
||||
.await?;
|
||||
|
||||
members.sort_by(|a, b| a.sort_key().cmp(&b.sort_key()));
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.picker
|
||||
.update(cx, |picker, _| picker.delegate_mut().members = members);
|
||||
.update(cx, |picker, _| picker.delegate.members = members);
|
||||
})?;
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.picker.update(cx, |picker, cx| {
|
||||
let delegate = picker.delegate_mut();
|
||||
let delegate = &mut picker.delegate;
|
||||
delegate.mode = mode;
|
||||
delegate.selected_index = 0;
|
||||
picker.set_query("", cx);
|
||||
@ -131,203 +124,194 @@ impl ChannelModal {
|
||||
|
||||
fn toggle_member_admin(&mut self, _: &ToggleMemberAdmin, cx: &mut ViewContext<Self>) {
|
||||
self.picker.update(cx, |picker, cx| {
|
||||
picker.delegate_mut().toggle_selected_member_admin(cx);
|
||||
picker.delegate.toggle_selected_member_admin(cx);
|
||||
})
|
||||
}
|
||||
|
||||
fn remove_member(&mut self, _: &RemoveMember, cx: &mut ViewContext<Self>) {
|
||||
self.picker.update(cx, |picker, cx| {
|
||||
picker.delegate_mut().remove_selected_member(cx);
|
||||
picker.delegate.remove_selected_member(cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn dismiss(&mut self, _: &menu::Cancel, cx: &mut ViewContext<Self>) {
|
||||
cx.emit(PickerEvent::Dismiss);
|
||||
cx.emit(DismissEvent);
|
||||
}
|
||||
}
|
||||
|
||||
impl Entity for ChannelModal {
|
||||
type Event = PickerEvent;
|
||||
}
|
||||
impl EventEmitter<DismissEvent> for ChannelModal {}
|
||||
|
||||
impl View for ChannelModal {
|
||||
fn ui_name() -> &'static str {
|
||||
"ChannelModal"
|
||||
}
|
||||
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
|
||||
let theme = &theme::current(cx).collab_panel.tabbed_modal;
|
||||
|
||||
let mode = self.picker.read(cx).delegate().mode;
|
||||
let Some(channel) = self.channel_store.read(cx).channel_for_id(self.channel_id) else {
|
||||
return Empty::new().into_any();
|
||||
};
|
||||
|
||||
enum InviteMembers {}
|
||||
enum ManageMembers {}
|
||||
|
||||
fn render_mode_button<T: 'static>(
|
||||
mode: Mode,
|
||||
text: &'static str,
|
||||
current_mode: Mode,
|
||||
theme: &theme::TabbedModal,
|
||||
cx: &mut ViewContext<ChannelModal>,
|
||||
) -> AnyElement<ChannelModal> {
|
||||
let active = mode == current_mode;
|
||||
MouseEventHandler::new::<T, _>(0, cx, move |state, _| {
|
||||
let contained_text = theme.tab_button.style_for(active, state);
|
||||
Label::new(text, contained_text.text.clone())
|
||||
.contained()
|
||||
.with_style(contained_text.container.clone())
|
||||
})
|
||||
.on_click(MouseButton::Left, move |_, this, cx| {
|
||||
if !active {
|
||||
this.set_mode(mode, cx);
|
||||
}
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_visibility(
|
||||
channel_id: ChannelId,
|
||||
visibility: ChannelVisibility,
|
||||
theme: &theme::TabbedModal,
|
||||
cx: &mut ViewContext<ChannelModal>,
|
||||
) -> AnyElement<ChannelModal> {
|
||||
enum TogglePublic {}
|
||||
|
||||
if visibility == ChannelVisibility::Members {
|
||||
return Flex::row()
|
||||
.with_child(
|
||||
MouseEventHandler::new::<TogglePublic, _>(0, cx, move |state, _| {
|
||||
let style = theme.visibility_toggle.style_for(state);
|
||||
Label::new(format!("{}", "Public access: OFF"), style.text.clone())
|
||||
.contained()
|
||||
.with_style(style.container.clone())
|
||||
})
|
||||
.on_click(MouseButton::Left, move |_, this, cx| {
|
||||
this.channel_store
|
||||
.update(cx, |channel_store, cx| {
|
||||
channel_store.set_channel_visibility(
|
||||
channel_id,
|
||||
ChannelVisibility::Public,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand),
|
||||
)
|
||||
.into_any();
|
||||
}
|
||||
|
||||
Flex::row()
|
||||
.with_child(
|
||||
MouseEventHandler::new::<TogglePublic, _>(0, cx, move |state, _| {
|
||||
let style = theme.visibility_toggle.style_for(state);
|
||||
Label::new(format!("{}", "Public access: ON"), style.text.clone())
|
||||
.contained()
|
||||
.with_style(style.container.clone())
|
||||
})
|
||||
.on_click(MouseButton::Left, move |_, this, cx| {
|
||||
this.channel_store
|
||||
.update(cx, |channel_store, cx| {
|
||||
channel_store.set_channel_visibility(
|
||||
channel_id,
|
||||
ChannelVisibility::Members,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand),
|
||||
)
|
||||
.with_spacing(14.0)
|
||||
.with_child(
|
||||
MouseEventHandler::new::<TogglePublic, _>(1, cx, move |state, _| {
|
||||
let style = theme.channel_link.style_for(state);
|
||||
Label::new(format!("{}", "copy link"), style.text.clone())
|
||||
.contained()
|
||||
.with_style(style.container.clone())
|
||||
})
|
||||
.on_click(MouseButton::Left, move |_, this, cx| {
|
||||
if let Some(channel) =
|
||||
this.channel_store.read(cx).channel_for_id(channel_id)
|
||||
{
|
||||
let item = ClipboardItem::new(channel.link());
|
||||
cx.write_to_clipboard(item);
|
||||
}
|
||||
})
|
||||
.with_cursor_style(CursorStyle::PointingHand),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
Flex::column()
|
||||
.with_child(
|
||||
Flex::column()
|
||||
.with_child(
|
||||
Label::new(format!("#{}", channel.name), theme.title.text.clone())
|
||||
.contained()
|
||||
.with_style(theme.title.container.clone()),
|
||||
)
|
||||
.with_child(render_visibility(channel.id, channel.visibility, theme, cx))
|
||||
.with_child(Flex::row().with_children([
|
||||
render_mode_button::<InviteMembers>(
|
||||
Mode::InviteMembers,
|
||||
"Invite members",
|
||||
mode,
|
||||
theme,
|
||||
cx,
|
||||
),
|
||||
render_mode_button::<ManageMembers>(
|
||||
Mode::ManageMembers,
|
||||
"Manage members",
|
||||
mode,
|
||||
theme,
|
||||
cx,
|
||||
),
|
||||
]))
|
||||
.expanded()
|
||||
.contained()
|
||||
.with_style(theme.header),
|
||||
)
|
||||
.with_child(
|
||||
ChildView::new(&self.picker, cx)
|
||||
.contained()
|
||||
.with_style(theme.body),
|
||||
)
|
||||
.constrained()
|
||||
.with_max_height(theme.max_height)
|
||||
.with_max_width(theme.max_width)
|
||||
.contained()
|
||||
.with_style(theme.modal)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn focus_in(&mut self, _: gpui::AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
self.has_focus = true;
|
||||
if cx.is_self_focused() {
|
||||
cx.focus(&self.picker)
|
||||
}
|
||||
}
|
||||
|
||||
fn focus_out(&mut self, _: gpui::AnyViewHandle, _: &mut ViewContext<Self>) {
|
||||
self.has_focus = false;
|
||||
impl FocusableView for ChannelModal {
|
||||
fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle {
|
||||
self.picker.focus_handle(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Modal for ChannelModal {
|
||||
fn has_focus(&self) -> bool {
|
||||
self.has_focus
|
||||
impl Render for ChannelModal {
|
||||
type Element = Div;
|
||||
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
|
||||
v_stack().min_w_96().child(self.picker.clone())
|
||||
// let theme = &theme::current(cx).collab_panel.tabbed_modal;
|
||||
|
||||
// let mode = self.picker.read(cx).delegate().mode;
|
||||
// let Some(channel) = self.channel_store.read(cx).channel_for_id(self.channel_id) else {
|
||||
// return Empty::new().into_any();
|
||||
// };
|
||||
|
||||
// enum InviteMembers {}
|
||||
// enum ManageMembers {}
|
||||
|
||||
// fn render_mode_button<T: 'static>(
|
||||
// mode: Mode,
|
||||
// text: &'static str,
|
||||
// current_mode: Mode,
|
||||
// theme: &theme::TabbedModal,
|
||||
// cx: &mut ViewContext<ChannelModal>,
|
||||
// ) -> AnyElement<ChannelModal> {
|
||||
// let active = mode == current_mode;
|
||||
// MouseEventHandler::new::<T, _>(0, cx, move |state, _| {
|
||||
// let contained_text = theme.tab_button.style_for(active, state);
|
||||
// Label::new(text, contained_text.text.clone())
|
||||
// .contained()
|
||||
// .with_style(contained_text.container.clone())
|
||||
// })
|
||||
// .on_click(MouseButton::Left, move |_, this, cx| {
|
||||
// if !active {
|
||||
// this.set_mode(mode, cx);
|
||||
// }
|
||||
// })
|
||||
// .with_cursor_style(CursorStyle::PointingHand)
|
||||
// .into_any()
|
||||
// }
|
||||
|
||||
// fn render_visibility(
|
||||
// channel_id: ChannelId,
|
||||
// visibility: ChannelVisibility,
|
||||
// theme: &theme::TabbedModal,
|
||||
// cx: &mut ViewContext<ChannelModal>,
|
||||
// ) -> AnyElement<ChannelModal> {
|
||||
// enum TogglePublic {}
|
||||
|
||||
// if visibility == ChannelVisibility::Members {
|
||||
// return Flex::row()
|
||||
// .with_child(
|
||||
// MouseEventHandler::new::<TogglePublic, _>(0, cx, move |state, _| {
|
||||
// let style = theme.visibility_toggle.style_for(state);
|
||||
// Label::new(format!("{}", "Public access: OFF"), style.text.clone())
|
||||
// .contained()
|
||||
// .with_style(style.container.clone())
|
||||
// })
|
||||
// .on_click(MouseButton::Left, move |_, this, cx| {
|
||||
// this.channel_store
|
||||
// .update(cx, |channel_store, cx| {
|
||||
// channel_store.set_channel_visibility(
|
||||
// channel_id,
|
||||
// ChannelVisibility::Public,
|
||||
// cx,
|
||||
// )
|
||||
// })
|
||||
// .detach_and_log_err(cx);
|
||||
// })
|
||||
// .with_cursor_style(CursorStyle::PointingHand),
|
||||
// )
|
||||
// .into_any();
|
||||
// }
|
||||
|
||||
// Flex::row()
|
||||
// .with_child(
|
||||
// MouseEventHandler::new::<TogglePublic, _>(0, cx, move |state, _| {
|
||||
// let style = theme.visibility_toggle.style_for(state);
|
||||
// Label::new(format!("{}", "Public access: ON"), style.text.clone())
|
||||
// .contained()
|
||||
// .with_style(style.container.clone())
|
||||
// })
|
||||
// .on_click(MouseButton::Left, move |_, this, cx| {
|
||||
// this.channel_store
|
||||
// .update(cx, |channel_store, cx| {
|
||||
// channel_store.set_channel_visibility(
|
||||
// channel_id,
|
||||
// ChannelVisibility::Members,
|
||||
// cx,
|
||||
// )
|
||||
// })
|
||||
// .detach_and_log_err(cx);
|
||||
// })
|
||||
// .with_cursor_style(CursorStyle::PointingHand),
|
||||
// )
|
||||
// .with_spacing(14.0)
|
||||
// .with_child(
|
||||
// MouseEventHandler::new::<TogglePublic, _>(1, cx, move |state, _| {
|
||||
// let style = theme.channel_link.style_for(state);
|
||||
// Label::new(format!("{}", "copy link"), style.text.clone())
|
||||
// .contained()
|
||||
// .with_style(style.container.clone())
|
||||
// })
|
||||
// .on_click(MouseButton::Left, move |_, this, cx| {
|
||||
// if let Some(channel) =
|
||||
// this.channel_store.read(cx).channel_for_id(channel_id)
|
||||
// {
|
||||
// let item = ClipboardItem::new(channel.link());
|
||||
// cx.write_to_clipboard(item);
|
||||
// }
|
||||
// })
|
||||
// .with_cursor_style(CursorStyle::PointingHand),
|
||||
// )
|
||||
// .into_any()
|
||||
// }
|
||||
|
||||
// Flex::column()
|
||||
// .with_child(
|
||||
// Flex::column()
|
||||
// .with_child(
|
||||
// Label::new(format!("#{}", channel.name), theme.title.text.clone())
|
||||
// .contained()
|
||||
// .with_style(theme.title.container.clone()),
|
||||
// )
|
||||
// .with_child(render_visibility(channel.id, channel.visibility, theme, cx))
|
||||
// .with_child(Flex::row().with_children([
|
||||
// render_mode_button::<InviteMembers>(
|
||||
// Mode::InviteMembers,
|
||||
// "Invite members",
|
||||
// mode,
|
||||
// theme,
|
||||
// cx,
|
||||
// ),
|
||||
// render_mode_button::<ManageMembers>(
|
||||
// Mode::ManageMembers,
|
||||
// "Manage members",
|
||||
// mode,
|
||||
// theme,
|
||||
// cx,
|
||||
// ),
|
||||
// ]))
|
||||
// .expanded()
|
||||
// .contained()
|
||||
// .with_style(theme.header),
|
||||
// )
|
||||
// .with_child(
|
||||
// ChildView::new(&self.picker, cx)
|
||||
// .contained()
|
||||
// .with_style(theme.body),
|
||||
// )
|
||||
// .constrained()
|
||||
// .with_max_height(theme.max_height)
|
||||
// .with_max_width(theme.max_width)
|
||||
// .contained()
|
||||
// .with_style(theme.modal)
|
||||
// .into_any()
|
||||
}
|
||||
|
||||
fn dismiss_on_event(event: &Self::Event) -> bool {
|
||||
match event {
|
||||
PickerEvent::Dismiss => true,
|
||||
}
|
||||
}
|
||||
// fn focus_in(&mut self, _: gpui::AnyViewHandle, cx: &mut ViewContext<Self>) {
|
||||
// self.has_focus = true;
|
||||
// if cx.is_self_focused() {
|
||||
// cx.focus(&self.picker)
|
||||
// }
|
||||
// }
|
||||
|
||||
// fn focus_out(&mut self, _: gpui::AnyViewHandle, _: &mut ViewContext<Self>) {
|
||||
// self.has_focus = false;
|
||||
// }
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq)]
@ -337,19 +321,22 @@ pub enum Mode {
}

pub struct ChannelModalDelegate {
channel_modal: WeakView<ChannelModal>,
matching_users: Vec<Arc<User>>,
matching_member_indices: Vec<usize>,
user_store: ModelHandle<UserStore>,
channel_store: ModelHandle<ChannelStore>,
user_store: Model<UserStore>,
channel_store: Model<ChannelStore>,
channel_id: ChannelId,
selected_index: usize,
mode: Mode,
match_candidates: Vec<StringMatchCandidate>,
members: Vec<ChannelMembership>,
context_menu: ViewHandle<ContextMenu>,
// context_menu: ViewHandle<ContextMenu>,
}

impl PickerDelegate for ChannelModalDelegate {
type ListItem = Div;

fn placeholder_text(&self) -> Arc<str> {
"Search collaborator by username...".into()
}
@ -382,19 +369,19 @@ impl PickerDelegate for ChannelModalDelegate {
|
||||
}
|
||||
}));
|
||||
|
||||
let matches = cx.background().block(match_strings(
|
||||
let matches = cx.background_executor().block(match_strings(
|
||||
&self.match_candidates,
|
||||
&query,
|
||||
true,
|
||||
usize::MAX,
|
||||
&Default::default(),
|
||||
cx.background().clone(),
|
||||
cx.background_executor().clone(),
|
||||
));
|
||||
|
||||
cx.spawn(|picker, mut cx| async move {
|
||||
picker
|
||||
.update(&mut cx, |picker, cx| {
|
||||
let delegate = picker.delegate_mut();
|
||||
let delegate = &mut picker.delegate;
|
||||
delegate.matching_member_indices.clear();
|
||||
delegate
|
||||
.matching_member_indices
|
||||
@ -412,8 +399,7 @@ impl PickerDelegate for ChannelModalDelegate {
|
||||
async {
|
||||
let users = search_users.await?;
|
||||
picker.update(&mut cx, |picker, cx| {
|
||||
let delegate = picker.delegate_mut();
|
||||
delegate.matching_users = users;
|
||||
picker.delegate.matching_users = users;
|
||||
cx.notify();
|
||||
})?;
|
||||
anyhow::Ok(())
|
||||
@ -445,138 +431,142 @@ impl PickerDelegate for ChannelModalDelegate {
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>) {
|
||||
cx.emit(PickerEvent::Dismiss);
|
||||
self.channel_modal
|
||||
.update(cx, |_, cx| {
|
||||
cx.emit(DismissEvent);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
mouse_state: &mut MouseState,
|
||||
selected: bool,
|
||||
cx: &gpui::AppContext,
|
||||
) -> AnyElement<Picker<Self>> {
|
||||
let full_theme = &theme::current(cx);
|
||||
let theme = &full_theme.collab_panel.channel_modal;
|
||||
let tabbed_modal = &full_theme.collab_panel.tabbed_modal;
|
||||
let (user, role) = self.user_at_index(ix).unwrap();
|
||||
let request_status = self.member_status(user.id, cx);
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
None
|
||||
// let full_theme = &theme::current(cx);
|
||||
// let theme = &full_theme.collab_panel.channel_modal;
|
||||
// let tabbed_modal = &full_theme.collab_panel.tabbed_modal;
|
||||
// let (user, role) = self.user_at_index(ix).unwrap();
|
||||
// let request_status = self.member_status(user.id, cx);
|
||||
|
||||
let style = tabbed_modal
|
||||
.picker
|
||||
.item
|
||||
.in_state(selected)
|
||||
.style_for(mouse_state);
|
||||
// let style = tabbed_modal
|
||||
// .picker
|
||||
// .item
|
||||
// .in_state(selected)
|
||||
// .style_for(mouse_state);
|
||||
|
||||
let in_manage = matches!(self.mode, Mode::ManageMembers);
|
||||
// let in_manage = matches!(self.mode, Mode::ManageMembers);
|
||||
|
||||
let mut result = Flex::row()
|
||||
.with_children(user.avatar.clone().map(|avatar| {
|
||||
Image::from_data(avatar)
|
||||
.with_style(theme.contact_avatar)
|
||||
.aligned()
|
||||
.left()
|
||||
}))
|
||||
.with_child(
|
||||
Label::new(user.github_login.clone(), style.label.clone())
|
||||
.contained()
|
||||
.with_style(theme.contact_username)
|
||||
.aligned()
|
||||
.left(),
|
||||
)
|
||||
.with_children({
|
||||
(in_manage && request_status == Some(proto::channel_member::Kind::Invitee)).then(
|
||||
|| {
|
||||
Label::new("Invited", theme.member_tag.text.clone())
|
||||
.contained()
|
||||
.with_style(theme.member_tag.container)
|
||||
.aligned()
|
||||
.left()
|
||||
},
|
||||
)
|
||||
})
|
||||
.with_children(if in_manage && role == Some(ChannelRole::Admin) {
|
||||
Some(
|
||||
Label::new("Admin", theme.member_tag.text.clone())
|
||||
.contained()
|
||||
.with_style(theme.member_tag.container)
|
||||
.aligned()
|
||||
.left(),
|
||||
)
|
||||
} else if in_manage && role == Some(ChannelRole::Guest) {
|
||||
Some(
|
||||
Label::new("Guest", theme.member_tag.text.clone())
|
||||
.contained()
|
||||
.with_style(theme.member_tag.container)
|
||||
.aligned()
|
||||
.left(),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
})
|
||||
.with_children({
|
||||
let svg = match self.mode {
|
||||
Mode::ManageMembers => Some(
|
||||
Svg::new("icons/ellipsis.svg")
|
||||
.with_color(theme.member_icon.color)
|
||||
.constrained()
|
||||
.with_width(theme.member_icon.icon_width)
|
||||
.aligned()
|
||||
.constrained()
|
||||
.with_width(theme.member_icon.button_width)
|
||||
.with_height(theme.member_icon.button_width)
|
||||
.contained()
|
||||
.with_style(theme.member_icon.container),
|
||||
),
|
||||
Mode::InviteMembers => match request_status {
|
||||
Some(proto::channel_member::Kind::Member) => Some(
|
||||
Svg::new("icons/check.svg")
|
||||
.with_color(theme.member_icon.color)
|
||||
.constrained()
|
||||
.with_width(theme.member_icon.icon_width)
|
||||
.aligned()
|
||||
.constrained()
|
||||
.with_width(theme.member_icon.button_width)
|
||||
.with_height(theme.member_icon.button_width)
|
||||
.contained()
|
||||
.with_style(theme.member_icon.container),
|
||||
),
|
||||
Some(proto::channel_member::Kind::Invitee) => Some(
|
||||
Svg::new("icons/check.svg")
|
||||
.with_color(theme.invitee_icon.color)
|
||||
.constrained()
|
||||
.with_width(theme.invitee_icon.icon_width)
|
||||
.aligned()
|
||||
.constrained()
|
||||
.with_width(theme.invitee_icon.button_width)
|
||||
.with_height(theme.invitee_icon.button_width)
|
||||
.contained()
|
||||
.with_style(theme.invitee_icon.container),
|
||||
),
|
||||
Some(proto::channel_member::Kind::AncestorMember) | None => None,
|
||||
},
|
||||
};
|
||||
// let mut result = Flex::row()
|
||||
// .with_children(user.avatar.clone().map(|avatar| {
|
||||
// Image::from_data(avatar)
|
||||
// .with_style(theme.contact_avatar)
|
||||
// .aligned()
|
||||
// .left()
|
||||
// }))
|
||||
// .with_child(
|
||||
// Label::new(user.github_login.clone(), style.label.clone())
|
||||
// .contained()
|
||||
// .with_style(theme.contact_username)
|
||||
// .aligned()
|
||||
// .left(),
|
||||
// )
|
||||
// .with_children({
|
||||
// (in_manage && request_status == Some(proto::channel_member::Kind::Invitee)).then(
|
||||
// || {
|
||||
// Label::new("Invited", theme.member_tag.text.clone())
|
||||
// .contained()
|
||||
// .with_style(theme.member_tag.container)
|
||||
// .aligned()
|
||||
// .left()
|
||||
// },
|
||||
// )
|
||||
// })
|
||||
// .with_children(if in_manage && role == Some(ChannelRole::Admin) {
|
||||
// Some(
|
||||
// Label::new("Admin", theme.member_tag.text.clone())
|
||||
// .contained()
|
||||
// .with_style(theme.member_tag.container)
|
||||
// .aligned()
|
||||
// .left(),
|
||||
// )
|
||||
// } else if in_manage && role == Some(ChannelRole::Guest) {
|
||||
// Some(
|
||||
// Label::new("Guest", theme.member_tag.text.clone())
|
||||
// .contained()
|
||||
// .with_style(theme.member_tag.container)
|
||||
// .aligned()
|
||||
// .left(),
|
||||
// )
|
||||
// } else {
|
||||
// None
|
||||
// })
|
||||
// .with_children({
|
||||
// let svg = match self.mode {
|
||||
// Mode::ManageMembers => Some(
|
||||
// Svg::new("icons/ellipsis.svg")
|
||||
// .with_color(theme.member_icon.color)
|
||||
// .constrained()
|
||||
// .with_width(theme.member_icon.icon_width)
|
||||
// .aligned()
|
||||
// .constrained()
|
||||
// .with_width(theme.member_icon.button_width)
|
||||
// .with_height(theme.member_icon.button_width)
|
||||
// .contained()
|
||||
// .with_style(theme.member_icon.container),
|
||||
// ),
|
||||
// Mode::InviteMembers => match request_status {
|
||||
// Some(proto::channel_member::Kind::Member) => Some(
|
||||
// Svg::new("icons/check.svg")
|
||||
// .with_color(theme.member_icon.color)
|
||||
// .constrained()
|
||||
// .with_width(theme.member_icon.icon_width)
|
||||
// .aligned()
|
||||
// .constrained()
|
||||
// .with_width(theme.member_icon.button_width)
|
||||
// .with_height(theme.member_icon.button_width)
|
||||
// .contained()
|
||||
// .with_style(theme.member_icon.container),
|
||||
// ),
|
||||
// Some(proto::channel_member::Kind::Invitee) => Some(
|
||||
// Svg::new("icons/check.svg")
|
||||
// .with_color(theme.invitee_icon.color)
|
||||
// .constrained()
|
||||
// .with_width(theme.invitee_icon.icon_width)
|
||||
// .aligned()
|
||||
// .constrained()
|
||||
// .with_width(theme.invitee_icon.button_width)
|
||||
// .with_height(theme.invitee_icon.button_width)
|
||||
// .contained()
|
||||
// .with_style(theme.invitee_icon.container),
|
||||
// ),
|
||||
// Some(proto::channel_member::Kind::AncestorMember) | None => None,
|
||||
// },
|
||||
// };
|
||||
|
||||
svg.map(|svg| svg.aligned().flex_float().into_any())
|
||||
})
|
||||
.contained()
|
||||
.with_style(style.container)
|
||||
.constrained()
|
||||
.with_height(tabbed_modal.row_height)
|
||||
.into_any();
|
||||
// svg.map(|svg| svg.aligned().flex_float().into_any())
|
||||
// })
|
||||
// .contained()
|
||||
// .with_style(style.container)
|
||||
// .constrained()
|
||||
// .with_height(tabbed_modal.row_height)
|
||||
// .into_any();
|
||||
|
||||
if selected {
|
||||
result = Stack::new()
|
||||
.with_child(result)
|
||||
.with_child(
|
||||
ChildView::new(&self.context_menu, cx)
|
||||
.aligned()
|
||||
.top()
|
||||
.right(),
|
||||
)
|
||||
.into_any();
|
||||
}
|
||||
// if selected {
|
||||
// result = Stack::new()
|
||||
// .with_child(result)
|
||||
// .with_child(
|
||||
// ChildView::new(&self.context_menu, cx)
|
||||
// .aligned()
|
||||
// .top()
|
||||
// .right(),
|
||||
// )
|
||||
// .into_any();
|
||||
// }
|
||||
|
||||
result
|
||||
// result
|
||||
}
|
||||
}
|
||||
|
||||
@ -623,7 +613,7 @@ impl ChannelModalDelegate {
|
||||
cx.spawn(|picker, mut cx| async move {
|
||||
update.await?;
|
||||
picker.update(&mut cx, |picker, cx| {
|
||||
let this = picker.delegate_mut();
|
||||
let this = &mut picker.delegate;
|
||||
if let Some(member) = this.members.iter_mut().find(|m| m.user.id == user.id) {
|
||||
member.role = new_role;
|
||||
}
|
||||
@ -644,7 +634,7 @@ impl ChannelModalDelegate {
|
||||
cx.spawn(|picker, mut cx| async move {
|
||||
update.await?;
|
||||
picker.update(&mut cx, |picker, cx| {
|
||||
let this = picker.delegate_mut();
|
||||
let this = &mut picker.delegate;
|
||||
if let Some(ix) = this.members.iter_mut().position(|m| m.user.id == user_id) {
|
||||
this.members.remove(ix);
|
||||
this.matching_member_indices.retain_mut(|member_ix| {
|
||||
@ -683,7 +673,7 @@ impl ChannelModalDelegate {
|
||||
kind: proto::channel_member::Kind::Invitee,
|
||||
role: ChannelRole::Member,
|
||||
};
|
||||
let members = &mut this.delegate_mut().members;
|
||||
let members = &mut this.delegate.members;
|
||||
match members.binary_search_by_key(&new_member.sort_key(), |k| k.sort_key()) {
|
||||
Ok(ix) | Err(ix) => members.insert(ix, new_member),
|
||||
}
|
||||
@ -695,23 +685,23 @@ impl ChannelModalDelegate {
|
||||
}
|
||||
|
||||
fn show_context_menu(&mut self, role: ChannelRole, cx: &mut ViewContext<Picker<Self>>) {
|
||||
self.context_menu.update(cx, |context_menu, cx| {
|
||||
context_menu.show(
|
||||
Default::default(),
|
||||
AnchorCorner::TopRight,
|
||||
vec![
|
||||
ContextMenuItem::action("Remove", RemoveMember),
|
||||
ContextMenuItem::action(
|
||||
if role == ChannelRole::Admin {
|
||||
"Make non-admin"
|
||||
} else {
|
||||
"Make admin"
|
||||
},
|
||||
ToggleMemberAdmin,
|
||||
),
|
||||
],
|
||||
cx,
|
||||
)
|
||||
})
|
||||
// self.context_menu.update(cx, |context_menu, cx| {
|
||||
// context_menu.show(
|
||||
// Default::default(),
|
||||
// AnchorCorner::TopRight,
|
||||
// vec![
|
||||
// ContextMenuItem::action("Remove", RemoveMember),
|
||||
// ContextMenuItem::action(
|
||||
// if role == ChannelRole::Admin {
|
||||
// "Make non-admin"
|
||||
// } else {
|
||||
// "Make admin"
|
||||
// },
|
||||
// ToggleMemberAdmin,
|
||||
// ),
|
||||
// ],
|
||||
// cx,
|
||||
// )
|
||||
// })
|
||||
}
|
||||
}
|
||||
|
@ -31,9 +31,9 @@ use std::sync::Arc;
use call::ActiveCall;
use client::{Client, UserStore};
use gpui::{
div, px, rems, AppContext, Div, Element, InteractiveElement, IntoElement, Model, MouseButton,
ParentElement, Render, RenderOnce, Stateful, StatefulInteractiveElement, Styled, Subscription,
ViewContext, VisualContext, WeakView, WindowBounds,
actions, div, px, rems, AppContext, Div, Element, InteractiveElement, IntoElement, Model,
MouseButton, ParentElement, Render, RenderOnce, Stateful, StatefulInteractiveElement, Styled,
Subscription, ViewContext, VisualContext, WeakView, WindowBounds,
};
use project::{Project, RepositoryEntry};
use theme::ActiveTheme;
@ -49,6 +49,14 @@ use crate::face_pile::FacePile;
const MAX_PROJECT_NAME_LENGTH: usize = 40;
const MAX_BRANCH_NAME_LENGTH: usize = 40;

actions!(
ShareProject,
UnshareProject,
ToggleUserMenu,
ToggleProjectMenu,
SwitchBranch
);

// actions!(
// collab,
// [
|
||||
@ -91,37 +99,23 @@ impl Render for CollabTitlebarItem {
|
||||
type Element = Stateful<Div>;
|
||||
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
|
||||
let is_in_room = self
|
||||
.workspace
|
||||
.update(cx, |this, cx| this.call_state().is_in_room(cx))
|
||||
.unwrap_or_default();
|
||||
let room = ActiveCall::global(cx).read(cx).room();
|
||||
let is_in_room = room.is_some();
|
||||
let is_shared = is_in_room && self.project.read(cx).is_shared();
|
||||
let current_user = self.user_store.read(cx).current_user();
|
||||
let client = self.client.clone();
|
||||
let users = self
|
||||
.workspace
|
||||
.update(cx, |this, cx| this.call_state().remote_participants(cx))
|
||||
.log_err()
|
||||
.flatten();
|
||||
let is_muted = self
|
||||
.workspace
|
||||
.update(cx, |this, cx| this.call_state().is_muted(cx))
|
||||
.log_err()
|
||||
.flatten()
|
||||
.unwrap_or_default();
|
||||
let is_deafened = self
|
||||
.workspace
|
||||
.update(cx, |this, cx| this.call_state().is_deafened(cx))
|
||||
.log_err()
|
||||
.flatten()
|
||||
.unwrap_or_default();
|
||||
let speakers_icon = if self
|
||||
.workspace
|
||||
.update(cx, |this, cx| this.call_state().is_deafened(cx))
|
||||
.log_err()
|
||||
.flatten()
|
||||
.unwrap_or_default()
|
||||
{
|
||||
let remote_participants = room.map(|room| {
|
||||
room.read(cx)
|
||||
.remote_participants()
|
||||
.values()
|
||||
.map(|participant| (participant.user.clone(), participant.peer_id))
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
let is_muted = room.map_or(false, |room| room.read(cx).is_muted(cx));
|
||||
let is_deafened = room
|
||||
.and_then(|room| room.read(cx).is_deafened())
|
||||
.unwrap_or(false);
|
||||
let speakers_icon = if is_deafened {
|
||||
ui::Icon::AudioOff
|
||||
} else {
|
||||
ui::Icon::AudioOn
|
||||
@ -157,7 +151,7 @@ impl Render for CollabTitlebarItem {
|
||||
.children(self.render_project_branch(cx)),
|
||||
)
|
||||
.when_some(
|
||||
users.zip(current_user.clone()),
|
||||
remote_participants.zip(current_user.clone()),
|
||||
|this, (remote_participants, current_user)| {
|
||||
let mut pile = FacePile::default();
|
||||
pile.extend(
|
||||
@ -168,25 +162,30 @@ impl Render for CollabTitlebarItem {
|
||||
div().child(Avatar::data(avatar.clone())).into_any_element()
|
||||
})
|
||||
.into_iter()
|
||||
.chain(remote_participants.into_iter().flat_map(|(user, peer_id)| {
|
||||
user.avatar.as_ref().map(|avatar| {
|
||||
div()
|
||||
.child(
|
||||
Avatar::data(avatar.clone()).into_element().into_any(),
|
||||
)
|
||||
.on_mouse_down(MouseButton::Left, {
|
||||
let workspace = workspace.clone();
|
||||
move |_, cx| {
|
||||
workspace
|
||||
.update(cx, |this, cx| {
|
||||
this.open_shared_screen(peer_id, cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
})
|
||||
.into_any_element()
|
||||
})
|
||||
})),
|
||||
.chain(remote_participants.into_iter().filter_map(
|
||||
|(user, peer_id)| {
|
||||
let avatar = user.avatar.as_ref()?;
|
||||
Some(
|
||||
div()
|
||||
.child(
|
||||
Avatar::data(avatar.clone())
|
||||
.into_element()
|
||||
.into_any(),
|
||||
)
|
||||
.on_mouse_down(MouseButton::Left, {
|
||||
let workspace = workspace.clone();
|
||||
move |_, cx| {
|
||||
workspace
|
||||
.update(cx, |this, cx| {
|
||||
this.open_shared_screen(peer_id, cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
})
|
||||
.into_any_element(),
|
||||
)
|
||||
},
|
||||
)),
|
||||
);
|
||||
this.child(pile.render(cx))
|
||||
},
|
||||
@ -204,20 +203,24 @@ impl Render for CollabTitlebarItem {
|
||||
"toggle_sharing",
|
||||
if is_shared { "Unshare" } else { "Share" },
|
||||
)
|
||||
.style(ButtonStyle::Subtle),
|
||||
.style(ButtonStyle::Subtle)
|
||||
.on_click(cx.listener(
|
||||
move |this, _, cx| {
|
||||
if is_shared {
|
||||
this.unshare_project(&Default::default(), cx);
|
||||
} else {
|
||||
this.share_project(&Default::default(), cx);
|
||||
}
|
||||
},
|
||||
)),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("leave-call", ui::Icon::Exit)
|
||||
.style(ButtonStyle::Subtle)
|
||||
.on_click({
|
||||
let workspace = workspace.clone();
|
||||
move |_, cx| {
|
||||
workspace
|
||||
.update(cx, |this, cx| {
|
||||
this.call_state().hang_up(cx).detach();
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
.on_click(move |_, cx| {
|
||||
ActiveCall::global(cx)
|
||||
.update(cx, |call, cx| call.hang_up(cx))
|
||||
.detach_and_log_err(cx);
|
||||
}),
|
||||
),
|
||||
)
|
||||
@ -235,15 +238,8 @@ impl Render for CollabTitlebarItem {
|
||||
)
|
||||
.style(ButtonStyle::Subtle)
|
||||
.selected(is_muted)
|
||||
.on_click({
|
||||
let workspace = workspace.clone();
|
||||
move |_, cx| {
|
||||
workspace
|
||||
.update(cx, |this, cx| {
|
||||
this.call_state().toggle_mute(cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
.on_click(move |_, cx| {
|
||||
crate::toggle_mute(&Default::default(), cx)
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
@ -258,26 +254,15 @@ impl Render for CollabTitlebarItem {
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click({
|
||||
let workspace = workspace.clone();
|
||||
move |_, cx| {
|
||||
workspace
|
||||
.update(cx, |this, cx| {
|
||||
this.call_state().toggle_deafen(cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
.on_click(move |_, cx| {
|
||||
crate::toggle_mute(&Default::default(), cx)
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("screen-share", ui::Icon::Screen)
|
||||
.style(ButtonStyle::Subtle)
|
||||
.on_click(move |_, cx| {
|
||||
workspace
|
||||
.update(cx, |this, cx| {
|
||||
this.call_state().toggle_screen_share(cx);
|
||||
})
|
||||
.log_err();
|
||||
crate::toggle_screen_sharing(&Default::default(), cx)
|
||||
}),
|
||||
)
|
||||
.pl_2(),
|
||||
@ -451,46 +436,19 @@ impl CollabTitlebarItem {
|
||||
// render_project_owner -> resolve if you are in a room -> Option<foo>
|
||||
|
||||
pub fn render_project_owner(&self, cx: &mut ViewContext<Self>) -> Option<impl Element> {
|
||||
// TODO: We can't finish implementing this until project sharing works
|
||||
// - [ ] Show the project owner when the project is remote (maybe done)
|
||||
// - [x] Show the project owner when the project is local
|
||||
// - [ ] Show the project owner with a lock icon when the project is local and unshared
|
||||
|
||||
let remote_id = self.project.read(cx).remote_id();
|
||||
let is_local = remote_id.is_none();
|
||||
let is_shared = self.project.read(cx).is_shared();
|
||||
let (user_name, participant_index) = {
|
||||
if let Some(host) = self.project.read(cx).host() {
|
||||
debug_assert!(!is_local);
|
||||
let (Some(host_user), Some(participant_index)) = (
|
||||
self.user_store.read(cx).get_cached_user(host.user_id),
|
||||
self.user_store
|
||||
.read(cx)
|
||||
.participant_indices()
|
||||
.get(&host.user_id),
|
||||
) else {
|
||||
return None;
|
||||
};
|
||||
(host_user.github_login.clone(), participant_index.0)
|
||||
} else {
|
||||
debug_assert!(is_local);
|
||||
let name = self
|
||||
.user_store
|
||||
.read(cx)
|
||||
.current_user()
|
||||
.map(|user| user.github_login.clone())?;
|
||||
(name, 0)
|
||||
}
|
||||
};
|
||||
let host = self.project.read(cx).host()?;
|
||||
let host = self.user_store.read(cx).get_cached_user(host.user_id)?;
|
||||
let participant_index = self
|
||||
.user_store
|
||||
.read(cx)
|
||||
.participant_indices()
|
||||
.get(&host.id)?;
|
||||
Some(
|
||||
div().border().border_color(gpui::red()).child(
|
||||
Button::new(
|
||||
"project_owner_trigger",
|
||||
format!("{user_name} ({})", !is_shared),
|
||||
)
|
||||
.color(Color::Player(participant_index))
|
||||
.style(ButtonStyle::Subtle)
|
||||
.tooltip(move |cx| Tooltip::text("Toggle following", cx)),
|
||||
Button::new("project_owner_trigger", host.github_login.clone())
|
||||
.color(Color::Player(participant_index.0))
|
||||
.style(ButtonStyle::Subtle)
|
||||
.tooltip(move |cx| Tooltip::text("Toggle following", cx)),
|
||||
),
|
||||
)
|
||||
}
|
||||
@ -730,21 +688,21 @@ impl CollabTitlebarItem {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
// fn share_project(&mut self, _: &ShareProject, cx: &mut ViewContext<Self>) {
|
||||
// let active_call = ActiveCall::global(cx);
|
||||
// let project = self.project.clone();
|
||||
// active_call
|
||||
// .update(cx, |call, cx| call.share_project(project, cx))
|
||||
// .detach_and_log_err(cx);
|
||||
// }
|
||||
fn share_project(&mut self, _: &ShareProject, cx: &mut ViewContext<Self>) {
|
||||
let active_call = ActiveCall::global(cx);
|
||||
let project = self.project.clone();
|
||||
active_call
|
||||
.update(cx, |call, cx| call.share_project(project, cx))
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
// fn unshare_project(&mut self, _: &UnshareProject, cx: &mut ViewContext<Self>) {
|
||||
// let active_call = ActiveCall::global(cx);
|
||||
// let project = self.project.clone();
|
||||
// active_call
|
||||
// .update(cx, |call, cx| call.unshare_project(project, cx))
|
||||
// .log_err();
|
||||
// }
|
||||
fn unshare_project(&mut self, _: &UnshareProject, cx: &mut ViewContext<Self>) {
|
||||
let active_call = ActiveCall::global(cx);
|
||||
let project = self.project.clone();
|
||||
active_call
|
||||
.update(cx, |call, cx| call.unshare_project(project, cx))
|
||||
.log_err();
|
||||
}
|
||||
|
||||
// pub fn toggle_user_menu(&mut self, _: &ToggleUserMenu, cx: &mut ViewContext<Self>) {
|
||||
// self.user_menu.update(cx, |user_menu, cx| {
|
||||
|
@ -9,22 +9,21 @@ mod panel_settings;

use std::{rc::Rc, sync::Arc};

use call::{report_call_event_for_room, ActiveCall, Room};
pub use collab_panel::CollabPanel;
pub use collab_titlebar_item::CollabTitlebarItem;
use gpui::{
point, AppContext, GlobalPixels, Pixels, PlatformDisplay, Size, WindowBounds, WindowKind,
WindowOptions,
actions, point, AppContext, GlobalPixels, Pixels, PlatformDisplay, Size, Task, WindowBounds,
WindowKind, WindowOptions,
};
pub use panel_settings::{
ChatPanelSettings, CollaborationPanelSettings, NotificationPanelSettings,
};
use settings::Settings;
use util::ResultExt;
use workspace::AppState;

// actions!(
// collab,
// [ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall]
// );
actions!(ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall);

pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
CollaborationPanelSettings::register(cx);
@ -42,61 +41,61 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
// cx.add_global_action(toggle_deafen);
}

// pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
|
||||
// let call = ActiveCall::global(cx).read(cx);
|
||||
// if let Some(room) = call.room().cloned() {
|
||||
// let client = call.client();
|
||||
// let toggle_screen_sharing = room.update(cx, |room, cx| {
|
||||
// if room.is_screen_sharing() {
|
||||
// report_call_event_for_room(
|
||||
// "disable screen share",
|
||||
// room.id(),
|
||||
// room.channel_id(),
|
||||
// &client,
|
||||
// cx,
|
||||
// );
|
||||
// Task::ready(room.unshare_screen(cx))
|
||||
// } else {
|
||||
// report_call_event_for_room(
|
||||
// "enable screen share",
|
||||
// room.id(),
|
||||
// room.channel_id(),
|
||||
// &client,
|
||||
// cx,
|
||||
// );
|
||||
// room.share_screen(cx)
|
||||
// }
|
||||
// });
|
||||
// toggle_screen_sharing.detach_and_log_err(cx);
|
||||
// }
|
||||
// }
|
||||
pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
|
||||
let call = ActiveCall::global(cx).read(cx);
|
||||
if let Some(room) = call.room().cloned() {
|
||||
let client = call.client();
|
||||
let toggle_screen_sharing = room.update(cx, |room, cx| {
|
||||
if room.is_screen_sharing() {
|
||||
report_call_event_for_room(
|
||||
"disable screen share",
|
||||
room.id(),
|
||||
room.channel_id(),
|
||||
&client,
|
||||
cx,
|
||||
);
|
||||
Task::ready(room.unshare_screen(cx))
|
||||
} else {
|
||||
report_call_event_for_room(
|
||||
"enable screen share",
|
||||
room.id(),
|
||||
room.channel_id(),
|
||||
&client,
|
||||
cx,
|
||||
);
|
||||
room.share_screen(cx)
|
||||
}
|
||||
});
|
||||
toggle_screen_sharing.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
|
||||
// pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
//     let call = ActiveCall::global(cx).read(cx);
//     if let Some(room) = call.room().cloned() {
//         let client = call.client();
//         room.update(cx, |room, cx| {
//             let operation = if room.is_muted(cx) {
//                 "enable microphone"
//             } else {
//                 "disable microphone"
//             };
//             report_call_event_for_room(operation, room.id(), room.channel_id(), &client, cx);
pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
    let call = ActiveCall::global(cx).read(cx);
    if let Some(room) = call.room().cloned() {
        let client = call.client();
        room.update(cx, |room, cx| {
            let operation = if room.is_muted(cx) {
                "enable microphone"
            } else {
                "disable microphone"
            };
            report_call_event_for_room(operation, room.id(), room.channel_id(), &client, cx);

//             room.toggle_mute(cx)
//         })
//         .map(|task| task.detach_and_log_err(cx))
//         .log_err();
//     }
// }
            room.toggle_mute(cx)
        })
        .map(|task| task.detach_and_log_err(cx))
        .log_err();
    }
}
// pub fn toggle_deafen(_: &ToggleDeafen, cx: &mut AppContext) {
//     if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
//         room.update(cx, Room::toggle_deafen)
//             .map(|task| task.detach_and_log_err(cx))
//             .log_err();
//     }
// }
pub fn toggle_deafen(_: &ToggleDeafen, cx: &mut AppContext) {
    if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
        room.update(cx, Room::toggle_deafen)
            .map(|task| task.detach_and_log_err(cx))
            .log_err();
    }
}

fn notification_window_options(
    screen: Rc<dyn PlatformDisplay>,
@ -311,7 +311,11 @@ impl PickerDelegate for CommandPaletteDelegate {
command.name.clone(),
r#match.positions.clone(),
))
.children(KeyBinding::for_action(&*command.action, cx)),
.children(KeyBinding::for_action_in(
    &*command.action,
    &self.previous_focus_handle,
    cx,
)),
),
)
}
@ -45,6 +45,6 @@ fs = { path = "../fs", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
language = { package = "language2", path = "../language2", features = ["test-support"] }
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
@ -1002,229 +1002,231 @@ async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
|
||||
}
|
||||
}
|
||||
|
||||
// #[cfg(test)]
|
||||
// mod tests {
|
||||
// use super::*;
|
||||
// use gpui::{executor::Deterministic, TestAppContext};
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gpui::TestAppContext;
|
||||
|
||||
// #[gpui::test(iterations = 10)]
|
||||
// async fn test_buffer_management(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
|
||||
// deterministic.forbid_parking();
|
||||
// let (copilot, mut lsp) = Copilot::fake(cx);
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_buffer_management(cx: &mut TestAppContext) {
|
||||
let (copilot, mut lsp) = Copilot::fake(cx);
|
||||
|
||||
// let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Hello"));
|
||||
// let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.id()).parse().unwrap();
|
||||
// copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidOpenTextDocumentParams {
|
||||
// text_document: lsp::TextDocumentItem::new(
|
||||
// buffer_1_uri.clone(),
|
||||
// "plaintext".into(),
|
||||
// 0,
|
||||
// "Hello".into()
|
||||
// ),
|
||||
// }
|
||||
// );
|
||||
let buffer_1 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "Hello"));
|
||||
let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64())
|
||||
.parse()
|
||||
.unwrap();
|
||||
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
.await,
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem::new(
|
||||
buffer_1_uri.clone(),
|
||||
"plaintext".into(),
|
||||
0,
|
||||
"Hello".into()
|
||||
),
|
||||
}
|
||||
);
|
||||
|
||||
// let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Goodbye"));
|
||||
// let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.id()).parse().unwrap();
|
||||
// copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidOpenTextDocumentParams {
|
||||
// text_document: lsp::TextDocumentItem::new(
|
||||
// buffer_2_uri.clone(),
|
||||
// "plaintext".into(),
|
||||
// 0,
|
||||
// "Goodbye".into()
|
||||
// ),
|
||||
// }
|
||||
// );
|
||||
let buffer_2 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "Goodbye"));
|
||||
let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.entity_id().as_u64())
|
||||
.parse()
|
||||
.unwrap();
|
||||
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
.await,
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem::new(
|
||||
buffer_2_uri.clone(),
|
||||
"plaintext".into(),
|
||||
0,
|
||||
"Goodbye".into()
|
||||
),
|
||||
}
|
||||
);
|
||||
|
||||
// buffer_1.update(cx, |buffer, cx| buffer.edit([(5..5, " world")], None, cx));
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidChangeTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidChangeTextDocumentParams {
|
||||
// text_document: lsp::VersionedTextDocumentIdentifier::new(buffer_1_uri.clone(), 1),
|
||||
// content_changes: vec![lsp::TextDocumentContentChangeEvent {
|
||||
// range: Some(lsp::Range::new(
|
||||
// lsp::Position::new(0, 5),
|
||||
// lsp::Position::new(0, 5)
|
||||
// )),
|
||||
// range_length: None,
|
||||
// text: " world".into(),
|
||||
// }],
|
||||
// }
|
||||
// );
|
||||
buffer_1.update(cx, |buffer, cx| buffer.edit([(5..5, " world")], None, cx));
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidChangeTextDocument>()
|
||||
.await,
|
||||
lsp::DidChangeTextDocumentParams {
|
||||
text_document: lsp::VersionedTextDocumentIdentifier::new(buffer_1_uri.clone(), 1),
|
||||
content_changes: vec![lsp::TextDocumentContentChangeEvent {
|
||||
range: Some(lsp::Range::new(
|
||||
lsp::Position::new(0, 5),
|
||||
lsp::Position::new(0, 5)
|
||||
)),
|
||||
range_length: None,
|
||||
text: " world".into(),
|
||||
}],
|
||||
}
|
||||
);
|
||||
|
||||
// // Ensure updates to the file are reflected in the LSP.
|
||||
// buffer_1
|
||||
// .update(cx, |buffer, cx| {
|
||||
// buffer.file_updated(
|
||||
// Arc::new(File {
|
||||
// abs_path: "/root/child/buffer-1".into(),
|
||||
// path: Path::new("child/buffer-1").into(),
|
||||
// }),
|
||||
// cx,
|
||||
// )
|
||||
// })
|
||||
// .await;
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidCloseTextDocumentParams {
|
||||
// text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri),
|
||||
// }
|
||||
// );
|
||||
// let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap();
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidOpenTextDocumentParams {
|
||||
// text_document: lsp::TextDocumentItem::new(
|
||||
// buffer_1_uri.clone(),
|
||||
// "plaintext".into(),
|
||||
// 1,
|
||||
// "Hello world".into()
|
||||
// ),
|
||||
// }
|
||||
// );
|
||||
// Ensure updates to the file are reflected in the LSP.
|
||||
buffer_1.update(cx, |buffer, cx| {
|
||||
buffer.file_updated(
|
||||
Arc::new(File {
|
||||
abs_path: "/root/child/buffer-1".into(),
|
||||
path: Path::new("child/buffer-1").into(),
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
|
||||
.await,
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri),
|
||||
}
|
||||
);
|
||||
let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap();
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
.await,
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem::new(
|
||||
buffer_1_uri.clone(),
|
||||
"plaintext".into(),
|
||||
1,
|
||||
"Hello world".into()
|
||||
),
|
||||
}
|
||||
);
|
||||
|
||||
// // Ensure all previously-registered buffers are closed when signing out.
|
||||
// lsp.handle_request::<request::SignOut, _, _>(|_, _| async {
|
||||
// Ok(request::SignOutResult {})
|
||||
// });
|
||||
// copilot
|
||||
// .update(cx, |copilot, cx| copilot.sign_out(cx))
|
||||
// .await
|
||||
// .unwrap();
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidCloseTextDocumentParams {
|
||||
// text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri.clone()),
|
||||
// }
|
||||
// );
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidCloseTextDocumentParams {
|
||||
// text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri.clone()),
|
||||
// }
|
||||
// );
|
||||
// Ensure all previously-registered buffers are closed when signing out.
|
||||
lsp.handle_request::<request::SignOut, _, _>(|_, _| async {
|
||||
Ok(request::SignOutResult {})
|
||||
});
|
||||
copilot
|
||||
.update(cx, |copilot, cx| copilot.sign_out(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
// todo!() po: these notifications now happen in reverse order?
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
|
||||
.await,
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri.clone()),
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
|
||||
.await,
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri.clone()),
|
||||
}
|
||||
);
|
||||
|
||||
// // Ensure all previously-registered buffers are re-opened when signing in.
|
||||
// lsp.handle_request::<request::SignInInitiate, _, _>(|_, _| async {
|
||||
// Ok(request::SignInInitiateResult::AlreadySignedIn {
|
||||
// user: "user-1".into(),
|
||||
// })
|
||||
// });
|
||||
// copilot
|
||||
// .update(cx, |copilot, cx| copilot.sign_in(cx))
|
||||
// .await
|
||||
// .unwrap();
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidOpenTextDocumentParams {
|
||||
// text_document: lsp::TextDocumentItem::new(
|
||||
// buffer_2_uri.clone(),
|
||||
// "plaintext".into(),
|
||||
// 0,
|
||||
// "Goodbye".into()
|
||||
// ),
|
||||
// }
|
||||
// );
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidOpenTextDocumentParams {
|
||||
// text_document: lsp::TextDocumentItem::new(
|
||||
// buffer_1_uri.clone(),
|
||||
// "plaintext".into(),
|
||||
// 0,
|
||||
// "Hello world".into()
|
||||
// ),
|
||||
// }
|
||||
// );
|
||||
// Ensure all previously-registered buffers are re-opened when signing in.
|
||||
lsp.handle_request::<request::SignInInitiate, _, _>(|_, _| async {
|
||||
Ok(request::SignInInitiateResult::AlreadySignedIn {
|
||||
user: "user-1".into(),
|
||||
})
|
||||
});
|
||||
copilot
|
||||
.update(cx, |copilot, cx| copilot.sign_in(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// // Dropping a buffer causes it to be closed on the LSP side as well.
|
||||
// cx.update(|_| drop(buffer_2));
|
||||
// assert_eq!(
|
||||
// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
|
||||
// .await,
|
||||
// lsp::DidCloseTextDocumentParams {
|
||||
// text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri),
|
||||
// }
|
||||
// );
|
||||
// }
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
.await,
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem::new(
|
||||
buffer_1_uri.clone(),
|
||||
"plaintext".into(),
|
||||
0,
|
||||
"Hello world".into()
|
||||
),
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
.await,
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem::new(
|
||||
buffer_2_uri.clone(),
|
||||
"plaintext".into(),
|
||||
0,
|
||||
"Goodbye".into()
|
||||
),
|
||||
}
|
||||
);
|
||||
// Dropping a buffer causes it to be closed on the LSP side as well.
|
||||
cx.update(|_| drop(buffer_2));
|
||||
assert_eq!(
|
||||
lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
|
||||
.await,
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// struct File {
|
||||
// abs_path: PathBuf,
|
||||
// path: Arc<Path>,
|
||||
// }
|
||||
struct File {
|
||||
abs_path: PathBuf,
|
||||
path: Arc<Path>,
|
||||
}
|
||||
|
||||
// impl language2::File for File {
|
||||
// fn as_local(&self) -> Option<&dyn language2::LocalFile> {
|
||||
// Some(self)
|
||||
// }
|
||||
impl language::File for File {
|
||||
fn as_local(&self) -> Option<&dyn language::LocalFile> {
|
||||
Some(self)
|
||||
}
|
||||
|
||||
// fn mtime(&self) -> std::time::SystemTime {
|
||||
// unimplemented!()
|
||||
// }
|
||||
fn mtime(&self) -> std::time::SystemTime {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
// fn path(&self) -> &Arc<Path> {
|
||||
// &self.path
|
||||
// }
|
||||
fn path(&self) -> &Arc<Path> {
|
||||
&self.path
|
||||
}
|
||||
|
||||
// fn full_path(&self, _: &AppContext) -> PathBuf {
|
||||
// unimplemented!()
|
||||
// }
|
||||
fn full_path(&self, _: &AppContext) -> PathBuf {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
// fn file_name<'a>(&'a self, _: &'a AppContext) -> &'a std::ffi::OsStr {
|
||||
// unimplemented!()
|
||||
// }
|
||||
fn file_name<'a>(&'a self, _: &'a AppContext) -> &'a std::ffi::OsStr {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
// fn is_deleted(&self) -> bool {
|
||||
// unimplemented!()
|
||||
// }
|
||||
fn is_deleted(&self) -> bool {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
// fn as_any(&self) -> &dyn std::any::Any {
|
||||
// unimplemented!()
|
||||
// }
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
// fn to_proto(&self) -> rpc::proto::File {
|
||||
// unimplemented!()
|
||||
// }
|
||||
fn to_proto(&self) -> rpc::proto::File {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
// fn worktree_id(&self) -> usize {
|
||||
// 0
|
||||
// }
|
||||
// }
|
||||
fn worktree_id(&self) -> usize {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
// impl language::LocalFile for File {
|
||||
// fn abs_path(&self, _: &AppContext) -> PathBuf {
|
||||
// self.abs_path.clone()
|
||||
// }
|
||||
impl language::LocalFile for File {
|
||||
fn abs_path(&self, _: &AppContext) -> PathBuf {
|
||||
self.abs_path.clone()
|
||||
}
|
||||
|
||||
// fn load(&self, _: &AppContext) -> Task<Result<String>> {
|
||||
// unimplemented!()
|
||||
// }
|
||||
fn load(&self, _: &AppContext) -> Task<Result<String>> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
// fn buffer_reloaded(
|
||||
// &self,
|
||||
// _: u64,
|
||||
// _: &clock::Global,
|
||||
// _: language::RopeFingerprint,
|
||||
// _: language::LineEnding,
|
||||
// _: std::time::SystemTime,
|
||||
// _: &mut AppContext,
|
||||
// ) {
|
||||
// unimplemented!()
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
fn buffer_reloaded(
|
||||
&self,
|
||||
_: u64,
|
||||
_: &clock::Global,
|
||||
_: language::RopeFingerprint,
|
||||
_: language::LineEnding,
|
||||
_: std::time::SystemTime,
|
||||
_: &mut AppContext,
|
||||
) {
|
||||
unimplemented!()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
27
crates/copilot_button2/Cargo.toml
Normal file
@ -0,0 +1,27 @@
[package]
name = "copilot_button2"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/copilot_button.rs"
doctest = false

[dependencies]
copilot = { package = "copilot2", path = "../copilot2" }
editor = { package = "editor2", path = "../editor2" }
fs = { package = "fs2", path = "../fs2" }
zed-actions = { package="zed_actions2", path = "../zed_actions2"}
gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
workspace = { package = "workspace2", path = "../workspace2" }
anyhow.workspace = true
smol.workspace = true
futures.workspace = true

[dev-dependencies]
editor = { package = "editor2", path = "../editor2", features = ["test-support"] }
370
crates/copilot_button2/src/copilot_button.rs
Normal file
@ -0,0 +1,370 @@
|
||||
#![allow(unused)]
|
||||
use anyhow::Result;
|
||||
use copilot::{Copilot, SignOut, Status};
|
||||
use editor::{scroll::autoscroll::Autoscroll, Editor};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
div, Action, AnchorCorner, AppContext, AsyncAppContext, AsyncWindowContext, Div, Entity,
|
||||
ParentElement, Render, Subscription, View, ViewContext, WeakView, WindowContext,
|
||||
};
|
||||
use language::{
|
||||
language_settings::{self, all_language_settings, AllLanguageSettings},
|
||||
File, Language,
|
||||
};
|
||||
use settings::{update_settings_file, Settings, SettingsStore};
|
||||
use std::{path::Path, sync::Arc};
|
||||
use util::{paths, ResultExt};
|
||||
use workspace::{
|
||||
create_and_open_local_file,
|
||||
item::ItemHandle,
|
||||
ui::{
|
||||
popover_menu, ButtonCommon, Clickable, ContextMenu, Icon, IconButton, PopoverMenu, Tooltip,
|
||||
},
|
||||
StatusItemView, Toast, Workspace,
|
||||
};
|
||||
use zed_actions::OpenBrowser;
|
||||
|
||||
const COPILOT_SETTINGS_URL: &str = "https://github.com/settings/copilot";
|
||||
const COPILOT_STARTING_TOAST_ID: usize = 1337;
|
||||
const COPILOT_ERROR_TOAST_ID: usize = 1338;
|
||||
|
||||
pub struct CopilotButton {
|
||||
editor_subscription: Option<(Subscription, usize)>,
|
||||
editor_enabled: Option<bool>,
|
||||
language: Option<Arc<Language>>,
|
||||
file: Option<Arc<dyn File>>,
|
||||
fs: Arc<dyn Fs>,
|
||||
}
|
||||
|
||||
impl Render for CopilotButton {
|
||||
type Element = Div;
|
||||
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
|
||||
let all_language_settings = all_language_settings(None, cx);
|
||||
if !all_language_settings.copilot.feature_enabled {
|
||||
return div();
|
||||
}
|
||||
|
||||
let Some(copilot) = Copilot::global(cx) else {
|
||||
return div();
|
||||
};
|
||||
let status = copilot.read(cx).status();
|
||||
|
||||
let enabled = self
|
||||
.editor_enabled
|
||||
.unwrap_or_else(|| all_language_settings.copilot_enabled(None, None));
|
||||
|
||||
let icon = match status {
|
||||
Status::Error(_) => Icon::CopilotError,
|
||||
Status::Authorized => {
|
||||
if enabled {
|
||||
Icon::Copilot
|
||||
} else {
|
||||
Icon::CopilotDisabled
|
||||
}
|
||||
}
|
||||
_ => Icon::CopilotInit,
|
||||
};
|
||||
|
||||
if let Status::Error(e) = status {
|
||||
return div().child(
|
||||
IconButton::new("copilot-error", icon)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
if let Some(workspace) = cx.window_handle().downcast::<Workspace>() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace.show_toast(
|
||||
Toast::new(
|
||||
COPILOT_ERROR_TOAST_ID,
|
||||
format!("Copilot can't be started: {}", e),
|
||||
)
|
||||
.on_click(
|
||||
"Reinstall Copilot",
|
||||
|cx| {
|
||||
if let Some(copilot) = Copilot::global(cx) {
|
||||
copilot
|
||||
.update(cx, |copilot, cx| copilot.reinstall(cx))
|
||||
.detach();
|
||||
}
|
||||
},
|
||||
),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
}))
|
||||
.tooltip(|cx| Tooltip::text("GitHub Copilot", cx)),
|
||||
);
|
||||
}
|
||||
let this = cx.view().clone();
|
||||
|
||||
div().child(
|
||||
popover_menu("copilot")
|
||||
.menu(move |cx| match status {
|
||||
Status::Authorized => this.update(cx, |this, cx| this.build_copilot_menu(cx)),
|
||||
_ => this.update(cx, |this, cx| this.build_copilot_start_menu(cx)),
|
||||
})
|
||||
.anchor(AnchorCorner::BottomRight)
|
||||
.trigger(
|
||||
IconButton::new("copilot-icon", icon)
|
||||
.tooltip(|cx| Tooltip::text("GitHub Copilot", cx)),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl CopilotButton {
|
||||
pub fn new(fs: Arc<dyn Fs>, cx: &mut ViewContext<Self>) -> Self {
|
||||
Copilot::global(cx).map(|copilot| cx.observe(&copilot, |_, _, cx| cx.notify()).detach());
|
||||
|
||||
cx.observe_global::<SettingsStore>(move |_, cx| cx.notify())
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
editor_subscription: None,
|
||||
editor_enabled: None,
|
||||
language: None,
|
||||
file: None,
|
||||
fs,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_copilot_start_menu(&mut self, cx: &mut ViewContext<Self>) -> View<ContextMenu> {
|
||||
let fs = self.fs.clone();
|
||||
ContextMenu::build(cx, |menu, cx| {
|
||||
menu.entry("Sign In", initiate_sign_in)
|
||||
.entry("Disable Copilot", move |cx| hide_copilot(fs.clone(), cx))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn build_copilot_menu(&mut self, cx: &mut ViewContext<Self>) -> View<ContextMenu> {
|
||||
let fs = self.fs.clone();
|
||||
|
||||
return ContextMenu::build(cx, move |mut menu, cx| {
|
||||
if let Some(language) = self.language.clone() {
|
||||
let fs = fs.clone();
|
||||
let language_enabled =
|
||||
language_settings::language_settings(Some(&language), None, cx)
|
||||
.show_copilot_suggestions;
|
||||
|
||||
menu = menu.entry(
|
||||
format!(
|
||||
"{} Suggestions for {}",
|
||||
if language_enabled { "Hide" } else { "Show" },
|
||||
language.name()
|
||||
),
|
||||
move |cx| toggle_copilot_for_language(language.clone(), fs.clone(), cx),
|
||||
);
|
||||
}
|
||||
|
||||
let settings = AllLanguageSettings::get_global(cx);
|
||||
|
||||
if let Some(file) = &self.file {
|
||||
let path = file.path().clone();
|
||||
let path_enabled = settings.copilot_enabled_for_path(&path);
|
||||
|
||||
menu = menu.entry(
|
||||
format!(
|
||||
"{} Suggestions for This Path",
|
||||
if path_enabled { "Hide" } else { "Show" }
|
||||
),
|
||||
move |cx| {
|
||||
if let Some(workspace) = cx.window_handle().downcast::<Workspace>() {
|
||||
if let Ok(workspace) = workspace.root_view(cx) {
|
||||
let workspace = workspace.downgrade();
|
||||
cx.spawn(|cx| {
|
||||
configure_disabled_globs(
|
||||
workspace,
|
||||
path_enabled.then_some(path.clone()),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
let globally_enabled = settings.copilot_enabled(None, None);
|
||||
menu.entry(
|
||||
if globally_enabled {
|
||||
"Hide Suggestions for All Files"
|
||||
} else {
|
||||
"Show Suggestions for All Files"
|
||||
},
|
||||
move |cx| toggle_copilot_globally(fs.clone(), cx),
|
||||
)
|
||||
.separator()
|
||||
.link(
|
||||
"Copilot Settings",
|
||||
OpenBrowser {
|
||||
url: COPILOT_SETTINGS_URL.to_string(),
|
||||
}
|
||||
.boxed_clone(),
|
||||
)
|
||||
.action("Sign Out", SignOut.boxed_clone())
|
||||
});
|
||||
}
|
||||
|
||||
pub fn update_enabled(&mut self, editor: View<Editor>, cx: &mut ViewContext<Self>) {
|
||||
let editor = editor.read(cx);
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let suggestion_anchor = editor.selections.newest_anchor().start;
|
||||
let language = snapshot.language_at(suggestion_anchor);
|
||||
let file = snapshot.file_at(suggestion_anchor).cloned();
|
||||
|
||||
self.editor_enabled = Some(
|
||||
all_language_settings(self.file.as_ref(), cx)
|
||||
.copilot_enabled(language, file.as_ref().map(|file| file.path().as_ref())),
|
||||
);
|
||||
self.language = language.cloned();
|
||||
self.file = file;
|
||||
|
||||
cx.notify()
|
||||
}
|
||||
}
|
||||
|
||||
impl StatusItemView for CopilotButton {
|
||||
fn set_active_pane_item(&mut self, item: Option<&dyn ItemHandle>, cx: &mut ViewContext<Self>) {
|
||||
if let Some(editor) = item.map(|item| item.act_as::<Editor>(cx)).flatten() {
|
||||
self.editor_subscription = Some((
|
||||
cx.observe(&editor, Self::update_enabled),
|
||||
editor.entity_id().as_u64() as usize,
|
||||
));
|
||||
self.update_enabled(editor, cx);
|
||||
} else {
|
||||
self.language = None;
|
||||
self.editor_subscription = None;
|
||||
self.editor_enabled = None;
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
async fn configure_disabled_globs(
|
||||
workspace: WeakView<Workspace>,
|
||||
path_to_disable: Option<Arc<Path>>,
|
||||
mut cx: AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let settings_editor = workspace
|
||||
.update(&mut cx, |_, cx| {
|
||||
create_and_open_local_file(&paths::SETTINGS, cx, || {
|
||||
settings::initial_user_settings_content().as_ref().into()
|
||||
})
|
||||
})?
|
||||
.await?
|
||||
.downcast::<Editor>()
|
||||
.unwrap();
|
||||
|
||||
settings_editor.downgrade().update(&mut cx, |item, cx| {
|
||||
let text = item.buffer().read(cx).snapshot(cx).text();
|
||||
|
||||
let settings = cx.global::<SettingsStore>();
|
||||
let edits = settings.edits_for_update::<AllLanguageSettings>(&text, |file| {
|
||||
let copilot = file.copilot.get_or_insert_with(Default::default);
|
||||
let globs = copilot.disabled_globs.get_or_insert_with(|| {
|
||||
settings
|
||||
.get::<AllLanguageSettings>(None)
|
||||
.copilot
|
||||
.disabled_globs
|
||||
.iter()
|
||||
.map(|glob| glob.glob().to_string())
|
||||
.collect()
|
||||
});
|
||||
|
||||
if let Some(path_to_disable) = &path_to_disable {
|
||||
globs.push(path_to_disable.to_string_lossy().into_owned());
|
||||
} else {
|
||||
globs.clear();
|
||||
}
|
||||
});
|
||||
|
||||
if !edits.is_empty() {
|
||||
item.change_selections(Some(Autoscroll::newest()), cx, |selections| {
|
||||
selections.select_ranges(edits.iter().map(|e| e.0.clone()));
|
||||
});
|
||||
|
||||
// When *enabling* a path, don't actually perform an edit, just select the range.
|
||||
if path_to_disable.is_some() {
|
||||
item.edit(edits.iter().cloned(), cx);
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
|
||||
fn toggle_copilot_globally(fs: Arc<dyn Fs>, cx: &mut AppContext) {
|
||||
let show_copilot_suggestions = all_language_settings(None, cx).copilot_enabled(None, None);
|
||||
update_settings_file::<AllLanguageSettings>(fs, cx, move |file| {
|
||||
file.defaults.show_copilot_suggestions = Some((!show_copilot_suggestions).into())
|
||||
});
|
||||
}
|
||||
|
||||
fn toggle_copilot_for_language(language: Arc<Language>, fs: Arc<dyn Fs>, cx: &mut AppContext) {
|
||||
let show_copilot_suggestions =
|
||||
all_language_settings(None, cx).copilot_enabled(Some(&language), None);
|
||||
update_settings_file::<AllLanguageSettings>(fs, cx, move |file| {
|
||||
file.languages
|
||||
.entry(language.name())
|
||||
.or_default()
|
||||
.show_copilot_suggestions = Some(!show_copilot_suggestions);
|
||||
});
|
||||
}
|
||||
|
||||
fn hide_copilot(fs: Arc<dyn Fs>, cx: &mut AppContext) {
|
||||
update_settings_file::<AllLanguageSettings>(fs, cx, move |file| {
|
||||
file.features.get_or_insert(Default::default()).copilot = Some(false);
|
||||
});
|
||||
}
|
||||
|
||||
fn initiate_sign_in(cx: &mut WindowContext) {
|
||||
let Some(copilot) = Copilot::global(cx) else {
|
||||
return;
|
||||
};
|
||||
let status = copilot.read(cx).status();
|
||||
|
||||
match status {
|
||||
Status::Starting { task } => {
|
||||
let Some(workspace) = cx.window_handle().downcast::<Workspace>() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Ok(workspace) = workspace.update(cx, |workspace, cx| {
|
||||
workspace.show_toast(
|
||||
Toast::new(COPILOT_STARTING_TOAST_ID, "Copilot is starting..."),
|
||||
cx,
|
||||
);
|
||||
workspace.weak_handle()
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
task.await;
|
||||
if let Some(copilot) = cx.update(|_, cx| Copilot::global(cx)).ok().flatten() {
|
||||
workspace
|
||||
.update(&mut cx, |workspace, cx| match copilot.read(cx).status() {
|
||||
Status::Authorized => workspace.show_toast(
|
||||
Toast::new(COPILOT_STARTING_TOAST_ID, "Copilot has started!"),
|
||||
cx,
|
||||
),
|
||||
_ => {
|
||||
workspace.dismiss_toast(COPILOT_STARTING_TOAST_ID, cx);
|
||||
copilot
|
||||
.update(cx, |copilot, cx| copilot.sign_in(cx))
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
_ => {
|
||||
copilot
|
||||
.update(cx, |copilot, cx| copilot.sign_in(cx))
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
}
|
@ -774,24 +774,39 @@ fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
|
||||
Arc::new(move |_| {
|
||||
h_stack()
|
||||
.id("diagnostic header")
|
||||
.gap_3()
|
||||
.bg(gpui::red())
|
||||
.map(|stack| {
|
||||
let icon = if diagnostic.severity == DiagnosticSeverity::ERROR {
|
||||
IconElement::new(Icon::XCircle).color(Color::Error)
|
||||
} else {
|
||||
IconElement::new(Icon::ExclamationTriangle).color(Color::Warning)
|
||||
};
|
||||
|
||||
stack.child(div().pl_8().child(icon))
|
||||
})
|
||||
.when_some(diagnostic.source.as_ref(), |stack, source| {
|
||||
stack.child(Label::new(format!("{source}:")).color(Color::Accent))
|
||||
})
|
||||
.child(HighlightedLabel::new(message.clone(), highlights.clone()))
|
||||
.when_some(diagnostic.code.as_ref(), |stack, code| {
|
||||
stack.child(Label::new(code.clone()))
|
||||
})
|
||||
.py_2()
|
||||
.pl_10()
|
||||
.pr_5()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.gap_2()
|
||||
.child(
|
||||
h_stack()
|
||||
.gap_3()
|
||||
.map(|stack| {
|
||||
let icon = if diagnostic.severity == DiagnosticSeverity::ERROR {
|
||||
IconElement::new(Icon::XCircle).color(Color::Error)
|
||||
} else {
|
||||
IconElement::new(Icon::ExclamationTriangle).color(Color::Warning)
|
||||
};
|
||||
stack.child(icon)
|
||||
})
|
||||
.child(
|
||||
h_stack()
|
||||
.gap_1()
|
||||
.child(HighlightedLabel::new(message.clone(), highlights.clone()))
|
||||
.when_some(diagnostic.code.as_ref(), |stack, code| {
|
||||
stack.child(Label::new(format!("({code})")).color(Color::Muted))
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_stack()
|
||||
.gap_1()
|
||||
.when_some(diagnostic.source.as_ref(), |stack, source| {
|
||||
stack.child(Label::new(format!("{source}")).color(Color::Muted))
|
||||
}),
|
||||
)
|
||||
.into_any_element()
|
||||
})
|
||||
}
|
||||
@ -802,11 +817,22 @@ pub(crate) fn render_summary(summary: &DiagnosticSummary) -> AnyElement {
|
||||
label.into_any_element()
|
||||
} else {
|
||||
h_stack()
|
||||
.bg(gpui::red())
|
||||
.child(IconElement::new(Icon::XCircle))
|
||||
.child(Label::new(summary.error_count.to_string()))
|
||||
.child(IconElement::new(Icon::ExclamationTriangle))
|
||||
.child(Label::new(summary.warning_count.to_string()))
|
||||
.gap_1()
|
||||
.when(summary.error_count > 0, |then| {
|
||||
then.child(
|
||||
h_stack()
|
||||
.gap_1()
|
||||
.child(IconElement::new(Icon::XCircle).color(Color::Error))
|
||||
.child(Label::new(summary.error_count.to_string())),
|
||||
)
|
||||
})
|
||||
.when(summary.warning_count > 0, |then| {
|
||||
then.child(
|
||||
h_stack()
|
||||
.child(IconElement::new(Icon::ExclamationTriangle).color(Color::Warning))
|
||||
.child(Label::new(summary.warning_count.to_string())),
|
||||
)
|
||||
})
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
File diff suppressed because it is too large
@ -741,49 +741,48 @@ impl WrapSnapshot {
|
||||
}
|
||||
|
||||
fn check_invariants(&self) {
|
||||
// todo!()
|
||||
// #[cfg(test)]
|
||||
// {
|
||||
// assert_eq!(
|
||||
// TabPoint::from(self.transforms.summary().input.lines),
|
||||
// self.tab_snapshot.max_point()
|
||||
// );
|
||||
#[cfg(test)]
|
||||
{
|
||||
assert_eq!(
|
||||
TabPoint::from(self.transforms.summary().input.lines),
|
||||
self.tab_snapshot.max_point()
|
||||
);
|
||||
|
||||
// {
|
||||
// let mut transforms = self.transforms.cursor::<()>().peekable();
|
||||
// while let Some(transform) = transforms.next() {
|
||||
// if let Some(next_transform) = transforms.peek() {
|
||||
// assert!(transform.is_isomorphic() != next_transform.is_isomorphic());
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
{
|
||||
let mut transforms = self.transforms.cursor::<()>().peekable();
|
||||
while let Some(transform) = transforms.next() {
|
||||
if let Some(next_transform) = transforms.peek() {
|
||||
assert!(transform.is_isomorphic() != next_transform.is_isomorphic());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// let text = language::Rope::from(self.text().as_str());
|
||||
// let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0);
|
||||
// let mut expected_buffer_rows = Vec::new();
|
||||
// let mut prev_tab_row = 0;
|
||||
// for display_row in 0..=self.max_point().row() {
|
||||
// let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
|
||||
// if tab_point.row() == prev_tab_row && display_row != 0 {
|
||||
// expected_buffer_rows.push(None);
|
||||
// } else {
|
||||
// expected_buffer_rows.push(input_buffer_rows.next().unwrap());
|
||||
// }
|
||||
let text = language::Rope::from(self.text().as_str());
|
||||
let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0);
|
||||
let mut expected_buffer_rows = Vec::new();
|
||||
let mut prev_tab_row = 0;
|
||||
for display_row in 0..=self.max_point().row() {
|
||||
let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
|
||||
if tab_point.row() == prev_tab_row && display_row != 0 {
|
||||
expected_buffer_rows.push(None);
|
||||
} else {
|
||||
expected_buffer_rows.push(input_buffer_rows.next().unwrap());
|
||||
}
|
||||
|
||||
// prev_tab_row = tab_point.row();
|
||||
// assert_eq!(self.line_len(display_row), text.line_len(display_row));
|
||||
// }
|
||||
prev_tab_row = tab_point.row();
|
||||
assert_eq!(self.line_len(display_row), text.line_len(display_row));
|
||||
}
|
||||
|
||||
// for start_display_row in 0..expected_buffer_rows.len() {
|
||||
// assert_eq!(
|
||||
// self.buffer_rows(start_display_row as u32)
|
||||
// .collect::<Vec<_>>(),
|
||||
// &expected_buffer_rows[start_display_row..],
|
||||
// "invalid buffer_rows({}..)",
|
||||
// start_display_row
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
for start_display_row in 0..expected_buffer_rows.len() {
|
||||
assert_eq!(
|
||||
self.buffer_rows(start_display_row as u32)
|
||||
.collect::<Vec<_>>(),
|
||||
&expected_buffer_rows[start_display_row..],
|
||||
"invalid buffer_rows({}..)",
|
||||
start_display_row
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1026,337 +1025,334 @@ fn consolidate_wrap_edits(edits: &mut Vec<WrapEdit>) {
|
||||
}
|
||||
}
|
||||
|
||||
// #[cfg(test)]
|
||||
// mod tests {
|
||||
// use super::*;
|
||||
// use crate::{
|
||||
// display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
|
||||
// MultiBuffer,
|
||||
// };
|
||||
// use gpui::test::observe;
|
||||
// use rand::prelude::*;
|
||||
// use settings::SettingsStore;
|
||||
// use smol::stream::StreamExt;
|
||||
// use std::{cmp, env, num::NonZeroU32};
|
||||
// use text::Rope;
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
|
||||
MultiBuffer,
|
||||
};
|
||||
use gpui::{font, px, test::observe, Platform};
|
||||
use rand::prelude::*;
|
||||
use settings::SettingsStore;
|
||||
use smol::stream::StreamExt;
|
||||
use std::{cmp, env, num::NonZeroU32};
|
||||
use text::Rope;
|
||||
use theme::LoadThemes;
|
||||
|
||||
// #[gpui::test(iterations = 100)]
|
||||
// async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
|
||||
// init_test(cx);
|
||||
#[gpui::test(iterations = 100)]
|
||||
async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
|
||||
// todo!() this test is flaky
|
||||
init_test(cx);
|
||||
|
||||
// cx.foreground().set_block_on_ticks(0..=50);
|
||||
// let operations = env::var("OPERATIONS")
|
||||
// .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
// .unwrap_or(10);
|
||||
cx.background_executor.set_block_on_ticks(0..=50);
|
||||
let operations = env::var("OPERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
|
||||
.unwrap_or(10);
|
||||
|
||||
// let font_cache = cx.font_cache().clone();
|
||||
// let font_system = cx.platform().fonts();
|
||||
// let mut wrap_width = if rng.gen_bool(0.1) {
|
||||
// None
|
||||
// } else {
|
||||
// Some(rng.gen_range(0.0..=1000.0))
|
||||
// };
|
||||
// let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap();
|
||||
// let family_id = font_cache
|
||||
// .load_family(&["Helvetica"], &Default::default())
|
||||
// .unwrap();
|
||||
// let font_id = font_cache
|
||||
// .select_font(family_id, &Default::default())
|
||||
// .unwrap();
|
||||
// let font_size = 14.0;
|
||||
let text_system = cx.read(|cx| cx.text_system().clone());
|
||||
let mut wrap_width = if rng.gen_bool(0.1) {
|
||||
None
|
||||
} else {
|
||||
Some(px(rng.gen_range(0.0..=1000.0)))
|
||||
};
|
||||
let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap();
|
||||
let font = font("Helvetica");
|
||||
let font_id = text_system.font_id(&font).unwrap();
|
||||
let font_size = px(14.0);
|
||||
|
||||
// log::info!("Tab size: {}", tab_size);
|
||||
// log::info!("Wrap width: {:?}", wrap_width);
|
||||
log::info!("Tab size: {}", tab_size);
|
||||
log::info!("Wrap width: {:?}", wrap_width);
|
||||
|
||||
// let buffer = cx.update(|cx| {
|
||||
// if rng.gen() {
|
||||
// MultiBuffer::build_random(&mut rng, cx)
|
||||
// } else {
|
||||
// let len = rng.gen_range(0..10);
|
||||
// let text = util::RandomCharIter::new(&mut rng)
|
||||
// .take(len)
|
||||
// .collect::<String>();
|
||||
// MultiBuffer::build_simple(&text, cx)
|
||||
// }
|
||||
// });
|
||||
// let mut buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
// log::info!("Buffer text: {:?}", buffer_snapshot.text());
|
||||
// let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
// log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
// let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone());
|
||||
// log::info!("FoldMap text: {:?}", fold_snapshot.text());
|
||||
// let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
|
||||
// let tabs_snapshot = tab_map.set_max_expansion_column(32);
|
||||
// log::info!("TabMap text: {:?}", tabs_snapshot.text());
|
||||
let buffer = cx.update(|cx| {
|
||||
if rng.gen() {
|
||||
MultiBuffer::build_random(&mut rng, cx)
|
||||
} else {
|
||||
let len = rng.gen_range(0..10);
|
||||
let text = util::RandomCharIter::new(&mut rng)
|
||||
.take(len)
|
||||
.collect::<String>();
|
||||
MultiBuffer::build_simple(&text, cx)
|
||||
}
|
||||
});
|
||||
let mut buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
|
||||
log::info!("Buffer text: {:?}", buffer_snapshot.text());
|
||||
let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone());
|
||||
log::info!("FoldMap text: {:?}", fold_snapshot.text());
|
||||
let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
|
||||
let tabs_snapshot = tab_map.set_max_expansion_column(32);
|
||||
log::info!("TabMap text: {:?}", tabs_snapshot.text());
|
||||
|
||||
// let mut line_wrapper = LineWrapper::new(font_id, font_size, font_system);
|
||||
// let unwrapped_text = tabs_snapshot.text();
|
||||
// let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
|
||||
let mut line_wrapper = text_system.line_wrapper(font.clone(), font_size).unwrap();
|
||||
let unwrapped_text = tabs_snapshot.text();
|
||||
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
|
||||
|
||||
// let (wrap_map, _) =
|
||||
// cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font_id, font_size, wrap_width, cx));
|
||||
// let mut notifications = observe(&wrap_map, cx);
|
||||
let (wrap_map, _) =
|
||||
cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font, font_size, wrap_width, cx));
|
||||
let mut notifications = observe(&wrap_map, cx);
|
||||
|
||||
// if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
// notifications.next().await.unwrap();
|
||||
// }
|
||||
if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.next().await.unwrap();
|
||||
}
|
||||
|
||||
// let (initial_snapshot, _) = wrap_map.update(cx, |map, cx| {
|
||||
// assert!(!map.is_rewrapping());
|
||||
// map.sync(tabs_snapshot.clone(), Vec::new(), cx)
|
||||
// });
|
||||
let (initial_snapshot, _) = wrap_map.update(cx, |map, cx| {
|
||||
assert!(!map.is_rewrapping());
|
||||
map.sync(tabs_snapshot.clone(), Vec::new(), cx)
|
||||
});
|
||||
|
||||
// let actual_text = initial_snapshot.text();
|
||||
// assert_eq!(
|
||||
// actual_text, expected_text,
|
||||
// "unwrapped text is: {:?}",
|
||||
// unwrapped_text
|
||||
// );
|
||||
// log::info!("Wrapped text: {:?}", actual_text);
|
||||
let actual_text = initial_snapshot.text();
|
||||
assert_eq!(
|
||||
actual_text, expected_text,
|
||||
"unwrapped text is: {:?}",
|
||||
unwrapped_text
|
||||
);
|
||||
log::info!("Wrapped text: {:?}", actual_text);
|
||||
|
||||
// let mut next_inlay_id = 0;
|
||||
// let mut edits = Vec::new();
|
||||
// for _i in 0..operations {
|
||||
// log::info!("{} ==============================================", _i);
|
||||
let mut next_inlay_id = 0;
|
||||
let mut edits = Vec::new();
|
||||
for _i in 0..operations {
|
||||
log::info!("{} ==============================================", _i);
|
||||
|
||||
// let mut buffer_edits = Vec::new();
|
||||
// match rng.gen_range(0..=100) {
|
||||
// 0..=19 => {
|
||||
// wrap_width = if rng.gen_bool(0.2) {
|
||||
// None
|
||||
// } else {
|
||||
// Some(rng.gen_range(0.0..=1000.0))
|
||||
// };
|
||||
// log::info!("Setting wrap width to {:?}", wrap_width);
|
||||
// wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
|
||||
// }
|
||||
// 20..=39 => {
|
||||
// for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
|
||||
// let (tabs_snapshot, tab_edits) =
|
||||
// tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
// let (mut snapshot, wrap_edits) =
|
||||
// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
|
||||
// snapshot.check_invariants();
|
||||
// snapshot.verify_chunks(&mut rng);
|
||||
// edits.push((snapshot, wrap_edits));
|
||||
// }
|
||||
// }
|
||||
// 40..=59 => {
|
||||
// let (inlay_snapshot, inlay_edits) =
|
||||
// inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
|
||||
// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
// let (tabs_snapshot, tab_edits) =
|
||||
// tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
// let (mut snapshot, wrap_edits) =
|
||||
// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
|
||||
// snapshot.check_invariants();
|
||||
// snapshot.verify_chunks(&mut rng);
|
||||
// edits.push((snapshot, wrap_edits));
|
||||
// }
|
||||
// _ => {
|
||||
// buffer.update(cx, |buffer, cx| {
|
||||
// let subscription = buffer.subscribe();
|
||||
// let edit_count = rng.gen_range(1..=5);
|
||||
// buffer.randomly_mutate(&mut rng, edit_count, cx);
|
||||
// buffer_snapshot = buffer.snapshot(cx);
|
||||
// buffer_edits.extend(subscription.consume());
|
||||
// });
|
||||
// }
|
||||
// }
|
||||
let mut buffer_edits = Vec::new();
|
||||
match rng.gen_range(0..=100) {
|
||||
0..=19 => {
|
||||
wrap_width = if rng.gen_bool(0.2) {
|
||||
None
|
||||
} else {
|
||||
Some(px(rng.gen_range(0.0..=1000.0)))
|
||||
};
|
||||
log::info!("Setting wrap width to {:?}", wrap_width);
|
||||
wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
|
||||
}
|
||||
20..=39 => {
|
||||
for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
|
||||
let (tabs_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (mut snapshot, wrap_edits) =
|
||||
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
|
||||
snapshot.check_invariants();
|
||||
snapshot.verify_chunks(&mut rng);
|
||||
edits.push((snapshot, wrap_edits));
|
||||
}
|
||||
}
|
||||
40..=59 => {
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
let (tabs_snapshot, tab_edits) =
|
||||
tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
let (mut snapshot, wrap_edits) =
|
||||
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
|
||||
snapshot.check_invariants();
|
||||
snapshot.verify_chunks(&mut rng);
|
||||
edits.push((snapshot, wrap_edits));
|
||||
}
|
||||
_ => {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
let subscription = buffer.subscribe();
|
||||
let edit_count = rng.gen_range(1..=5);
|
||||
buffer.randomly_mutate(&mut rng, edit_count, cx);
|
||||
buffer_snapshot = buffer.snapshot(cx);
|
||||
buffer_edits.extend(subscription.consume());
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// log::info!("Buffer text: {:?}", buffer_snapshot.text());
|
||||
// let (inlay_snapshot, inlay_edits) =
|
||||
// inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
|
||||
// log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
// log::info!("FoldMap text: {:?}", fold_snapshot.text());
|
||||
// let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
// log::info!("TabMap text: {:?}", tabs_snapshot.text());
|
||||
log::info!("Buffer text: {:?}", buffer_snapshot.text());
|
||||
let (inlay_snapshot, inlay_edits) =
|
||||
inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
|
||||
log::info!("InlayMap text: {:?}", inlay_snapshot.text());
|
||||
let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
|
||||
log::info!("FoldMap text: {:?}", fold_snapshot.text());
|
||||
let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
|
||||
log::info!("TabMap text: {:?}", tabs_snapshot.text());
|
||||
|
||||
// let unwrapped_text = tabs_snapshot.text();
|
||||
// let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
|
||||
// let (mut snapshot, wrap_edits) =
|
||||
// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), tab_edits, cx));
|
||||
// snapshot.check_invariants();
|
||||
// snapshot.verify_chunks(&mut rng);
|
||||
// edits.push((snapshot, wrap_edits));
|
||||
let unwrapped_text = tabs_snapshot.text();
|
||||
let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
|
||||
let (mut snapshot, wrap_edits) =
|
||||
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), tab_edits, cx));
|
||||
snapshot.check_invariants();
|
||||
snapshot.verify_chunks(&mut rng);
|
||||
edits.push((snapshot, wrap_edits));
|
||||
|
||||
// if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
|
||||
// log::info!("Waiting for wrapping to finish");
|
||||
// while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
// notifications.next().await.unwrap();
|
||||
// }
|
||||
// wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
|
||||
// }
|
||||
if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
|
||||
log::info!("Waiting for wrapping to finish");
|
||||
while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.next().await.unwrap();
|
||||
}
|
||||
wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
|
||||
}
|
||||
|
||||
// if !wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
// let (mut wrapped_snapshot, wrap_edits) =
|
||||
// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
|
||||
// let actual_text = wrapped_snapshot.text();
|
||||
// let actual_longest_row = wrapped_snapshot.longest_row();
|
||||
// log::info!("Wrapping finished: {:?}", actual_text);
|
||||
// wrapped_snapshot.check_invariants();
|
||||
// wrapped_snapshot.verify_chunks(&mut rng);
|
||||
// edits.push((wrapped_snapshot.clone(), wrap_edits));
|
||||
// assert_eq!(
|
||||
// actual_text, expected_text,
|
||||
// "unwrapped text is: {:?}",
|
||||
// unwrapped_text
|
||||
// );
|
||||
if !wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
let (mut wrapped_snapshot, wrap_edits) =
|
||||
wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
|
||||
let actual_text = wrapped_snapshot.text();
|
||||
let actual_longest_row = wrapped_snapshot.longest_row();
|
||||
log::info!("Wrapping finished: {:?}", actual_text);
|
||||
wrapped_snapshot.check_invariants();
|
||||
wrapped_snapshot.verify_chunks(&mut rng);
|
||||
edits.push((wrapped_snapshot.clone(), wrap_edits));
|
||||
assert_eq!(
|
||||
actual_text, expected_text,
|
||||
"unwrapped text is: {:?}",
|
||||
unwrapped_text
|
||||
);
|
||||
|
||||
// let mut summary = TextSummary::default();
|
||||
// for (ix, item) in wrapped_snapshot
|
||||
// .transforms
|
||||
// .items(&())
|
||||
// .into_iter()
|
||||
// .enumerate()
|
||||
// {
|
||||
// summary += &item.summary.output;
|
||||
// log::info!("{} summary: {:?}", ix, item.summary.output,);
|
||||
// }
|
||||
let mut summary = TextSummary::default();
|
||||
for (ix, item) in wrapped_snapshot
|
||||
.transforms
|
||||
.items(&())
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
{
|
||||
summary += &item.summary.output;
|
||||
log::info!("{} summary: {:?}", ix, item.summary.output,);
|
||||
}
|
||||
|
||||
// if tab_size.get() == 1
|
||||
// || !wrapped_snapshot
|
||||
// .tab_snapshot
|
||||
// .fold_snapshot
|
||||
// .text()
|
||||
// .contains('\t')
|
||||
// {
|
||||
// let mut expected_longest_rows = Vec::new();
|
||||
// let mut longest_line_len = -1;
|
||||
// for (row, line) in expected_text.split('\n').enumerate() {
|
||||
// let line_char_count = line.chars().count() as isize;
|
||||
// if line_char_count > longest_line_len {
|
||||
// expected_longest_rows.clear();
|
||||
// longest_line_len = line_char_count;
|
||||
// }
|
||||
// if line_char_count >= longest_line_len {
|
||||
// expected_longest_rows.push(row as u32);
|
||||
// }
|
||||
// }
|
||||
if tab_size.get() == 1
|
||||
|| !wrapped_snapshot
|
||||
.tab_snapshot
|
||||
.fold_snapshot
|
||||
.text()
|
||||
.contains('\t')
|
||||
{
|
||||
let mut expected_longest_rows = Vec::new();
|
||||
let mut longest_line_len = -1;
|
||||
for (row, line) in expected_text.split('\n').enumerate() {
|
||||
let line_char_count = line.chars().count() as isize;
|
||||
if line_char_count > longest_line_len {
|
||||
expected_longest_rows.clear();
|
||||
longest_line_len = line_char_count;
|
||||
}
|
||||
if line_char_count >= longest_line_len {
|
||||
expected_longest_rows.push(row as u32);
|
||||
}
|
||||
}
|
||||
|
||||
// assert!(
|
||||
// expected_longest_rows.contains(&actual_longest_row),
|
||||
// "incorrect longest row {}. expected {:?} with length {}",
|
||||
// actual_longest_row,
|
||||
// expected_longest_rows,
|
||||
// longest_line_len,
|
||||
// )
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
assert!(
|
||||
expected_longest_rows.contains(&actual_longest_row),
|
||||
"incorrect longest row {}. expected {:?} with length {}",
|
||||
actual_longest_row,
|
||||
expected_longest_rows,
|
||||
longest_line_len,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// let mut initial_text = Rope::from(initial_snapshot.text().as_str());
|
||||
// for (snapshot, patch) in edits {
|
||||
// let snapshot_text = Rope::from(snapshot.text().as_str());
|
||||
// for edit in &patch {
|
||||
// let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0));
|
||||
// let old_end = initial_text.point_to_offset(cmp::min(
|
||||
// Point::new(edit.new.start + edit.old.len() as u32, 0),
|
||||
// initial_text.max_point(),
|
||||
// ));
|
||||
// let new_start = snapshot_text.point_to_offset(Point::new(edit.new.start, 0));
|
||||
// let new_end = snapshot_text.point_to_offset(cmp::min(
|
||||
// Point::new(edit.new.end, 0),
|
||||
// snapshot_text.max_point(),
|
||||
// ));
|
||||
// let new_text = snapshot_text
|
||||
// .chunks_in_range(new_start..new_end)
|
||||
// .collect::<String>();
|
||||
let mut initial_text = Rope::from(initial_snapshot.text().as_str());
|
||||
for (snapshot, patch) in edits {
|
||||
let snapshot_text = Rope::from(snapshot.text().as_str());
|
||||
for edit in &patch {
|
||||
let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0));
|
||||
let old_end = initial_text.point_to_offset(cmp::min(
|
||||
Point::new(edit.new.start + edit.old.len() as u32, 0),
|
||||
initial_text.max_point(),
|
||||
));
|
||||
let new_start = snapshot_text.point_to_offset(Point::new(edit.new.start, 0));
|
||||
let new_end = snapshot_text.point_to_offset(cmp::min(
|
||||
Point::new(edit.new.end, 0),
|
||||
snapshot_text.max_point(),
|
||||
));
|
||||
let new_text = snapshot_text
|
||||
.chunks_in_range(new_start..new_end)
|
||||
.collect::<String>();
|
||||
|
||||
// initial_text.replace(old_start..old_end, &new_text);
|
||||
// }
|
||||
// assert_eq!(initial_text.to_string(), snapshot_text.to_string());
|
||||
// }
|
||||
initial_text.replace(old_start..old_end, &new_text);
|
||||
}
|
||||
assert_eq!(initial_text.to_string(), snapshot_text.to_string());
|
||||
}
|
||||
|
||||
// if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
// log::info!("Waiting for wrapping to finish");
|
||||
// while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
// notifications.next().await.unwrap();
|
||||
// }
|
||||
// }
|
||||
// wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
|
||||
// }
|
||||
if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
log::info!("Waiting for wrapping to finish");
|
||||
while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
|
||||
notifications.next().await.unwrap();
|
||||
}
|
||||
}
|
||||
wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
|
||||
}
|
||||
|
||||
// fn init_test(cx: &mut gpui::TestAppContext) {
|
||||
// cx.foreground().forbid_parking();
|
||||
// cx.update(|cx| {
|
||||
// cx.set_global(SettingsStore::test(cx));
|
||||
// theme::init((), cx);
|
||||
// });
|
||||
// }
|
||||
fn init_test(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
theme::init(LoadThemes::JustBase, cx);
|
||||
});
|
||||
}
|
||||
|
||||
// fn wrap_text(
// unwrapped_text: &str,
// wrap_width: Option<f32>,
// line_wrapper: &mut LineWrapper,
// ) -> String {
// if let Some(wrap_width) = wrap_width {
// let mut wrapped_text = String::new();
// for (row, line) in unwrapped_text.split('\n').enumerate() {
// if row > 0 {
// wrapped_text.push('\n')
// }
fn wrap_text(
unwrapped_text: &str,
wrap_width: Option<Pixels>,
line_wrapper: &mut LineWrapper,
) -> String {
if let Some(wrap_width) = wrap_width {
let mut wrapped_text = String::new();
for (row, line) in unwrapped_text.split('\n').enumerate() {
if row > 0 {
wrapped_text.push('\n')
}

// let mut prev_ix = 0;
// for boundary in line_wrapper.wrap_line(line, wrap_width) {
// wrapped_text.push_str(&line[prev_ix..boundary.ix]);
// wrapped_text.push('\n');
// wrapped_text.push_str(&" ".repeat(boundary.next_indent as usize));
// prev_ix = boundary.ix;
// }
// wrapped_text.push_str(&line[prev_ix..]);
// }
// wrapped_text
// } else {
// unwrapped_text.to_string()
// }
// }
let mut prev_ix = 0;
for boundary in line_wrapper.wrap_line(line, wrap_width) {
wrapped_text.push_str(&line[prev_ix..boundary.ix]);
wrapped_text.push('\n');
wrapped_text.push_str(&" ".repeat(boundary.next_indent as usize));
prev_ix = boundary.ix;
}
wrapped_text.push_str(&line[prev_ix..]);
}
wrapped_text
} else {
unwrapped_text.to_string()
}
}

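The new `wrap_text` helper above feeds each line through gpui's `LineWrapper` at a pixel `wrap_width`. As a rough illustration of the same greedy strategy, here is a self-contained sketch that wraps by character columns instead; `wrap_line_at` is a hypothetical stand-in for `LineWrapper::wrap_line` and ignores the indent handling shown above:

```rust
/// Byte offsets at which `line` should break, packing whole words greedily
/// (hypothetical column-based stand-in for LineWrapper::wrap_line).
fn wrap_line_at(line: &str, max_cols: usize) -> Vec<usize> {
    let mut boundaries = Vec::new();
    let (mut col, mut offset) = (0, 0);
    for word in line.split_inclusive(' ') {
        if col > 0 && col + word.len() > max_cols {
            boundaries.push(offset);
            col = 0;
        }
        col += word.len();
        offset += word.len();
    }
    boundaries
}

fn wrap_text_by_columns(unwrapped: &str, max_cols: usize) -> String {
    let mut wrapped = String::new();
    for (row, line) in unwrapped.split('\n').enumerate() {
        if row > 0 {
            wrapped.push('\n');
        }
        let mut prev_ix = 0;
        for ix in wrap_line_at(line, max_cols) {
            wrapped.push_str(&line[prev_ix..ix]);
            wrapped.push('\n');
            prev_ix = ix;
        }
        wrapped.push_str(&line[prev_ix..]);
    }
    wrapped
}

fn main() {
    assert_eq!(
        wrap_text_by_columns("one two three four", 9),
        "one two \nthree \nfour"
    );
}
```
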
// impl WrapSnapshot {
|
||||
// pub fn text(&self) -> String {
|
||||
// self.text_chunks(0).collect()
|
||||
// }
|
||||
impl WrapSnapshot {
|
||||
pub fn text(&self) -> String {
|
||||
self.text_chunks(0).collect()
|
||||
}
|
||||
|
||||
// pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
|
||||
// self.chunks(
|
||||
// wrap_row..self.max_point().row() + 1,
|
||||
// false,
|
||||
// Highlights::default(),
|
||||
// )
|
||||
// .map(|h| h.text)
|
||||
// }
|
||||
pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
|
||||
self.chunks(
|
||||
wrap_row..self.max_point().row() + 1,
|
||||
false,
|
||||
Highlights::default(),
|
||||
)
|
||||
.map(|h| h.text)
|
||||
}
|
||||
|
||||
// fn verify_chunks(&mut self, rng: &mut impl Rng) {
|
||||
// for _ in 0..5 {
|
||||
// let mut end_row = rng.gen_range(0..=self.max_point().row());
|
||||
// let start_row = rng.gen_range(0..=end_row);
|
||||
// end_row += 1;
|
||||
fn verify_chunks(&mut self, rng: &mut impl Rng) {
|
||||
for _ in 0..5 {
|
||||
let mut end_row = rng.gen_range(0..=self.max_point().row());
|
||||
let start_row = rng.gen_range(0..=end_row);
|
||||
end_row += 1;
|
||||
|
||||
// let mut expected_text = self.text_chunks(start_row).collect::<String>();
|
||||
// if expected_text.ends_with('\n') {
|
||||
// expected_text.push('\n');
|
||||
// }
|
||||
// let mut expected_text = expected_text
|
||||
// .lines()
|
||||
// .take((end_row - start_row) as usize)
|
||||
// .collect::<Vec<_>>()
|
||||
// .join("\n");
|
||||
// if end_row <= self.max_point().row() {
|
||||
// expected_text.push('\n');
|
||||
// }
|
||||
let mut expected_text = self.text_chunks(start_row).collect::<String>();
|
||||
if expected_text.ends_with('\n') {
|
||||
expected_text.push('\n');
|
||||
}
|
||||
let mut expected_text = expected_text
|
||||
.lines()
|
||||
.take((end_row - start_row) as usize)
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
if end_row <= self.max_point().row() {
|
||||
expected_text.push('\n');
|
||||
}
|
||||
|
||||
// let actual_text = self
|
||||
// .chunks(start_row..end_row, true, Highlights::default())
|
||||
// .map(|c| c.text)
|
||||
// .collect::<String>();
|
||||
// assert_eq!(
|
||||
// expected_text,
|
||||
// actual_text,
|
||||
// "chunks != highlighted_chunks for rows {:?}",
|
||||
// start_row..end_row
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
let actual_text = self
|
||||
.chunks(start_row..end_row, true, Highlights::default())
|
||||
.map(|c| c.text)
|
||||
.collect::<String>();
|
||||
assert_eq!(
|
||||
expected_text,
|
||||
actual_text,
|
||||
"chunks != highlighted_chunks for rows {:?}",
|
||||
start_row..end_row
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
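One subtle step in `verify_chunks` above is appending an extra '\n' before splitting with `lines()`. That is needed because `str::lines()` swallows a final trailing newline, which would otherwise hide the last (empty) display row when truncating to `end_row - start_row` rows. A small self-contained sketch of just that trimming step:

```rust
// Mirrors the expected_text trimming in verify_chunks, over a plain &str.
fn take_rows(text: &str, rows: usize) -> String {
    let mut text = text.to_string();
    if text.ends_with('\n') {
        // Double the trailing newline so lines() still yields the final empty row.
        text.push('\n');
    }
    text.lines().take(rows).collect::<Vec<_>>().join("\n")
}

fn main() {
    // Three display rows: "a", "b", and a trailing empty row.
    assert_eq!(take_rows("a\nb\n", 3), "a\nb\n");
    assert_eq!(take_rows("a\nb\n", 2), "a\nb");
}
```
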
|
||||
|
@ -100,8 +100,10 @@ use text::{OffsetUtf16, Rope};
|
||||
use theme::{
|
||||
ActiveTheme, DiagnosticStyle, PlayerColor, SyntaxTheme, Theme, ThemeColors, ThemeSettings,
|
||||
};
|
||||
use ui::prelude::*;
|
||||
use ui::{h_stack, v_stack, HighlightedLabel, IconButton, Popover, Tooltip};
|
||||
use ui::{
|
||||
h_stack, v_stack, ButtonSize, ButtonStyle, HighlightedLabel, Icon, IconButton, Popover, Tooltip,
|
||||
};
|
||||
use ui::{prelude::*, IconSize};
|
||||
use util::{post_inc, RangeExt, ResultExt, TryFutureExt};
|
||||
use workspace::{
|
||||
item::{ItemEvent, ItemHandle},
|
||||
@ -1920,14 +1922,14 @@ impl Editor {
|
||||
// self.buffer.read(cx).read(cx).file_at(point).cloned()
|
||||
// }
|
||||
|
||||
// pub fn active_excerpt(
|
||||
// &self,
|
||||
// cx: &AppContext,
|
||||
// ) -> Option<(ExcerptId, Model<Buffer>, Range<text::Anchor>)> {
|
||||
// self.buffer
|
||||
// .read(cx)
|
||||
// .excerpt_containing(self.selections.newest_anchor().head(), cx)
|
||||
// }
|
||||
pub fn active_excerpt(
|
||||
&self,
|
||||
cx: &AppContext,
|
||||
) -> Option<(ExcerptId, Model<Buffer>, Range<text::Anchor>)> {
|
||||
self.buffer
|
||||
.read(cx)
|
||||
.excerpt_containing(self.selections.newest_anchor().head(), cx)
|
||||
}
|
||||
|
||||
// pub fn style(&self, cx: &AppContext) -> EditorStyle {
|
||||
// build_style(
|
||||
@ -3484,7 +3486,7 @@ impl Editor {
drop(context_menu);
this.discard_copilot_suggestion(cx);
cx.notify();
} else if this.completion_tasks.is_empty() {
} else if this.completion_tasks.len() <= 1 {
// If there are no more completion tasks and the last menu was
// empty, we should hide it. If it was already hidden, we should
// also show the copilot suggestion when available.
@ -8238,6 +8240,11 @@ impl Editor {
self.style = Some(style);
}

#[cfg(any(test, feature = "test-support"))]
pub fn style(&self) -> Option<&EditorStyle> {
self.style.as_ref()
}

pub fn set_wrap_width(&self, width: Option<Pixels>, cx: &mut AppContext) -> bool {
self.display_map
.update(cx, |map, cx| map.set_wrap_width(width, cx))
@ -9689,20 +9696,42 @@ pub fn diagnostic_block_renderer(diagnostic: Diagnostic, is_valid: bool) -> Rend
let message = diagnostic.message;
Arc::new(move |cx: &mut BlockContext| {
let message = message.clone();
let copy_id: SharedString = format!("copy-{}", cx.block_id.clone()).to_string().into();
let write_to_clipboard = cx.write_to_clipboard(ClipboardItem::new(message.clone()));

// TODO: Nate: We should tint the background of the block with the severity color
// We need to extend the theme before we can do this
v_stack()
.id(cx.block_id)
.relative()
.size_full()
.bg(gpui::red())
.children(highlighted_lines.iter().map(|(line, highlights)| {
div()
let group_id = cx.block_id.to_string();

h_stack()
.group(group_id.clone())
.gap_2()
.absolute()
.left(cx.anchor_x)
.px_1p5()
.child(HighlightedLabel::new(line.clone(), highlights.clone()))
.ml(cx.anchor_x)
.child(
div()
.border()
.border_color(gpui::red())
.invisible()
.group_hover(group_id, |style| style.visible())
.child(
IconButton::new(copy_id.clone(), Icon::Copy)
.icon_color(Color::Muted)
.size(ButtonSize::Compact)
.style(ButtonStyle::Transparent)
.on_click(cx.listener(move |_, _, cx| write_to_clipboard))
.tooltip(|cx| Tooltip::text("Copy diagnostic message", cx)),
),
)
}))
.cursor_pointer()
.on_click(cx.listener(move |_, _, cx| {
cx.write_to_clipboard(ClipboardItem::new(message.clone()));
}))
.tooltip(|cx| Tooltip::text("Copy diagnostic message", cx))
.into_any_element()
})
}
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -88,195 +88,195 @@ pub fn diff_hunk_to_display(hunk: DiffHunk<u32>, snapshot: &DisplaySnapshot) ->
|
||||
}
|
||||
}
|
||||
|
||||
// #[cfg(any(test, feature = "test_support"))]
|
||||
// mod tests {
|
||||
// // use crate::editor_tests::init_test;
|
||||
// use crate::Point;
|
||||
// use gpui::TestAppContext;
|
||||
// use multi_buffer::{ExcerptRange, MultiBuffer};
|
||||
// use project::{FakeFs, Project};
|
||||
// use unindent::Unindent;
|
||||
// #[gpui::test]
|
||||
// async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
|
||||
// use git::diff::DiffHunkStatus;
|
||||
// init_test(cx, |_| {});
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::editor_tests::init_test;
|
||||
use crate::Point;
|
||||
use gpui::{Context, TestAppContext};
|
||||
use multi_buffer::{ExcerptRange, MultiBuffer};
|
||||
use project::{FakeFs, Project};
|
||||
use unindent::Unindent;
|
||||
#[gpui::test]
|
||||
async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
|
||||
use git::diff::DiffHunkStatus;
|
||||
init_test(cx, |_| {});
|
||||
|
||||
// let fs = FakeFs::new(cx.background());
|
||||
// let project = Project::test(fs, [], cx).await;
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
|
||||
// // buffer has two modified hunks with two rows each
|
||||
// let buffer_1 = project
|
||||
// .update(cx, |project, cx| {
|
||||
// project.create_buffer(
|
||||
// "
|
||||
// 1.zero
|
||||
// 1.ONE
|
||||
// 1.TWO
|
||||
// 1.three
|
||||
// 1.FOUR
|
||||
// 1.FIVE
|
||||
// 1.six
|
||||
// "
|
||||
// .unindent()
|
||||
// .as_str(),
|
||||
// None,
|
||||
// cx,
|
||||
// )
|
||||
// })
|
||||
// .unwrap();
|
||||
// buffer_1.update(cx, |buffer, cx| {
|
||||
// buffer.set_diff_base(
|
||||
// Some(
|
||||
// "
|
||||
// 1.zero
|
||||
// 1.one
|
||||
// 1.two
|
||||
// 1.three
|
||||
// 1.four
|
||||
// 1.five
|
||||
// 1.six
|
||||
// "
|
||||
// .unindent(),
|
||||
// ),
|
||||
// cx,
|
||||
// );
|
||||
// });
|
||||
// buffer has two modified hunks with two rows each
|
||||
let buffer_1 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.create_buffer(
|
||||
"
|
||||
1.zero
|
||||
1.ONE
|
||||
1.TWO
|
||||
1.three
|
||||
1.FOUR
|
||||
1.FIVE
|
||||
1.six
|
||||
"
|
||||
.unindent()
|
||||
.as_str(),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
buffer_1.update(cx, |buffer, cx| {
|
||||
buffer.set_diff_base(
|
||||
Some(
|
||||
"
|
||||
1.zero
|
||||
1.one
|
||||
1.two
|
||||
1.three
|
||||
1.four
|
||||
1.five
|
||||
1.six
|
||||
"
|
||||
.unindent(),
|
||||
),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// // buffer has a deletion hunk and an insertion hunk
|
||||
// let buffer_2 = project
|
||||
// .update(cx, |project, cx| {
|
||||
// project.create_buffer(
|
||||
// "
|
||||
// 2.zero
|
||||
// 2.one
|
||||
// 2.two
|
||||
// 2.three
|
||||
// 2.four
|
||||
// 2.five
|
||||
// 2.six
|
||||
// "
|
||||
// .unindent()
|
||||
// .as_str(),
|
||||
// None,
|
||||
// cx,
|
||||
// )
|
||||
// })
|
||||
// .unwrap();
|
||||
// buffer_2.update(cx, |buffer, cx| {
|
||||
// buffer.set_diff_base(
|
||||
// Some(
|
||||
// "
|
||||
// 2.zero
|
||||
// 2.one
|
||||
// 2.one-and-a-half
|
||||
// 2.two
|
||||
// 2.three
|
||||
// 2.four
|
||||
// 2.six
|
||||
// "
|
||||
// .unindent(),
|
||||
// ),
|
||||
// cx,
|
||||
// );
|
||||
// });
|
||||
// buffer has a deletion hunk and an insertion hunk
|
||||
let buffer_2 = project
|
||||
.update(cx, |project, cx| {
|
||||
project.create_buffer(
|
||||
"
|
||||
2.zero
|
||||
2.one
|
||||
2.two
|
||||
2.three
|
||||
2.four
|
||||
2.five
|
||||
2.six
|
||||
"
|
||||
.unindent()
|
||||
.as_str(),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
buffer_2.update(cx, |buffer, cx| {
|
||||
buffer.set_diff_base(
|
||||
Some(
|
||||
"
|
||||
2.zero
|
||||
2.one
|
||||
2.one-and-a-half
|
||||
2.two
|
||||
2.three
|
||||
2.four
|
||||
2.six
|
||||
"
|
||||
.unindent(),
|
||||
),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// cx.foreground().run_until_parked();
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
// let multibuffer = cx.add_model(|cx| {
|
||||
// let mut multibuffer = MultiBuffer::new(0);
|
||||
// multibuffer.push_excerpts(
|
||||
// buffer_1.clone(),
|
||||
// [
|
||||
// // excerpt ends in the middle of a modified hunk
|
||||
// ExcerptRange {
|
||||
// context: Point::new(0, 0)..Point::new(1, 5),
|
||||
// primary: Default::default(),
|
||||
// },
|
||||
// // excerpt begins in the middle of a modified hunk
|
||||
// ExcerptRange {
|
||||
// context: Point::new(5, 0)..Point::new(6, 5),
|
||||
// primary: Default::default(),
|
||||
// },
|
||||
// ],
|
||||
// cx,
|
||||
// );
|
||||
// multibuffer.push_excerpts(
|
||||
// buffer_2.clone(),
|
||||
// [
|
||||
// // excerpt ends at a deletion
|
||||
// ExcerptRange {
|
||||
// context: Point::new(0, 0)..Point::new(1, 5),
|
||||
// primary: Default::default(),
|
||||
// },
|
||||
// // excerpt starts at a deletion
|
||||
// ExcerptRange {
|
||||
// context: Point::new(2, 0)..Point::new(2, 5),
|
||||
// primary: Default::default(),
|
||||
// },
|
||||
// // excerpt fully contains a deletion hunk
|
||||
// ExcerptRange {
|
||||
// context: Point::new(1, 0)..Point::new(2, 5),
|
||||
// primary: Default::default(),
|
||||
// },
|
||||
// // excerpt fully contains an insertion hunk
|
||||
// ExcerptRange {
|
||||
// context: Point::new(4, 0)..Point::new(6, 5),
|
||||
// primary: Default::default(),
|
||||
// },
|
||||
// ],
|
||||
// cx,
|
||||
// );
|
||||
// multibuffer
|
||||
// });
|
||||
let multibuffer = cx.build_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
multibuffer.push_excerpts(
|
||||
buffer_1.clone(),
|
||||
[
|
||||
// excerpt ends in the middle of a modified hunk
|
||||
ExcerptRange {
|
||||
context: Point::new(0, 0)..Point::new(1, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
// excerpt begins in the middle of a modified hunk
|
||||
ExcerptRange {
|
||||
context: Point::new(5, 0)..Point::new(6, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
],
|
||||
cx,
|
||||
);
|
||||
multibuffer.push_excerpts(
|
||||
buffer_2.clone(),
|
||||
[
|
||||
// excerpt ends at a deletion
|
||||
ExcerptRange {
|
||||
context: Point::new(0, 0)..Point::new(1, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
// excerpt starts at a deletion
|
||||
ExcerptRange {
|
||||
context: Point::new(2, 0)..Point::new(2, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
// excerpt fully contains a deletion hunk
|
||||
ExcerptRange {
|
||||
context: Point::new(1, 0)..Point::new(2, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
// excerpt fully contains an insertion hunk
|
||||
ExcerptRange {
|
||||
context: Point::new(4, 0)..Point::new(6, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
],
|
||||
cx,
|
||||
);
|
||||
multibuffer
|
||||
});
|
||||
|
||||
// let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
|
||||
let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
|
||||
|
||||
// assert_eq!(
|
||||
// snapshot.text(),
|
||||
// "
|
||||
// 1.zero
|
||||
// 1.ONE
|
||||
// 1.FIVE
|
||||
// 1.six
|
||||
// 2.zero
|
||||
// 2.one
|
||||
// 2.two
|
||||
// 2.one
|
||||
// 2.two
|
||||
// 2.four
|
||||
// 2.five
|
||||
// 2.six"
|
||||
// .unindent()
|
||||
// );
|
||||
assert_eq!(
|
||||
snapshot.text(),
|
||||
"
|
||||
1.zero
|
||||
1.ONE
|
||||
1.FIVE
|
||||
1.six
|
||||
2.zero
|
||||
2.one
|
||||
2.two
|
||||
2.one
|
||||
2.two
|
||||
2.four
|
||||
2.five
|
||||
2.six"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
// let expected = [
|
||||
// (DiffHunkStatus::Modified, 1..2),
|
||||
// (DiffHunkStatus::Modified, 2..3),
|
||||
// //TODO: Define better when and where removed hunks show up at range extremities
|
||||
// (DiffHunkStatus::Removed, 6..6),
|
||||
// (DiffHunkStatus::Removed, 8..8),
|
||||
// (DiffHunkStatus::Added, 10..11),
|
||||
// ];
|
||||
let expected = [
|
||||
(DiffHunkStatus::Modified, 1..2),
|
||||
(DiffHunkStatus::Modified, 2..3),
|
||||
//TODO: Define better when and where removed hunks show up at range extremities
|
||||
(DiffHunkStatus::Removed, 6..6),
|
||||
(DiffHunkStatus::Removed, 8..8),
|
||||
(DiffHunkStatus::Added, 10..11),
|
||||
];
|
||||
|
||||
// assert_eq!(
|
||||
// snapshot
|
||||
// .git_diff_hunks_in_range(0..12)
|
||||
// .map(|hunk| (hunk.status(), hunk.buffer_range))
|
||||
// .collect::<Vec<_>>(),
|
||||
// &expected,
|
||||
// );
|
||||
assert_eq!(
|
||||
snapshot
|
||||
.git_diff_hunks_in_range(0..12)
|
||||
.map(|hunk| (hunk.status(), hunk.buffer_range))
|
||||
.collect::<Vec<_>>(),
|
||||
&expected,
|
||||
);
|
||||
|
||||
// assert_eq!(
|
||||
// snapshot
|
||||
// .git_diff_hunks_in_range_rev(0..12)
|
||||
// .map(|hunk| (hunk.status(), hunk.buffer_range))
|
||||
// .collect::<Vec<_>>(),
|
||||
// expected
|
||||
// .iter()
|
||||
// .rev()
|
||||
// .cloned()
|
||||
// .collect::<Vec<_>>()
|
||||
// .as_slice(),
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
assert_eq!(
|
||||
snapshot
|
||||
.git_diff_hunks_in_range_rev(0..12)
|
||||
.map(|hunk| (hunk.status(), hunk.buffer_range))
|
||||
.collect::<Vec<_>>(),
|
||||
expected
|
||||
.iter()
|
||||
.rev()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
.as_slice(),
|
||||
);
|
||||
}
|
||||
}
|
||||
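The expected tuples above pair a `DiffHunkStatus` with a display-row range; note that removed hunks carry an empty range (for example `6..6`) marking where the deleted content would sit. A hedged sketch of how those tuples read, using a local enum and an assumed classification rule rather than the real `git::diff::DiffHunkStatus`:

```rust
use std::ops::Range;

// Local stand-in for git::diff::DiffHunkStatus, for illustration only.
#[derive(Debug, PartialEq)]
enum HunkStatus {
    Added,
    Modified,
    Removed,
}

// Classify a hunk from its row range in the new text and the number of
// deleted rows in the diff base (assumed shape, matching the expectations above).
fn classify(new_rows: &Range<u32>, deleted_rows: u32) -> HunkStatus {
    if new_rows.is_empty() {
        HunkStatus::Removed // pure deletion: nothing occupies the new rows
    } else if deleted_rows == 0 {
        HunkStatus::Added // pure insertion: no base rows were replaced
    } else {
        HunkStatus::Modified
    }
}

fn main() {
    assert_eq!(classify(&(6..6), 1), HunkStatus::Removed);
    assert_eq!(classify(&(10..11), 0), HunkStatus::Added);
    assert_eq!(classify(&(1..2), 2), HunkStatus::Modified);
}
```
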
|
@ -5,7 +5,7 @@ use crate::{Editor, RangeToAnchorExt};
|
||||
enum MatchingBracketHighlight {}
|
||||
|
||||
pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewContext<Editor>) {
|
||||
// editor.clear_background_highlights::<MatchingBracketHighlight>(cx);
|
||||
editor.clear_background_highlights::<MatchingBracketHighlight>(cx);
|
||||
|
||||
let newest_selection = editor.selections.newest::<usize>(cx);
|
||||
// Don't highlight brackets if the selection isn't empty
|
||||
@ -30,109 +30,109 @@ pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewCon
|
||||
}
|
||||
}
|
||||
|
||||
// #[cfg(test)]
|
||||
// mod tests {
|
||||
// use super::*;
|
||||
// use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
|
||||
// use indoc::indoc;
|
||||
// use language::{BracketPair, BracketPairConfig, Language, LanguageConfig};
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
|
||||
use indoc::indoc;
|
||||
use language::{BracketPair, BracketPairConfig, Language, LanguageConfig};
|
||||
|
||||
// #[gpui::test]
|
||||
// async fn test_matching_bracket_highlights(cx: &mut gpui::TestAppContext) {
|
||||
// init_test(cx, |_| {});
|
||||
#[gpui::test]
|
||||
async fn test_matching_bracket_highlights(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
// let mut cx = EditorLspTestContext::new(
|
||||
// Language::new(
|
||||
// LanguageConfig {
|
||||
// name: "Rust".into(),
|
||||
// path_suffixes: vec!["rs".to_string()],
|
||||
// brackets: BracketPairConfig {
|
||||
// pairs: vec![
|
||||
// BracketPair {
|
||||
// start: "{".to_string(),
|
||||
// end: "}".to_string(),
|
||||
// close: false,
|
||||
// newline: true,
|
||||
// },
|
||||
// BracketPair {
|
||||
// start: "(".to_string(),
|
||||
// end: ")".to_string(),
|
||||
// close: false,
|
||||
// newline: true,
|
||||
// },
|
||||
// ],
|
||||
// ..Default::default()
|
||||
// },
|
||||
// ..Default::default()
|
||||
// },
|
||||
// Some(tree_sitter_rust::language()),
|
||||
// )
|
||||
// .with_brackets_query(indoc! {r#"
|
||||
// ("{" @open "}" @close)
|
||||
// ("(" @open ")" @close)
|
||||
// "#})
|
||||
// .unwrap(),
|
||||
// Default::default(),
|
||||
// cx,
|
||||
// )
|
||||
// .await;
|
||||
let mut cx = EditorLspTestContext::new(
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
brackets: BracketPairConfig {
|
||||
pairs: vec![
|
||||
BracketPair {
|
||||
start: "{".to_string(),
|
||||
end: "}".to_string(),
|
||||
close: false,
|
||||
newline: true,
|
||||
},
|
||||
BracketPair {
|
||||
start: "(".to_string(),
|
||||
end: ")".to_string(),
|
||||
close: false,
|
||||
newline: true,
|
||||
},
|
||||
],
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::language()),
|
||||
)
|
||||
.with_brackets_query(indoc! {r#"
|
||||
("{" @open "}" @close)
|
||||
("(" @open ")" @close)
|
||||
"#})
|
||||
.unwrap(),
|
||||
Default::default(),
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
// // positioning cursor inside bracket highlights both
|
||||
// cx.set_state(indoc! {r#"
|
||||
// pub fn test("Test ˇargument") {
|
||||
// another_test(1, 2, 3);
|
||||
// }
|
||||
// "#});
|
||||
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
// pub fn test«(»"Test argument"«)» {
|
||||
// another_test(1, 2, 3);
|
||||
// }
|
||||
// "#});
|
||||
// positioning cursor inside bracket highlights both
|
||||
cx.set_state(indoc! {r#"
|
||||
pub fn test("Test ˇargument") {
|
||||
another_test(1, 2, 3);
|
||||
}
|
||||
"#});
|
||||
cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
pub fn test«(»"Test argument"«)» {
|
||||
another_test(1, 2, 3);
|
||||
}
|
||||
"#});
|
||||
|
||||
// cx.set_state(indoc! {r#"
|
||||
// pub fn test("Test argument") {
|
||||
// another_test(1, ˇ2, 3);
|
||||
// }
|
||||
// "#});
|
||||
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
// pub fn test("Test argument") {
|
||||
// another_test«(»1, 2, 3«)»;
|
||||
// }
|
||||
// "#});
|
||||
cx.set_state(indoc! {r#"
|
||||
pub fn test("Test argument") {
|
||||
another_test(1, ˇ2, 3);
|
||||
}
|
||||
"#});
|
||||
cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
pub fn test("Test argument") {
|
||||
another_test«(»1, 2, 3«)»;
|
||||
}
|
||||
"#});
|
||||
|
||||
// cx.set_state(indoc! {r#"
|
||||
// pub fn test("Test argument") {
|
||||
// anotherˇ_test(1, 2, 3);
|
||||
// }
|
||||
// "#});
|
||||
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
// pub fn test("Test argument") «{»
|
||||
// another_test(1, 2, 3);
|
||||
// «}»
|
||||
// "#});
|
||||
cx.set_state(indoc! {r#"
|
||||
pub fn test("Test argument") {
|
||||
anotherˇ_test(1, 2, 3);
|
||||
}
|
||||
"#});
|
||||
cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
pub fn test("Test argument") «{»
|
||||
another_test(1, 2, 3);
|
||||
«}»
|
||||
"#});
|
||||
|
||||
// // positioning outside of brackets removes highlight
|
||||
// cx.set_state(indoc! {r#"
|
||||
// pub fˇn test("Test argument") {
|
||||
// another_test(1, 2, 3);
|
||||
// }
|
||||
// "#});
|
||||
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
// pub fn test("Test argument") {
|
||||
// another_test(1, 2, 3);
|
||||
// }
|
||||
// "#});
|
||||
// positioning outside of brackets removes highlight
|
||||
cx.set_state(indoc! {r#"
|
||||
pub fˇn test("Test argument") {
|
||||
another_test(1, 2, 3);
|
||||
}
|
||||
"#});
|
||||
cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
pub fn test("Test argument") {
|
||||
another_test(1, 2, 3);
|
||||
}
|
||||
"#});
|
||||
|
||||
// // non empty selection dismisses highlight
|
||||
// cx.set_state(indoc! {r#"
|
||||
// pub fn test("Te«st argˇ»ument") {
|
||||
// another_test(1, 2, 3);
|
||||
// }
|
||||
// "#});
|
||||
// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
// pub fn test("Test argument") {
|
||||
// another_test(1, 2, 3);
|
||||
// }
|
||||
// "#});
|
||||
// }
|
||||
// }
|
||||
// non empty selection dismisses highlight
|
||||
cx.set_state(indoc! {r#"
|
||||
pub fn test("Te«st argˇ»ument") {
|
||||
another_test(1, 2, 3);
|
||||
}
|
||||
"#});
|
||||
cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
|
||||
pub fn test("Test argument") {
|
||||
another_test(1, 2, 3);
|
||||
}
|
||||
"#});
|
||||
}
|
||||
}
|
||||
|
@ -2432,13 +2432,13 @@ pub mod tests {
|
||||
let language = Arc::new(language);
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
"/a",
|
||||
json!({
|
||||
"main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::<Vec<_>>().join("")),
|
||||
"other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::<Vec<_>>().join("")),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
"/a",
|
||||
json!({
|
||||
"main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::<Vec<_>>().join("")),
|
||||
"other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::<Vec<_>>().join("")),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs, ["/a".as_ref()], cx).await;
|
||||
project.update(cx, |project, _| {
|
||||
project.languages().add(Arc::clone(&language))
|
||||
@ -2598,24 +2598,22 @@ pub mod tests {
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
let expected_hints = vec![
|
||||
"main hint #0".to_string(),
|
||||
"main hint #1".to_string(),
|
||||
"main hint #2".to_string(),
|
||||
"main hint #3".to_string(),
|
||||
// todo!() there used to be no these hints, but new gpui2 presumably scrolls a bit farther
|
||||
// (or renders less?) note that tests below pass
|
||||
"main hint #4".to_string(),
|
||||
"main hint #5".to_string(),
|
||||
];
|
||||
assert_eq!(
|
||||
expected_hints,
|
||||
cached_hint_labels(editor),
|
||||
"When scroll is at the edge of a multibuffer, its visible excerpts only should be queried for inlay hints"
|
||||
);
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(), "Every visible excerpt's hints should bump the version");
|
||||
});
|
||||
let expected_hints = vec![
|
||||
"main hint #0".to_string(),
|
||||
"main hint #1".to_string(),
|
||||
"main hint #2".to_string(),
|
||||
"main hint #3".to_string(),
|
||||
"main hint #4".to_string(),
|
||||
"main hint #5".to_string(),
|
||||
];
|
||||
assert_eq!(
|
||||
expected_hints,
|
||||
cached_hint_labels(editor),
|
||||
"When scroll is at the edge of a multibuffer, its visible excerpts only should be queried for inlay hints"
|
||||
);
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(), "Every visible excerpt's hints should bump the version");
|
||||
});
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(Some(Autoscroll::Next), cx, |s| {
|
||||
@ -2630,23 +2628,23 @@ pub mod tests {
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
editor.update(cx, |editor, cx| {
|
||||
let expected_hints = vec![
|
||||
"main hint #0".to_string(),
|
||||
"main hint #1".to_string(),
|
||||
"main hint #2".to_string(),
|
||||
"main hint #3".to_string(),
|
||||
"main hint #4".to_string(),
|
||||
"main hint #5".to_string(),
|
||||
"other hint #0".to_string(),
|
||||
"other hint #1".to_string(),
|
||||
"other hint #2".to_string(),
|
||||
];
|
||||
assert_eq!(expected_hints, cached_hint_labels(editor),
|
||||
"With more scrolls of the multibuffer, more hints should be added into the cache and nothing invalidated without edits");
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(),
|
||||
"Due to every excerpt having one hint, we update cache per new excerpt scrolled");
|
||||
});
|
||||
let expected_hints = vec![
|
||||
"main hint #0".to_string(),
|
||||
"main hint #1".to_string(),
|
||||
"main hint #2".to_string(),
|
||||
"main hint #3".to_string(),
|
||||
"main hint #4".to_string(),
|
||||
"main hint #5".to_string(),
|
||||
"other hint #0".to_string(),
|
||||
"other hint #1".to_string(),
|
||||
"other hint #2".to_string(),
|
||||
];
|
||||
assert_eq!(expected_hints, cached_hint_labels(editor),
|
||||
"With more scrolls of the multibuffer, more hints should be added into the cache and nothing invalidated without edits");
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(),
|
||||
"Due to every excerpt having one hint, we update cache per new excerpt scrolled");
|
||||
});
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(Some(Autoscroll::Next), cx, |s| {
|
||||
@ -2658,26 +2656,26 @@ pub mod tests {
|
||||
));
|
||||
cx.executor().run_until_parked();
|
||||
let last_scroll_update_version = editor.update(cx, |editor, cx| {
|
||||
let expected_hints = vec![
|
||||
"main hint #0".to_string(),
|
||||
"main hint #1".to_string(),
|
||||
"main hint #2".to_string(),
|
||||
"main hint #3".to_string(),
|
||||
"main hint #4".to_string(),
|
||||
"main hint #5".to_string(),
|
||||
"other hint #0".to_string(),
|
||||
"other hint #1".to_string(),
|
||||
"other hint #2".to_string(),
|
||||
"other hint #3".to_string(),
|
||||
"other hint #4".to_string(),
|
||||
"other hint #5".to_string(),
|
||||
];
|
||||
assert_eq!(expected_hints, cached_hint_labels(editor),
|
||||
"After multibuffer was scrolled to the end, all hints for all excerpts should be fetched");
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
assert_eq!(editor.inlay_hint_cache().version, expected_hints.len());
|
||||
expected_hints.len()
|
||||
}).unwrap();
|
||||
let expected_hints = vec![
|
||||
"main hint #0".to_string(),
|
||||
"main hint #1".to_string(),
|
||||
"main hint #2".to_string(),
|
||||
"main hint #3".to_string(),
|
||||
"main hint #4".to_string(),
|
||||
"main hint #5".to_string(),
|
||||
"other hint #0".to_string(),
|
||||
"other hint #1".to_string(),
|
||||
"other hint #2".to_string(),
|
||||
"other hint #3".to_string(),
|
||||
"other hint #4".to_string(),
|
||||
"other hint #5".to_string(),
|
||||
];
|
||||
assert_eq!(expected_hints, cached_hint_labels(editor),
|
||||
"After multibuffer was scrolled to the end, all hints for all excerpts should be fetched");
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
assert_eq!(editor.inlay_hint_cache().version, expected_hints.len());
|
||||
expected_hints.len()
|
||||
}).unwrap();
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(Some(Autoscroll::Next), cx, |s| {
|
||||
@ -2686,30 +2684,31 @@ pub mod tests {
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
editor.update(cx, |editor, cx| {
|
||||
let expected_hints = vec![
|
||||
"main hint #0".to_string(),
|
||||
"main hint #1".to_string(),
|
||||
"main hint #2".to_string(),
|
||||
"main hint #3".to_string(),
|
||||
"main hint #4".to_string(),
|
||||
"main hint #5".to_string(),
|
||||
"other hint #0".to_string(),
|
||||
"other hint #1".to_string(),
|
||||
"other hint #2".to_string(),
|
||||
"other hint #3".to_string(),
|
||||
"other hint #4".to_string(),
|
||||
"other hint #5".to_string(),
|
||||
];
|
||||
assert_eq!(expected_hints, cached_hint_labels(editor),
|
||||
"After multibuffer was scrolled to the end, further scrolls up should not bring more hints");
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen while scrolling an already scrolled buffer");
|
||||
});
|
||||
let expected_hints = vec![
|
||||
"main hint #0".to_string(),
|
||||
"main hint #1".to_string(),
|
||||
"main hint #2".to_string(),
|
||||
"main hint #3".to_string(),
|
||||
"main hint #4".to_string(),
|
||||
"main hint #5".to_string(),
|
||||
"other hint #0".to_string(),
|
||||
"other hint #1".to_string(),
|
||||
"other hint #2".to_string(),
|
||||
"other hint #3".to_string(),
|
||||
"other hint #4".to_string(),
|
||||
"other hint #5".to_string(),
|
||||
];
|
||||
assert_eq!(expected_hints, cached_hint_labels(editor),
|
||||
"After multibuffer was scrolled to the end, further scrolls up should not bring more hints");
|
||||
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
|
||||
assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen while scrolling an already scrolled buffer");
|
||||
});
|
||||
|
||||
editor_edited.store(true, Ordering::Release);
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(None, cx, |s| {
|
||||
s.select_ranges([Point::new(56, 0)..Point::new(56, 0)])
|
||||
// TODO if this gets set to hint boundary (e.g. 56) we sometimes get an extra cache version bump, why?
|
||||
s.select_ranges([Point::new(57, 0)..Point::new(57, 0)])
|
||||
});
|
||||
editor.handle_input("++++more text++++", cx);
|
||||
});
|
||||
@ -2729,15 +2728,15 @@ pub mod tests {
expected_hints,
cached_hint_labels(editor),
"After multibuffer edit, editor gets scrolled back to the last selection; \
all hints should be invalidated and requeried for all of its visible excerpts"
all hints should be invalidated and requeried for all of its visible excerpts"
);
assert_eq!(expected_hints, visible_hint_labels(editor, cx));

let current_cache_version = editor.inlay_hint_cache().version;
let minimum_expected_version = last_scroll_update_version + expected_hints.len();
assert!(
current_cache_version == minimum_expected_version || current_cache_version == minimum_expected_version + 1,
"Due to every excerpt having one hint, cache should update per new excerpt received + 1 potential sporadic update"
assert_eq!(
current_cache_version,
last_scroll_update_version + expected_hints.len(),
"We should have updated cache N times == N of new hints arrived (separately from each excerpt)"
);
});
}
File diff suppressed because it is too large
@ -37,19 +37,18 @@ pub fn deploy_context_menu(
});

let context_menu = ui::ContextMenu::build(cx, |menu, cx| {
menu.action("Rename Symbol", Box::new(Rename), cx)
.action("Go to Definition", Box::new(GoToDefinition), cx)
.action("Go to Type Definition", Box::new(GoToTypeDefinition), cx)
.action("Find All References", Box::new(FindAllReferences), cx)
menu.action("Rename Symbol", Box::new(Rename))
.action("Go to Definition", Box::new(GoToDefinition))
.action("Go to Type Definition", Box::new(GoToTypeDefinition))
.action("Find All References", Box::new(FindAllReferences))
.action(
"Code Actions",
Box::new(ToggleCodeActions {
deployed_from_indicator: false,
}),
cx,
)
.separator()
.action("Reveal in Finder", Box::new(RevealInFinder), cx)
.action("Reveal in Finder", Box::new(RevealInFinder))
});
let context_menu_focus = context_menu.focus_handle(cx);
cx.focus(&context_menu_focus);
@ -69,42 +68,43 @@ pub fn deploy_context_menu(
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
// #[cfg(test)]
|
||||
// mod tests {
|
||||
// use super::*;
|
||||
// use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
|
||||
// use indoc::indoc;
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
|
||||
use indoc::indoc;
|
||||
|
||||
// #[gpui::test]
|
||||
// async fn test_mouse_context_menu(cx: &mut gpui::TestAppContext) {
|
||||
// init_test(cx, |_| {});
|
||||
#[gpui::test]
|
||||
async fn test_mouse_context_menu(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
// let mut cx = EditorLspTestContext::new_rust(
|
||||
// lsp::ServerCapabilities {
|
||||
// hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
|
||||
// ..Default::default()
|
||||
// },
|
||||
// cx,
|
||||
// )
|
||||
// .await;
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
lsp::ServerCapabilities {
|
||||
hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
|
||||
..Default::default()
|
||||
},
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
// cx.set_state(indoc! {"
|
||||
// fn teˇst() {
|
||||
// do_work();
|
||||
// }
|
||||
// "});
|
||||
// let point = cx.display_point(indoc! {"
|
||||
// fn test() {
|
||||
// do_wˇork();
|
||||
// }
|
||||
// "});
|
||||
// cx.update_editor(|editor, cx| deploy_context_menu(editor, Default::default(), point, cx));
|
||||
cx.set_state(indoc! {"
|
||||
fn teˇst() {
|
||||
do_work();
|
||||
}
|
||||
"});
|
||||
let point = cx.display_point(indoc! {"
|
||||
fn test() {
|
||||
do_wˇork();
|
||||
}
|
||||
"});
|
||||
cx.editor(|editor, app| assert!(editor.mouse_context_menu.is_none()));
|
||||
cx.update_editor(|editor, cx| deploy_context_menu(editor, Default::default(), point, cx));
|
||||
|
||||
// cx.assert_editor_state(indoc! {"
|
||||
// fn test() {
|
||||
// do_wˇork();
|
||||
// }
|
||||
// "});
|
||||
// cx.editor(|editor, app| assert!(editor.mouse_context_menu.read(app).visible()));
|
||||
// }
|
||||
// }
|
||||
cx.assert_editor_state(indoc! {"
|
||||
fn test() {
|
||||
do_wˇork();
|
||||
}
|
||||
"});
|
||||
cx.editor(|editor, app| assert!(editor.mouse_context_menu.is_some()));
|
||||
}
|
||||
}
|
||||
|
@ -452,483 +452,475 @@ pub fn split_display_range_by_lines(
|
||||
result
|
||||
}
|
||||
|
||||
// #[cfg(test)]
|
||||
// mod tests {
|
||||
// use super::*;
|
||||
// use crate::{
|
||||
// display_map::Inlay,
|
||||
// test::{},
|
||||
// Buffer, DisplayMap, ExcerptRange, InlayId, MultiBuffer,
|
||||
// };
|
||||
// use project::Project;
|
||||
// use settings::SettingsStore;
|
||||
// use util::post_inc;
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
display_map::Inlay,
|
||||
test::{editor_test_context::EditorTestContext, marked_display_snapshot},
|
||||
Buffer, DisplayMap, ExcerptRange, InlayId, MultiBuffer,
|
||||
};
|
||||
use gpui::{font, Context as _};
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
use util::post_inc;
|
||||
|
||||
// #[gpui::test]
|
||||
// fn test_previous_word_start(cx: &mut gpui::AppContext) {
|
||||
// init_test(cx);
|
||||
#[gpui::test]
|
||||
fn test_previous_word_start(cx: &mut gpui::AppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
// assert_eq!(
|
||||
// previous_word_start(&snapshot, display_points[1]),
|
||||
// display_points[0]
|
||||
// );
|
||||
// }
|
||||
fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
previous_word_start(&snapshot, display_points[1]),
|
||||
display_points[0]
|
||||
);
|
||||
}
|
||||
|
||||
// assert("\nˇ ˇlorem", cx);
|
||||
// assert("ˇ\nˇ lorem", cx);
|
||||
// assert(" ˇloremˇ", cx);
|
||||
// assert("ˇ ˇlorem", cx);
|
||||
// assert(" ˇlorˇem", cx);
|
||||
// assert("\nlorem\nˇ ˇipsum", cx);
|
||||
// assert("\n\nˇ\nˇ", cx);
|
||||
// assert(" ˇlorem ˇipsum", cx);
|
||||
// assert("loremˇ-ˇipsum", cx);
|
||||
// assert("loremˇ-#$@ˇipsum", cx);
|
||||
// assert("ˇlorem_ˇipsum", cx);
|
||||
// assert(" ˇdefγˇ", cx);
|
||||
// assert(" ˇbcΔˇ", cx);
|
||||
// assert(" abˇ——ˇcd", cx);
|
||||
// }
|
||||
assert("\nˇ ˇlorem", cx);
|
||||
assert("ˇ\nˇ lorem", cx);
|
||||
assert(" ˇloremˇ", cx);
|
||||
assert("ˇ ˇlorem", cx);
|
||||
assert(" ˇlorˇem", cx);
|
||||
assert("\nlorem\nˇ ˇipsum", cx);
|
||||
assert("\n\nˇ\nˇ", cx);
|
||||
assert(" ˇlorem ˇipsum", cx);
|
||||
assert("loremˇ-ˇipsum", cx);
|
||||
assert("loremˇ-#$@ˇipsum", cx);
|
||||
assert("ˇlorem_ˇipsum", cx);
|
||||
assert(" ˇdefγˇ", cx);
|
||||
assert(" ˇbcΔˇ", cx);
|
||||
assert(" abˇ——ˇcd", cx);
|
||||
}
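These movement tests all use marked strings in which each 'ˇ' records a caret position; in `test_previous_word_start` above the second marker is the starting point and the first the expected result, while the `next_*` tests below flip that order. A small sketch of the marker-stripping step, assuming plain character offsets rather than the DisplayPoints that `marked_display_snapshot` returns:

```rust
// Strip 'ˇ' markers from a marked test string and record their offsets
// (character offsets here; the real helper returns DisplayPoints).
fn parse_marked(marked: &str) -> (String, Vec<usize>) {
    let mut text = String::new();
    let mut offsets = Vec::new();
    for ch in marked.chars() {
        if ch == 'ˇ' {
            offsets.push(text.chars().count());
        } else {
            text.push(ch);
        }
    }
    (text, offsets)
}

fn main() {
    // Example taken from the subword test below.
    let (text, carets) = parse_marked("lorem_ˇipˇsum");
    assert_eq!(text, "lorem_ipsum");
    assert_eq!(carets, vec![6, 8]); // expected result position, then start position
}
```
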
|
||||
|
||||
// #[gpui::test]
|
||||
// fn test_previous_subword_start(cx: &mut gpui::AppContext) {
|
||||
// init_test(cx);
|
||||
#[gpui::test]
|
||||
fn test_previous_subword_start(cx: &mut gpui::AppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
// assert_eq!(
|
||||
// previous_subword_start(&snapshot, display_points[1]),
|
||||
// display_points[0]
|
||||
// );
|
||||
// }
|
||||
fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
previous_subword_start(&snapshot, display_points[1]),
|
||||
display_points[0]
|
||||
);
|
||||
}
|
||||
|
||||
// // Subword boundaries are respected
|
||||
// assert("lorem_ˇipˇsum", cx);
|
||||
// assert("lorem_ˇipsumˇ", cx);
|
||||
// assert("ˇlorem_ˇipsum", cx);
|
||||
// assert("lorem_ˇipsum_ˇdolor", cx);
|
||||
// assert("loremˇIpˇsum", cx);
|
||||
// assert("loremˇIpsumˇ", cx);
|
||||
// Subword boundaries are respected
|
||||
assert("lorem_ˇipˇsum", cx);
|
||||
assert("lorem_ˇipsumˇ", cx);
|
||||
assert("ˇlorem_ˇipsum", cx);
|
||||
assert("lorem_ˇipsum_ˇdolor", cx);
|
||||
assert("loremˇIpˇsum", cx);
|
||||
assert("loremˇIpsumˇ", cx);
|
||||
|
||||
// // Word boundaries are still respected
|
||||
// assert("\nˇ ˇlorem", cx);
|
||||
// assert(" ˇloremˇ", cx);
|
||||
// assert(" ˇlorˇem", cx);
|
||||
// assert("\nlorem\nˇ ˇipsum", cx);
|
||||
// assert("\n\nˇ\nˇ", cx);
|
||||
// assert(" ˇlorem ˇipsum", cx);
|
||||
// assert("loremˇ-ˇipsum", cx);
|
||||
// assert("loremˇ-#$@ˇipsum", cx);
|
||||
// assert(" ˇdefγˇ", cx);
|
||||
// assert(" bcˇΔˇ", cx);
|
||||
// assert(" ˇbcδˇ", cx);
|
||||
// assert(" abˇ——ˇcd", cx);
|
||||
// }
|
||||
// Word boundaries are still respected
|
||||
assert("\nˇ ˇlorem", cx);
|
||||
assert(" ˇloremˇ", cx);
|
||||
assert(" ˇlorˇem", cx);
|
||||
assert("\nlorem\nˇ ˇipsum", cx);
|
||||
assert("\n\nˇ\nˇ", cx);
|
||||
assert(" ˇlorem ˇipsum", cx);
|
||||
assert("loremˇ-ˇipsum", cx);
|
||||
assert("loremˇ-#$@ˇipsum", cx);
|
||||
assert(" ˇdefγˇ", cx);
|
||||
assert(" bcˇΔˇ", cx);
|
||||
assert(" ˇbcδˇ", cx);
|
||||
assert(" abˇ——ˇcd", cx);
|
||||
}
|
||||
|
||||
// #[gpui::test]
|
||||
// fn test_find_preceding_boundary(cx: &mut gpui::AppContext) {
|
||||
// init_test(cx);
|
||||
#[gpui::test]
|
||||
fn test_find_preceding_boundary(cx: &mut gpui::AppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// fn assert(
|
||||
// marked_text: &str,
|
||||
// cx: &mut gpui::AppContext,
|
||||
// is_boundary: impl FnMut(char, char) -> bool,
|
||||
// ) {
|
||||
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
// assert_eq!(
|
||||
// find_preceding_boundary(
|
||||
// &snapshot,
|
||||
// display_points[1],
|
||||
// FindRange::MultiLine,
|
||||
// is_boundary
|
||||
// ),
|
||||
// display_points[0]
|
||||
// );
|
||||
// }
|
||||
fn assert(
|
||||
marked_text: &str,
|
||||
cx: &mut gpui::AppContext,
|
||||
is_boundary: impl FnMut(char, char) -> bool,
|
||||
) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
find_preceding_boundary(
|
||||
&snapshot,
|
||||
display_points[1],
|
||||
FindRange::MultiLine,
|
||||
is_boundary
|
||||
),
|
||||
display_points[0]
|
||||
);
|
||||
}
|
||||
|
||||
// assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
|
||||
// left == 'c' && right == 'd'
|
||||
// });
|
||||
// assert("abcdef\nˇgh\nijˇk", cx, |left, right| {
|
||||
// left == '\n' && right == 'g'
|
||||
// });
|
||||
// let mut line_count = 0;
|
||||
// assert("abcdef\nˇgh\nijˇk", cx, |left, _| {
|
||||
// if left == '\n' {
|
||||
// line_count += 1;
|
||||
// line_count == 2
|
||||
// } else {
|
||||
// false
|
||||
// }
|
||||
// });
|
||||
// }
|
||||
assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
|
||||
left == 'c' && right == 'd'
|
||||
});
|
||||
assert("abcdef\nˇgh\nijˇk", cx, |left, right| {
|
||||
left == '\n' && right == 'g'
|
||||
});
|
||||
let mut line_count = 0;
|
||||
assert("abcdef\nˇgh\nijˇk", cx, |left, _| {
|
||||
if left == '\n' {
|
||||
line_count += 1;
|
||||
line_count == 2
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// #[gpui::test]
|
||||
// fn test_find_preceding_boundary_with_inlays(cx: &mut gpui::AppContext) {
|
||||
// init_test(cx);
|
||||
#[gpui::test]
|
||||
fn test_find_preceding_boundary_with_inlays(cx: &mut gpui::AppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// let input_text = "abcdefghijklmnopqrstuvwxys";
|
||||
// let family_id = cx
|
||||
// .font_cache()
|
||||
// .load_family(&["Helvetica"], &Default::default())
|
||||
// .unwrap();
|
||||
// let font_id = cx
|
||||
// .font_cache()
|
||||
// .select_font(family_id, &Default::default())
|
||||
// .unwrap();
|
||||
// let font_size = 14.0;
|
||||
// let buffer = MultiBuffer::build_simple(input_text, cx);
|
||||
// let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
// let display_map =
|
||||
// cx.add_model(|cx| DisplayMap::new(buffer, font_id, font_size, None, 1, 1, cx));
|
||||
let input_text = "abcdefghijklmnopqrstuvwxys";
|
||||
let font = font("Helvetica");
|
||||
let font_size = px(14.0);
|
||||
let buffer = MultiBuffer::build_simple(input_text, cx);
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let display_map =
|
||||
cx.build_model(|cx| DisplayMap::new(buffer, font, font_size, None, 1, 1, cx));
|
||||
|
||||
// // add all kinds of inlays between two word boundaries: we should be able to cross them all, when looking for another boundary
|
||||
// let mut id = 0;
|
||||
// let inlays = (0..buffer_snapshot.len())
|
||||
// .map(|offset| {
|
||||
// [
|
||||
// Inlay {
|
||||
// id: InlayId::Suggestion(post_inc(&mut id)),
|
||||
// position: buffer_snapshot.anchor_at(offset, Bias::Left),
|
||||
// text: format!("test").into(),
|
||||
// },
|
||||
// Inlay {
|
||||
// id: InlayId::Suggestion(post_inc(&mut id)),
|
||||
// position: buffer_snapshot.anchor_at(offset, Bias::Right),
|
||||
// text: format!("test").into(),
|
||||
// },
|
||||
// Inlay {
|
||||
// id: InlayId::Hint(post_inc(&mut id)),
|
||||
// position: buffer_snapshot.anchor_at(offset, Bias::Left),
|
||||
// text: format!("test").into(),
|
||||
// },
|
||||
// Inlay {
|
||||
// id: InlayId::Hint(post_inc(&mut id)),
|
||||
// position: buffer_snapshot.anchor_at(offset, Bias::Right),
|
||||
// text: format!("test").into(),
|
||||
// },
|
||||
// ]
|
||||
// })
|
||||
// .flatten()
|
||||
// .collect();
|
||||
// let snapshot = display_map.update(cx, |map, cx| {
|
||||
// map.splice_inlays(Vec::new(), inlays, cx);
|
||||
// map.snapshot(cx)
|
||||
// });
|
||||
// add all kinds of inlays between two word boundaries: we should be able to cross them all, when looking for another boundary
|
||||
let mut id = 0;
|
||||
let inlays = (0..buffer_snapshot.len())
|
||||
.map(|offset| {
|
||||
[
|
||||
Inlay {
|
||||
id: InlayId::Suggestion(post_inc(&mut id)),
|
||||
position: buffer_snapshot.anchor_at(offset, Bias::Left),
|
||||
text: format!("test").into(),
|
||||
},
|
||||
Inlay {
|
||||
id: InlayId::Suggestion(post_inc(&mut id)),
|
||||
position: buffer_snapshot.anchor_at(offset, Bias::Right),
|
||||
text: format!("test").into(),
|
||||
},
|
||||
Inlay {
|
||||
id: InlayId::Hint(post_inc(&mut id)),
|
||||
position: buffer_snapshot.anchor_at(offset, Bias::Left),
|
||||
text: format!("test").into(),
|
||||
},
|
||||
Inlay {
|
||||
id: InlayId::Hint(post_inc(&mut id)),
|
||||
position: buffer_snapshot.anchor_at(offset, Bias::Right),
|
||||
text: format!("test").into(),
|
||||
},
|
||||
]
|
||||
})
|
||||
.flatten()
|
||||
.collect();
|
||||
let snapshot = display_map.update(cx, |map, cx| {
|
||||
map.splice_inlays(Vec::new(), inlays, cx);
|
||||
map.snapshot(cx)
|
||||
});
|
||||
|
||||
// assert_eq!(
|
||||
// find_preceding_boundary(
|
||||
// &snapshot,
|
||||
// buffer_snapshot.len().to_display_point(&snapshot),
|
||||
// FindRange::MultiLine,
|
||||
// |left, _| left == 'e',
|
||||
// ),
|
||||
// snapshot
|
||||
// .buffer_snapshot
|
||||
// .offset_to_point(5)
|
||||
// .to_display_point(&snapshot),
|
||||
// "Should not stop at inlays when looking for boundaries"
|
||||
// );
|
||||
// }
|
||||
assert_eq!(
|
||||
find_preceding_boundary(
|
||||
&snapshot,
|
||||
buffer_snapshot.len().to_display_point(&snapshot),
|
||||
FindRange::MultiLine,
|
||||
|left, _| left == 'e',
|
||||
),
|
||||
snapshot
|
||||
.buffer_snapshot
|
||||
.offset_to_point(5)
|
||||
.to_display_point(&snapshot),
|
||||
"Should not stop at inlays when looking for boundaries"
|
||||
);
|
||||
}
|
||||
|
||||
// #[gpui::test]
|
||||
// fn test_next_word_end(cx: &mut gpui::AppContext) {
|
||||
// init_test(cx);
|
||||
#[gpui::test]
|
||||
fn test_next_word_end(cx: &mut gpui::AppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
// assert_eq!(
|
||||
// next_word_end(&snapshot, display_points[0]),
|
||||
// display_points[1]
|
||||
// );
|
||||
// }
|
||||
fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
next_word_end(&snapshot, display_points[0]),
|
||||
display_points[1]
|
||||
);
|
||||
}
|
||||
|
||||
// assert("\nˇ loremˇ", cx);
|
||||
// assert(" ˇloremˇ", cx);
|
||||
// assert(" lorˇemˇ", cx);
|
||||
// assert(" loremˇ ˇ\nipsum\n", cx);
|
||||
// assert("\nˇ\nˇ\n\n", cx);
|
||||
// assert("loremˇ ipsumˇ ", cx);
|
||||
// assert("loremˇ-ˇipsum", cx);
|
||||
// assert("loremˇ#$@-ˇipsum", cx);
|
||||
// assert("loremˇ_ipsumˇ", cx);
|
||||
// assert(" ˇbcΔˇ", cx);
|
||||
// assert(" abˇ——ˇcd", cx);
|
||||
// }
|
||||
assert("\nˇ loremˇ", cx);
|
||||
assert(" ˇloremˇ", cx);
|
||||
assert(" lorˇemˇ", cx);
|
||||
assert(" loremˇ ˇ\nipsum\n", cx);
|
||||
assert("\nˇ\nˇ\n\n", cx);
|
||||
assert("loremˇ ipsumˇ ", cx);
|
||||
assert("loremˇ-ˇipsum", cx);
|
||||
assert("loremˇ#$@-ˇipsum", cx);
|
||||
assert("loremˇ_ipsumˇ", cx);
|
||||
assert(" ˇbcΔˇ", cx);
|
||||
assert(" abˇ——ˇcd", cx);
|
||||
}
|
||||
|
||||
// #[gpui::test]
|
||||
// fn test_next_subword_end(cx: &mut gpui::AppContext) {
|
||||
// init_test(cx);
|
||||
#[gpui::test]
|
||||
fn test_next_subword_end(cx: &mut gpui::AppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
// assert_eq!(
|
||||
// next_subword_end(&snapshot, display_points[0]),
|
||||
// display_points[1]
|
||||
// );
|
||||
// }
|
||||
fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
next_subword_end(&snapshot, display_points[0]),
|
||||
display_points[1]
|
||||
);
|
||||
}
|
||||
|
||||
// // Subword boundaries are respected
|
||||
// assert("loˇremˇ_ipsum", cx);
|
||||
// assert("ˇloremˇ_ipsum", cx);
|
||||
// assert("loremˇ_ipsumˇ", cx);
|
||||
// assert("loremˇ_ipsumˇ_dolor", cx);
|
||||
// assert("loˇremˇIpsum", cx);
|
||||
// assert("loremˇIpsumˇDolor", cx);
|
||||
// Subword boundaries are respected
|
||||
assert("loˇremˇ_ipsum", cx);
|
||||
assert("ˇloremˇ_ipsum", cx);
|
||||
assert("loremˇ_ipsumˇ", cx);
|
||||
assert("loremˇ_ipsumˇ_dolor", cx);
|
||||
assert("loˇremˇIpsum", cx);
|
||||
assert("loremˇIpsumˇDolor", cx);
|
||||
|
||||
// // Word boundaries are still respected
|
||||
// assert("\nˇ loremˇ", cx);
|
||||
// assert(" ˇloremˇ", cx);
|
||||
// assert(" lorˇemˇ", cx);
|
||||
// assert(" loremˇ ˇ\nipsum\n", cx);
|
||||
// assert("\nˇ\nˇ\n\n", cx);
|
||||
// assert("loremˇ ipsumˇ ", cx);
|
||||
// assert("loremˇ-ˇipsum", cx);
|
||||
// assert("loremˇ#$@-ˇipsum", cx);
|
||||
// assert("loremˇ_ipsumˇ", cx);
|
||||
// assert(" ˇbcˇΔ", cx);
|
||||
// assert(" abˇ——ˇcd", cx);
|
||||
// }
|
||||
// Word boundaries are still respected
|
||||
assert("\nˇ loremˇ", cx);
|
||||
assert(" ˇloremˇ", cx);
|
||||
assert(" lorˇemˇ", cx);
|
||||
assert(" loremˇ ˇ\nipsum\n", cx);
|
||||
assert("\nˇ\nˇ\n\n", cx);
|
||||
assert("loremˇ ipsumˇ ", cx);
|
||||
assert("loremˇ-ˇipsum", cx);
|
||||
assert("loremˇ#$@-ˇipsum", cx);
|
||||
assert("loremˇ_ipsumˇ", cx);
|
||||
assert(" ˇbcˇΔ", cx);
|
||||
assert(" abˇ——ˇcd", cx);
|
||||
}
|
||||
|
||||
// #[gpui::test]
|
||||
// fn test_find_boundary(cx: &mut gpui::AppContext) {
|
||||
// init_test(cx);
|
||||
#[gpui::test]
|
||||
fn test_find_boundary(cx: &mut gpui::AppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// fn assert(
|
||||
// marked_text: &str,
|
||||
// cx: &mut gpui::AppContext,
|
||||
// is_boundary: impl FnMut(char, char) -> bool,
|
||||
// ) {
|
||||
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
// assert_eq!(
|
||||
// find_boundary(
|
||||
// &snapshot,
|
||||
// display_points[0],
|
||||
// FindRange::MultiLine,
|
||||
// is_boundary
|
||||
// ),
|
||||
// display_points[1]
|
||||
// );
|
||||
// }
|
||||
fn assert(
|
||||
marked_text: &str,
|
||||
cx: &mut gpui::AppContext,
|
||||
is_boundary: impl FnMut(char, char) -> bool,
|
||||
) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
find_boundary(
|
||||
&snapshot,
|
||||
display_points[0],
|
||||
FindRange::MultiLine,
|
||||
is_boundary
|
||||
),
|
||||
display_points[1]
|
||||
);
|
||||
}
|
||||
|
||||
// assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
|
||||
// left == 'j' && right == 'k'
|
||||
// });
|
||||
// assert("abˇcdef\ngh\nˇijk", cx, |left, right| {
|
||||
// left == '\n' && right == 'i'
|
||||
// });
|
||||
// let mut line_count = 0;
|
||||
// assert("abcˇdef\ngh\nˇijk", cx, |left, _| {
|
||||
// if left == '\n' {
|
||||
// line_count += 1;
|
||||
// line_count == 2
|
||||
// } else {
|
||||
// false
|
||||
// }
|
||||
// });
|
||||
// }
|
||||
assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
|
||||
left == 'j' && right == 'k'
|
||||
});
|
||||
assert("abˇcdef\ngh\nˇijk", cx, |left, right| {
|
||||
left == '\n' && right == 'i'
|
||||
});
|
||||
let mut line_count = 0;
|
||||
assert("abcˇdef\ngh\nˇijk", cx, |left, _| {
|
||||
if left == '\n' {
|
||||
line_count += 1;
|
||||
line_count == 2
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// #[gpui::test]
|
||||
// fn test_surrounding_word(cx: &mut gpui::AppContext) {
|
||||
// init_test(cx);
|
||||
#[gpui::test]
|
||||
fn test_surrounding_word(cx: &mut gpui::AppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
// assert_eq!(
|
||||
// surrounding_word(&snapshot, display_points[1]),
|
||||
// display_points[0]..display_points[2],
|
||||
// "{}",
|
||||
// marked_text.to_string()
|
||||
// );
|
||||
// }
|
||||
fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
surrounding_word(&snapshot, display_points[1]),
|
||||
display_points[0]..display_points[2],
|
||||
"{}",
|
||||
marked_text.to_string()
|
||||
);
|
||||
}
|
||||
|
||||
// assert("ˇˇloremˇ ipsum", cx);
|
||||
// assert("ˇloˇremˇ ipsum", cx);
|
||||
// assert("ˇloremˇˇ ipsum", cx);
|
||||
// assert("loremˇ ˇ ˇipsum", cx);
|
||||
// assert("lorem\nˇˇˇ\nipsum", cx);
|
||||
// assert("lorem\nˇˇipsumˇ", cx);
|
||||
// assert("loremˇ,ˇˇ ipsum", cx);
|
||||
// assert("ˇloremˇˇ, ipsum", cx);
|
||||
// }
|
||||
assert("ˇˇloremˇ ipsum", cx);
|
||||
assert("ˇloˇremˇ ipsum", cx);
|
||||
assert("ˇloremˇˇ ipsum", cx);
|
||||
assert("loremˇ ˇ ˇipsum", cx);
|
||||
assert("lorem\nˇˇˇ\nipsum", cx);
|
||||
assert("lorem\nˇˇipsumˇ", cx);
|
||||
assert("loremˇ,ˇˇ ipsum", cx);
|
||||
assert("ˇloremˇˇ, ipsum", cx);
|
||||
}
|
||||
|
||||
// #[gpui::test]
|
||||
// async fn test_move_up_and_down_with_excerpts(cx: &mut gpui::TestAppContext) {
|
||||
// cx.update(|cx| {
|
||||
// init_test(cx);
|
||||
// });
|
||||
#[gpui::test]
|
||||
async fn test_move_up_and_down_with_excerpts(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
init_test(cx);
|
||||
});
|
||||
|
||||
// let mut cx = EditorTestContext::new(cx).await;
|
||||
// let editor = cx.editor.clone();
|
||||
// let window = cx.window.clone();
|
||||
// cx.update_window(window, |cx| {
|
||||
// let text_layout_details =
|
||||
// editor.read_with(cx, |editor, cx| editor.text_layout_details(cx));
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
let editor = cx.editor.clone();
|
||||
let window = cx.window.clone();
|
||||
cx.update_window(window, |_, cx| {
|
||||
let text_layout_details =
|
||||
editor.update(cx, |editor, cx| editor.text_layout_details(cx));
|
||||
|
||||
// let family_id = cx
|
||||
// .font_cache()
|
||||
// .load_family(&["Helvetica"], &Default::default())
|
||||
// .unwrap();
|
||||
// let font_id = cx
|
||||
// .font_cache()
|
||||
// .select_font(family_id, &Default::default())
|
||||
// .unwrap();
|
||||
let font = font("Helvetica");
|
||||
|
||||
// let buffer =
|
||||
// cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abc\ndefg\nhijkl\nmn"));
|
||||
// let multibuffer = cx.add_model(|cx| {
|
||||
// let mut multibuffer = MultiBuffer::new(0);
|
||||
// multibuffer.push_excerpts(
|
||||
// buffer.clone(),
|
||||
// [
|
||||
// ExcerptRange {
|
||||
// context: Point::new(0, 0)..Point::new(1, 4),
|
||||
// primary: None,
|
||||
// },
|
||||
// ExcerptRange {
|
||||
// context: Point::new(2, 0)..Point::new(3, 2),
|
||||
// primary: None,
|
||||
// },
|
||||
// ],
|
||||
// cx,
|
||||
// );
|
||||
// multibuffer
|
||||
// });
|
||||
// let display_map =
|
||||
// cx.add_model(|cx| DisplayMap::new(multibuffer, font_id, 14.0, None, 2, 2, cx));
|
||||
// let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let buffer = cx
|
||||
.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abc\ndefg\nhijkl\nmn"));
|
||||
let multibuffer = cx.build_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(0);
|
||||
multibuffer.push_excerpts(
|
||||
buffer.clone(),
|
||||
[
|
||||
ExcerptRange {
|
||||
context: Point::new(0, 0)..Point::new(1, 4),
|
||||
primary: None,
|
||||
},
|
||||
ExcerptRange {
|
||||
context: Point::new(2, 0)..Point::new(3, 2),
|
||||
primary: None,
|
||||
},
|
||||
],
|
||||
cx,
|
||||
);
|
||||
multibuffer
|
||||
});
|
||||
let display_map =
|
||||
cx.build_model(|cx| DisplayMap::new(multibuffer, font, px(14.0), None, 2, 2, cx));
|
||||
let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
// assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\nhijkl\nmn");
|
||||
assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\nhijkl\nmn");
|
||||
|
||||
// let col_2_x = snapshot.x_for_point(DisplayPoint::new(2, 2), &text_layout_details);
|
||||
let col_2_x =
|
||||
snapshot.x_for_display_point(DisplayPoint::new(2, 2), &text_layout_details);
|
||||
|
||||
// // Can't move up into the first excerpt's header
|
||||
// assert_eq!(
|
||||
// up(
|
||||
// &snapshot,
|
||||
// DisplayPoint::new(2, 2),
|
||||
// SelectionGoal::HorizontalPosition(col_2_x),
|
||||
// false,
|
||||
// &text_layout_details
|
||||
// ),
|
||||
// (
|
||||
// DisplayPoint::new(2, 0),
|
||||
// SelectionGoal::HorizontalPosition(0.0)
|
||||
// ),
|
||||
// );
|
||||
// assert_eq!(
|
||||
// up(
|
||||
// &snapshot,
|
||||
// DisplayPoint::new(2, 0),
|
||||
// SelectionGoal::None,
|
||||
// false,
|
||||
// &text_layout_details
|
||||
// ),
|
||||
// (
|
||||
// DisplayPoint::new(2, 0),
|
||||
// SelectionGoal::HorizontalPosition(0.0)
|
||||
// ),
|
||||
// );
|
||||
// Can't move up into the first excerpt's header
|
||||
assert_eq!(
|
||||
up(
|
||||
&snapshot,
|
||||
DisplayPoint::new(2, 2),
|
||||
SelectionGoal::HorizontalPosition(col_2_x.0),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(2, 0),
|
||||
SelectionGoal::HorizontalPosition(0.0)
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
up(
|
||||
&snapshot,
|
||||
DisplayPoint::new(2, 0),
|
||||
SelectionGoal::None,
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(2, 0),
|
||||
SelectionGoal::HorizontalPosition(0.0)
|
||||
),
|
||||
);
|
||||
|
||||
// let col_4_x = snapshot.x_for_point(DisplayPoint::new(3, 4), &text_layout_details);
|
||||
let col_4_x =
|
||||
snapshot.x_for_display_point(DisplayPoint::new(3, 4), &text_layout_details);
|
||||
|
||||
// // Move up and down within first excerpt
|
||||
// assert_eq!(
|
||||
// up(
|
||||
// &snapshot,
|
||||
// DisplayPoint::new(3, 4),
|
||||
// SelectionGoal::HorizontalPosition(col_4_x),
|
||||
// false,
|
||||
// &text_layout_details
|
||||
// ),
|
||||
// (
|
||||
// DisplayPoint::new(2, 3),
|
||||
// SelectionGoal::HorizontalPosition(col_4_x)
|
||||
// ),
|
||||
// );
|
||||
// assert_eq!(
|
||||
// down(
|
||||
// &snapshot,
|
||||
// DisplayPoint::new(2, 3),
|
||||
// SelectionGoal::HorizontalPosition(col_4_x),
|
||||
// false,
|
||||
// &text_layout_details
|
||||
// ),
|
||||
// (
|
||||
// DisplayPoint::new(3, 4),
|
||||
// SelectionGoal::HorizontalPosition(col_4_x)
|
||||
// ),
|
||||
// );
|
||||
// Move up and down within first excerpt
|
||||
assert_eq!(
|
||||
up(
|
||||
&snapshot,
|
||||
DisplayPoint::new(3, 4),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.0),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(2, 3),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.0)
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
down(
|
||||
&snapshot,
|
||||
DisplayPoint::new(2, 3),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.0),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(3, 4),
|
||||
SelectionGoal::HorizontalPosition(col_4_x.0)
|
||||
),
|
||||
);
|
||||
|
||||
// let col_5_x = snapshot.x_for_point(DisplayPoint::new(6, 5), &text_layout_details);
|
||||
let col_5_x =
|
||||
snapshot.x_for_display_point(DisplayPoint::new(6, 5), &text_layout_details);
|
||||
|
||||
// // Move up and down across second excerpt's header
|
||||
// assert_eq!(
|
||||
// up(
|
||||
// &snapshot,
|
||||
// DisplayPoint::new(6, 5),
|
||||
// SelectionGoal::HorizontalPosition(col_5_x),
|
||||
// false,
|
||||
// &text_layout_details
|
||||
// ),
|
||||
// (
|
||||
// DisplayPoint::new(3, 4),
|
||||
// SelectionGoal::HorizontalPosition(col_5_x)
|
||||
// ),
|
||||
// );
|
||||
// assert_eq!(
|
||||
// down(
|
||||
// &snapshot,
|
||||
// DisplayPoint::new(3, 4),
|
||||
// SelectionGoal::HorizontalPosition(col_5_x),
|
||||
// false,
|
||||
// &text_layout_details
|
||||
// ),
|
||||
// (
|
||||
// DisplayPoint::new(6, 5),
|
||||
// SelectionGoal::HorizontalPosition(col_5_x)
|
||||
// ),
|
||||
// );
|
||||
// Move up and down across second excerpt's header
|
||||
assert_eq!(
|
||||
up(
|
||||
&snapshot,
|
||||
DisplayPoint::new(6, 5),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.0),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(3, 4),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.0)
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
down(
|
||||
&snapshot,
|
||||
DisplayPoint::new(3, 4),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.0),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(6, 5),
|
||||
SelectionGoal::HorizontalPosition(col_5_x.0)
|
||||
),
|
||||
);
|
||||
|
||||
// let max_point_x = snapshot.x_for_point(DisplayPoint::new(7, 2), &text_layout_details);
|
||||
let max_point_x =
|
||||
snapshot.x_for_display_point(DisplayPoint::new(7, 2), &text_layout_details);
|
||||
|
||||
// // Can't move down off the end
|
||||
// assert_eq!(
|
||||
// down(
|
||||
// &snapshot,
|
||||
// DisplayPoint::new(7, 0),
|
||||
// SelectionGoal::HorizontalPosition(0.0),
|
||||
// false,
|
||||
// &text_layout_details
|
||||
// ),
|
||||
// (
|
||||
// DisplayPoint::new(7, 2),
|
||||
// SelectionGoal::HorizontalPosition(max_point_x)
|
||||
// ),
|
||||
// );
|
||||
// assert_eq!(
|
||||
// down(
|
||||
// &snapshot,
|
||||
// DisplayPoint::new(7, 2),
|
||||
// SelectionGoal::HorizontalPosition(max_point_x),
|
||||
// false,
|
||||
// &text_layout_details
|
||||
// ),
|
||||
// (
|
||||
// DisplayPoint::new(7, 2),
|
||||
// SelectionGoal::HorizontalPosition(max_point_x)
|
||||
// ),
|
||||
// );
|
||||
// });
|
||||
// }
|
||||
// Can't move down off the end
|
||||
assert_eq!(
|
||||
down(
|
||||
&snapshot,
|
||||
DisplayPoint::new(7, 0),
|
||||
SelectionGoal::HorizontalPosition(0.0),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(7, 2),
|
||||
SelectionGoal::HorizontalPosition(max_point_x.0)
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
down(
|
||||
&snapshot,
|
||||
DisplayPoint::new(7, 2),
|
||||
SelectionGoal::HorizontalPosition(max_point_x.0),
|
||||
false,
|
||||
&text_layout_details
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(7, 2),
|
||||
SelectionGoal::HorizontalPosition(max_point_x.0)
|
||||
),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
// fn init_test(cx: &mut gpui::AppContext) {
|
||||
// cx.set_global(SettingsStore::test(cx));
|
||||
// theme::init(cx);
|
||||
// language::init(cx);
|
||||
// crate::init(cx);
|
||||
// Project::init_settings(cx);
|
||||
// }
|
||||
// }
|
||||
fn init_test(cx: &mut gpui::AppContext) {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
language::init(cx);
|
||||
crate::init(cx);
|
||||
Project::init_settings(cx);
|
||||
}
|
||||
}
|
||||
|
@ -358,7 +358,7 @@ impl AppContext {
|
||||
{
|
||||
let entity_id = entity.entity_id();
|
||||
let handle = entity.downgrade();
|
||||
self.observers.insert(
|
||||
let (subscription, activate) = self.observers.insert(
|
||||
entity_id,
|
||||
Box::new(move |cx| {
|
||||
if let Some(handle) = E::upgrade_from(&handle) {
|
||||
@ -367,7 +367,9 @@ impl AppContext {
|
||||
false
|
||||
}
|
||||
}),
|
||||
)
|
||||
);
|
||||
self.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn subscribe<T, E, Evt>(
|
||||
@ -398,8 +400,7 @@ impl AppContext {
|
||||
{
|
||||
let entity_id = entity.entity_id();
|
||||
let entity = entity.downgrade();
|
||||
|
||||
self.event_listeners.insert(
|
||||
let (subscription, activate) = self.event_listeners.insert(
|
||||
entity_id,
|
||||
(
|
||||
TypeId::of::<Evt>(),
|
||||
@ -412,7 +413,9 @@ impl AppContext {
|
||||
}
|
||||
}),
|
||||
),
|
||||
)
|
||||
);
|
||||
self.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn windows(&self) -> Vec<AnyWindowHandle> {
|
||||
@ -873,13 +876,15 @@ impl AppContext {
|
||||
&mut self,
|
||||
mut f: impl FnMut(&mut Self) + 'static,
|
||||
) -> Subscription {
|
||||
self.global_observers.insert(
|
||||
let (subscription, activate) = self.global_observers.insert(
|
||||
TypeId::of::<G>(),
|
||||
Box::new(move |cx| {
|
||||
f(cx);
|
||||
true
|
||||
}),
|
||||
)
|
||||
);
|
||||
self.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
/// Move the global of the given type to the stack.
|
||||
@ -903,7 +908,7 @@ impl AppContext {
|
||||
&mut self,
|
||||
on_new: impl 'static + Fn(&mut V, &mut ViewContext<V>),
|
||||
) -> Subscription {
|
||||
self.new_view_observers.insert(
|
||||
let (subscription, activate) = self.new_view_observers.insert(
|
||||
TypeId::of::<V>(),
|
||||
Box::new(move |any_view: AnyView, cx: &mut WindowContext| {
|
||||
any_view
|
||||
@ -913,7 +918,9 @@ impl AppContext {
|
||||
on_new(view_state, cx);
|
||||
})
|
||||
}),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn observe_release<E, T>(
|
||||
@ -925,13 +932,15 @@ impl AppContext {
|
||||
E: Entity<T>,
|
||||
T: 'static,
|
||||
{
|
||||
self.release_listeners.insert(
|
||||
let (subscription, activate) = self.release_listeners.insert(
|
||||
handle.entity_id(),
|
||||
Box::new(move |entity, cx| {
|
||||
let entity = entity.downcast_mut().expect("invalid entity type");
|
||||
on_release(entity, cx)
|
||||
}),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
pub(crate) fn push_text_style(&mut self, text_style: TextStyleRefinement) {
|
||||
@ -996,13 +1005,15 @@ impl AppContext {
|
||||
where
|
||||
Fut: 'static + Future<Output = ()>,
|
||||
{
|
||||
self.quit_observers.insert(
|
||||
let (subscription, activate) = self.quit_observers.insert(
|
||||
(),
|
||||
Box::new(move |cx| {
|
||||
let future = on_quit(cx);
|
||||
async move { future.await }.boxed_local()
|
||||
}),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -482,10 +482,6 @@ impl<T: 'static> WeakModel<T> {
|
||||
/// Update the entity referenced by this model with the given function if
|
||||
/// the referenced entity still exists. Returns an error if the entity has
|
||||
/// been released.
|
||||
///
|
||||
/// The update function receives a context appropriate for its environment.
|
||||
/// When updating in an `AppContext`, it receives a `ModelContext`.
|
||||
/// When updating in a `WindowContext`, it receives a `ViewContext`.
|
||||
pub fn update<C, R>(
|
||||
&self,
|
||||
cx: &mut C,
|
||||
@ -501,6 +497,21 @@ impl<T: 'static> WeakModel<T> {
|
||||
.map(|this| cx.update_model(&this, update)),
|
||||
)
|
||||
}
|
||||
|
||||
/// Reads the entity referenced by this model with the given function if
|
||||
/// the referenced entity still exists. Returns an error if the entity has
|
||||
/// been released.
|
||||
pub fn read_with<C, R>(&self, cx: &C, read: impl FnOnce(&T, &AppContext) -> R) -> Result<R>
|
||||
where
|
||||
C: Context,
|
||||
Result<C::Result<R>>: crate::Flatten<R>,
|
||||
{
|
||||
crate::Flatten::flatten(
|
||||
self.upgrade()
|
||||
.ok_or_else(|| anyhow!("entity release"))
|
||||
.map(|this| cx.read_model(&this, read)),
|
||||
)
|
||||
}
|
||||
}
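// A minimal usage sketch (illustrative, not part of this change): how a caller
// might exercise `WeakModel::update` and the new `read_with`. `weak_counter`,
// its `count` field, and `cx` are hypothetical; both calls return an error once
// the underlying entity has been released.
//
// let weak_counter = counter.downgrade();
// let next = weak_counter.update(cx, |counter, _cx| { counter.count += 1; counter.count })?;
// let current = weak_counter.read_with(cx, |counter, _app| counter.count)?;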
|
||||
|
||||
impl<T> Hash for WeakModel<T> {
|
||||
|
@ -88,13 +88,15 @@ impl<'a, T: 'static> ModelContext<'a, T> {
|
||||
where
|
||||
T: 'static,
|
||||
{
|
||||
self.app.release_listeners.insert(
|
||||
let (subscription, activate) = self.app.release_listeners.insert(
|
||||
self.model_state.entity_id,
|
||||
Box::new(move |this, cx| {
|
||||
let this = this.downcast_mut().expect("invalid entity type");
|
||||
on_release(this, cx);
|
||||
}),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn observe_release<T2, E>(
|
||||
@ -109,7 +111,7 @@ impl<'a, T: 'static> ModelContext<'a, T> {
|
||||
{
|
||||
let entity_id = entity.entity_id();
|
||||
let this = self.weak_model();
|
||||
self.app.release_listeners.insert(
|
||||
let (subscription, activate) = self.app.release_listeners.insert(
|
||||
entity_id,
|
||||
Box::new(move |entity, cx| {
|
||||
let entity = entity.downcast_mut().expect("invalid entity type");
|
||||
@ -117,7 +119,9 @@ impl<'a, T: 'static> ModelContext<'a, T> {
|
||||
this.update(cx, |this, cx| on_release(this, entity, cx));
|
||||
}
|
||||
}),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn observe_global<G: 'static>(
|
||||
@ -128,10 +132,12 @@ impl<'a, T: 'static> ModelContext<'a, T> {
|
||||
T: 'static,
|
||||
{
|
||||
let handle = self.weak_model();
|
||||
self.global_observers.insert(
|
||||
let (subscription, activate) = self.global_observers.insert(
|
||||
TypeId::of::<G>(),
|
||||
Box::new(move |cx| handle.update(cx, |view, cx| f(view, cx)).is_ok()),
|
||||
)
|
||||
);
|
||||
self.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn on_app_quit<Fut>(
|
||||
@ -143,7 +149,7 @@ impl<'a, T: 'static> ModelContext<'a, T> {
|
||||
T: 'static,
|
||||
{
|
||||
let handle = self.weak_model();
|
||||
self.app.quit_observers.insert(
|
||||
let (subscription, activate) = self.app.quit_observers.insert(
|
||||
(),
|
||||
Box::new(move |cx| {
|
||||
let future = handle.update(cx, |entity, cx| on_quit(entity, cx)).ok();
|
||||
@ -154,7 +160,9 @@ impl<'a, T: 'static> ModelContext<'a, T> {
|
||||
}
|
||||
.boxed_local()
|
||||
}),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn notify(&mut self) {
|
||||
|
@ -1,13 +1,13 @@
|
||||
use crate::{
|
||||
div, Action, AnyView, AnyWindowHandle, AppCell, AppContext, AsyncAppContext,
|
||||
BackgroundExecutor, Context, Div, Entity, EventEmitter, ForegroundExecutor, InputEvent,
|
||||
KeyDownEvent, Keystroke, Model, ModelContext, Render, Result, Task, TestDispatcher,
|
||||
TestPlatform, TestWindow, TestWindowHandlers, View, ViewContext, VisualContext, WindowContext,
|
||||
WindowHandle, WindowOptions,
|
||||
BackgroundExecutor, Bounds, Context, Div, Entity, EventEmitter, ForegroundExecutor, InputEvent,
|
||||
KeyDownEvent, Keystroke, Model, ModelContext, Pixels, PlatformWindow, Point, Render, Result,
|
||||
Size, Task, TestDispatcher, TestPlatform, TestWindow, TestWindowHandlers, View, ViewContext,
|
||||
VisualContext, WindowBounds, WindowContext, WindowHandle, WindowOptions,
|
||||
};
|
||||
use anyhow::{anyhow, bail};
|
||||
use futures::{Stream, StreamExt};
|
||||
use std::{future::Future, ops::Deref, rc::Rc, sync::Arc, time::Duration};
|
||||
use std::{future::Future, mem, ops::Deref, rc::Rc, sync::Arc, time::Duration};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct TestAppContext {
|
||||
@ -170,6 +170,45 @@ impl TestAppContext {
|
||||
self.test_platform.has_pending_prompt()
|
||||
}
|
||||
|
||||
pub fn simulate_window_resize(&self, window_handle: AnyWindowHandle, size: Size<Pixels>) {
|
||||
let (mut handlers, scale_factor) = self
|
||||
.app
|
||||
.borrow_mut()
|
||||
.update_window(window_handle, |_, cx| {
|
||||
let platform_window = cx.window.platform_window.as_test().unwrap();
|
||||
let scale_factor = platform_window.scale_factor();
|
||||
match &mut platform_window.bounds {
|
||||
WindowBounds::Fullscreen | WindowBounds::Maximized => {
|
||||
platform_window.bounds = WindowBounds::Fixed(Bounds {
|
||||
origin: Point::default(),
|
||||
size: size.map(|pixels| f64::from(pixels).into()),
|
||||
});
|
||||
}
|
||||
WindowBounds::Fixed(bounds) => {
|
||||
bounds.size = size.map(|pixels| f64::from(pixels).into());
|
||||
}
|
||||
}
|
||||
|
||||
(
|
||||
mem::take(&mut platform_window.handlers.lock().resize),
|
||||
scale_factor,
|
||||
)
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
for handler in &mut handlers {
|
||||
handler(size, scale_factor);
|
||||
}
|
||||
|
||||
self.app
|
||||
.borrow_mut()
|
||||
.update_window(window_handle, |_, cx| {
|
||||
let platform_window = cx.window.platform_window.as_test().unwrap();
|
||||
platform_window.handlers.lock().resize = handlers;
|
||||
})
|
||||
.unwrap();
|
||||
}
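// Hedged usage sketch for `simulate_window_resize` (the window handle and the
// dimensions are hypothetical): drive the registered resize handlers from a test.
//
// cx.simulate_window_resize(
//     window_handle,
//     Size { width: px(800.), height: px(600.) },
// );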
|
||||
|
||||
pub fn spawn<Fut, R>(&self, f: impl FnOnce(AsyncAppContext) -> Fut) -> Task<R>
|
||||
where
|
||||
Fut: Future<Output = R> + 'static,
|
||||
@ -343,12 +382,15 @@ impl TestAppContext {
|
||||
use smol::future::FutureExt as _;
|
||||
|
||||
async {
|
||||
while notifications.next().await.is_some() {
|
||||
loop {
|
||||
if model.update(self, &mut predicate) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if notifications.next().await.is_none() {
|
||||
bail!("model dropped")
|
||||
}
|
||||
}
|
||||
bail!("model dropped")
|
||||
}
|
||||
.race(timer.map(|_| Err(anyhow!("condition timed out"))))
|
||||
.await
|
||||
|
48
crates/gpui2/src/elements/canvas.rs
Normal file
@ -0,0 +1,48 @@
|
||||
use crate::{Bounds, Element, IntoElement, Pixels, StyleRefinement, Styled, WindowContext};
|
||||
|
||||
pub fn canvas(callback: impl 'static + FnOnce(Bounds<Pixels>, &mut WindowContext)) -> Canvas {
|
||||
Canvas {
|
||||
paint_callback: Box::new(callback),
|
||||
style: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Canvas {
|
||||
paint_callback: Box<dyn FnOnce(Bounds<Pixels>, &mut WindowContext)>,
|
||||
style: StyleRefinement,
|
||||
}
|
||||
|
||||
impl IntoElement for Canvas {
|
||||
type Element = Self;
|
||||
|
||||
fn element_id(&self) -> Option<crate::ElementId> {
|
||||
None
|
||||
}
|
||||
|
||||
fn into_element(self) -> Self::Element {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Element for Canvas {
|
||||
type State = ();
|
||||
|
||||
fn layout(
|
||||
&mut self,
|
||||
_: Option<Self::State>,
|
||||
cx: &mut WindowContext,
|
||||
) -> (crate::LayoutId, Self::State) {
|
||||
let layout_id = cx.request_layout(&self.style.clone().into(), []);
|
||||
(layout_id, ())
|
||||
}
|
||||
|
||||
fn paint(self, bounds: Bounds<Pixels>, _: &mut (), cx: &mut WindowContext) {
|
||||
(self.paint_callback)(bounds, cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Styled for Canvas {
|
||||
fn style(&mut self) -> &mut crate::StyleRefinement {
|
||||
&mut self.style
|
||||
}
|
||||
}
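// Hedged usage sketch for the new `canvas` element (the closure body is
// illustrative): the paint callback receives the element's final bounds and a
// `WindowContext` for immediate-mode drawing.
//
// let element = canvas(|bounds, _cx| {
//     // paint directly within `bounds` here
// });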
|
@ -221,20 +221,6 @@ pub trait InteractiveElement: Sized + Element {
|
||||
|
||||
/// Add a listener for the given action, fires during the bubble event phase
|
||||
fn on_action<A: Action>(mut self, listener: impl Fn(&A, &mut WindowContext) + 'static) -> Self {
|
||||
// NOTE: this debug assert has the side-effect of working around
|
||||
// a bug where a crate consisting only of action definitions does
|
||||
// not register the actions in debug builds:
|
||||
//
|
||||
// https://github.com/rust-lang/rust/issues/47384
|
||||
// https://github.com/mmastrac/rust-ctor/issues/280
|
||||
//
|
||||
// if we are relying on this side-effect still, removing the debug_assert!
|
||||
// likely breaks the command_palette tests.
|
||||
// debug_assert!(
|
||||
// A::is_registered(),
|
||||
// "{:?} is not registered as an action",
|
||||
// A::qualified_name()
|
||||
// );
|
||||
self.interactivity().action_listeners.push((
|
||||
TypeId::of::<A>(),
|
||||
Box::new(move |action, phase, cx| {
|
||||
@ -247,6 +233,23 @@ pub trait InteractiveElement: Sized + Element {
|
||||
self
|
||||
}
|
||||
|
||||
fn on_boxed_action(
|
||||
mut self,
|
||||
action: &Box<dyn Action>,
|
||||
listener: impl Fn(&Box<dyn Action>, &mut WindowContext) + 'static,
|
||||
) -> Self {
|
||||
let action = action.boxed_clone();
|
||||
self.interactivity().action_listeners.push((
|
||||
(*action).type_id(),
|
||||
Box::new(move |_, phase, cx| {
|
||||
if phase == DispatchPhase::Bubble {
|
||||
(listener)(&action, cx)
|
||||
}
|
||||
}),
|
||||
));
|
||||
self
|
||||
}
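// Hedged usage sketch for the new `on_boxed_action` (the `boxed_action` value,
// the element it is attached to, and the handler body are hypothetical; in
// practice the boxed action typically comes from a keymap or menu definition):
//
// div().on_boxed_action(&boxed_action, |_action, _cx| {
//     // react to the action without knowing its concrete type
// })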
|
||||
|
||||
fn on_key_down(
|
||||
mut self,
|
||||
listener: impl Fn(&KeyDownEvent, &mut WindowContext) + 'static,
|
||||
@ -866,6 +869,7 @@ impl Interactivity {
|
||||
}
|
||||
|
||||
if self.hover_style.is_some()
|
||||
|| self.base_style.mouse_cursor.is_some()
|
||||
|| cx.active_drag.is_some() && !self.drag_over_styles.is_empty()
|
||||
{
|
||||
let bounds = bounds.intersect(&cx.content_mask().bounds);
|
||||
@ -992,10 +996,6 @@ impl Interactivity {
|
||||
let interactive_bounds = interactive_bounds.clone();
|
||||
|
||||
cx.on_mouse_event(move |event: &MouseMoveEvent, phase, cx| {
|
||||
if phase != DispatchPhase::Bubble {
|
||||
return;
|
||||
}
|
||||
|
||||
let is_hovered = interactive_bounds.visibly_contains(&event.position, cx)
|
||||
&& pending_mouse_down.borrow().is_none();
|
||||
if !is_hovered {
|
||||
@ -1003,6 +1003,10 @@ impl Interactivity {
|
||||
return;
|
||||
}
|
||||
|
||||
if phase != DispatchPhase::Bubble {
|
||||
return;
|
||||
}
|
||||
|
||||
if active_tooltip.borrow().is_none() {
|
||||
let task = cx.spawn({
|
||||
let active_tooltip = active_tooltip.clone();
|
||||
|
@ -1,3 +1,4 @@
|
||||
mod canvas;
|
||||
mod div;
|
||||
mod img;
|
||||
mod overlay;
|
||||
@ -5,6 +6,7 @@ mod svg;
|
||||
mod text;
|
||||
mod uniform_list;
|
||||
|
||||
pub use canvas::*;
|
||||
pub use div::*;
|
||||
pub use img::*;
|
||||
pub use overlay::*;
|
||||
|
@ -128,11 +128,19 @@ impl BackgroundExecutor {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
#[track_caller]
|
||||
pub fn block_test<R>(&self, future: impl Future<Output = R>) -> R {
|
||||
self.block_internal(false, future)
|
||||
if let Ok(value) = self.block_internal(false, future, usize::MAX) {
|
||||
value
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
|
||||
self.block_internal(true, future)
|
||||
if let Ok(value) = self.block_internal(true, future, usize::MAX) {
|
||||
value
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
@ -140,7 +148,8 @@ impl BackgroundExecutor {
|
||||
&self,
|
||||
background_only: bool,
|
||||
future: impl Future<Output = R>,
|
||||
) -> R {
|
||||
mut max_ticks: usize,
|
||||
) -> Result<R, ()> {
|
||||
pin_mut!(future);
|
||||
let unparker = self.dispatcher.unparker();
|
||||
let awoken = Arc::new(AtomicBool::new(false));
|
||||
@ -156,8 +165,13 @@ impl BackgroundExecutor {
|
||||
|
||||
loop {
|
||||
match future.as_mut().poll(&mut cx) {
|
||||
Poll::Ready(result) => return result,
|
||||
Poll::Ready(result) => return Ok(result),
|
||||
Poll::Pending => {
|
||||
if max_ticks == 0 {
|
||||
return Err(());
|
||||
}
|
||||
max_ticks -= 1;
|
||||
|
||||
if !self.dispatcher.tick(background_only) {
|
||||
if awoken.swap(false, SeqCst) {
|
||||
continue;
|
||||
@ -192,16 +206,25 @@ impl BackgroundExecutor {
|
||||
return Err(future);
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
let max_ticks = self
|
||||
.dispatcher
|
||||
.as_test()
|
||||
.map_or(usize::MAX, |dispatcher| dispatcher.gen_block_on_ticks());
|
||||
#[cfg(not(any(test, feature = "test-support")))]
|
||||
let max_ticks = usize::MAX;
|
||||
|
||||
let mut timer = self.timer(duration).fuse();
|
||||
|
||||
let timeout = async {
|
||||
futures::select_biased! {
|
||||
value = future => Ok(value),
|
||||
_ = timer => Err(()),
|
||||
}
|
||||
};
|
||||
match self.block(timeout) {
|
||||
Ok(value) => Ok(value),
|
||||
Err(_) => Err(future),
|
||||
match self.block_internal(true, timeout, max_ticks) {
|
||||
Ok(Ok(value)) => Ok(value),
|
||||
_ => Err(future),
|
||||
}
|
||||
}
|
||||
|
||||
@ -281,6 +304,11 @@ impl BackgroundExecutor {
|
||||
pub fn is_main_thread(&self) -> bool {
|
||||
self.dispatcher.is_main_thread()
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn set_block_on_ticks(&self, range: std::ops::RangeInclusive<usize>) {
|
||||
self.dispatcher.as_test().unwrap().set_block_on_ticks(range);
|
||||
}
|
||||
}
|
||||
|
||||
impl ForegroundExecutor {
|
||||
|
@ -21,7 +21,7 @@ mod subscription;
|
||||
mod svg_renderer;
|
||||
mod taffy;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
mod test;
|
||||
pub mod test;
|
||||
mod text_system;
|
||||
mod util;
|
||||
mod view;
|
||||
|
@ -16,7 +16,7 @@ pub struct DispatchNodeId(usize);
|
||||
|
||||
pub(crate) struct DispatchTree {
|
||||
node_stack: Vec<DispatchNodeId>,
|
||||
context_stack: Vec<KeyContext>,
|
||||
pub(crate) context_stack: Vec<KeyContext>,
|
||||
nodes: Vec<DispatchNode>,
|
||||
focusable_node_ids: HashMap<FocusId, DispatchNodeId>,
|
||||
keystroke_matchers: HashMap<SmallVec<[KeyContext; 4]>, KeystrokeMatcher>,
|
||||
@ -163,11 +163,25 @@ impl DispatchTree {
|
||||
actions
|
||||
}
|
||||
|
||||
pub fn bindings_for_action(&self, action: &dyn Action) -> Vec<KeyBinding> {
|
||||
pub fn bindings_for_action(
|
||||
&self,
|
||||
action: &dyn Action,
|
||||
context_stack: &Vec<KeyContext>,
|
||||
) -> Vec<KeyBinding> {
|
||||
self.keymap
|
||||
.lock()
|
||||
.bindings_for_action(action.type_id())
|
||||
.filter(|candidate| candidate.action.partial_eq(action))
|
||||
.filter(|candidate| {
|
||||
if !candidate.action.partial_eq(action) {
|
||||
return false;
|
||||
}
|
||||
for i in 1..context_stack.len() {
|
||||
if candidate.matches_context(&context_stack[0..=i]) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
})
|
||||
.cloned()
|
||||
.collect()
|
||||
}
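// Illustrative note (not part of the change): with a dispatch context stack of,
// say, ["Workspace", "Pane", "Editor"], the filter above keeps a candidate
// binding if it matches the prefix ["Workspace", "Pane"] or the full stack;
// a match against the first context alone is deliberately not checked, since
// the loop starts at index 1.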
|
||||
|
@ -44,7 +44,7 @@ pub(crate) fn current_platform() -> Rc<dyn Platform> {
|
||||
Rc::new(MacPlatform::new())
|
||||
}
|
||||
|
||||
pub(crate) trait Platform: 'static {
|
||||
pub trait Platform: 'static {
|
||||
fn background_executor(&self) -> BackgroundExecutor;
|
||||
fn foreground_executor(&self) -> ForegroundExecutor;
|
||||
fn text_system(&self) -> Arc<dyn PlatformTextSystem>;
|
||||
@ -128,7 +128,7 @@ impl Debug for DisplayId {
|
||||
|
||||
unsafe impl Send for DisplayId {}
|
||||
|
||||
pub(crate) trait PlatformWindow {
|
||||
pub trait PlatformWindow {
|
||||
fn bounds(&self) -> WindowBounds;
|
||||
fn content_size(&self) -> Size<Pixels>;
|
||||
fn scale_factor(&self) -> f32;
|
||||
@ -160,7 +160,7 @@ pub(crate) trait PlatformWindow {
|
||||
fn sprite_atlas(&self) -> Arc<dyn PlatformAtlas>;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
fn as_test(&self) -> Option<&TestWindow> {
|
||||
fn as_test(&mut self) -> Option<&mut TestWindow> {
|
||||
None
|
||||
}
|
||||
}
|
||||
@ -472,13 +472,27 @@ pub enum PromptLevel {
|
||||
Critical,
|
||||
}
|
||||
|
||||
/// The style of the cursor (pointer)
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub enum CursorStyle {
|
||||
Arrow,
|
||||
ResizeLeftRight,
|
||||
ResizeUpDown,
|
||||
PointingHand,
|
||||
IBeam,
|
||||
Crosshair,
|
||||
ClosedHand,
|
||||
OpenHand,
|
||||
PointingHand,
|
||||
ResizeLeft,
|
||||
ResizeRight,
|
||||
ResizeLeftRight,
|
||||
ResizeUp,
|
||||
ResizeDown,
|
||||
ResizeUpDown,
|
||||
DisappearingItem,
|
||||
IBeamCursorForVerticalLayout,
|
||||
OperationNotAllowed,
|
||||
DragLink,
|
||||
DragCopy,
|
||||
ContextualMenu,
|
||||
}
|
||||
|
||||
impl Default for CursorStyle {
|
||||
|
@ -724,16 +724,35 @@ impl Platform for MacPlatform {
|
||||
}
|
||||
}
|
||||
|
||||
/// Match cursor style to one of the styles available
|
||||
/// in macOS's [NSCursor](https://developer.apple.com/documentation/appkit/nscursor).
|
||||
fn set_cursor_style(&self, style: CursorStyle) {
|
||||
unsafe {
|
||||
let new_cursor: id = match style {
|
||||
CursorStyle::Arrow => msg_send![class!(NSCursor), arrowCursor],
|
||||
CursorStyle::ResizeLeftRight => {
|
||||
msg_send![class!(NSCursor), resizeLeftRightCursor]
|
||||
}
|
||||
CursorStyle::ResizeUpDown => msg_send![class!(NSCursor), resizeUpDownCursor],
|
||||
CursorStyle::PointingHand => msg_send![class!(NSCursor), pointingHandCursor],
|
||||
CursorStyle::IBeam => msg_send![class!(NSCursor), IBeamCursor],
|
||||
CursorStyle::Crosshair => msg_send![class!(NSCursor), crosshairCursor],
|
||||
CursorStyle::ClosedHand => msg_send![class!(NSCursor), closedHandCursor],
|
||||
CursorStyle::OpenHand => msg_send![class!(NSCursor), openHandCursor],
|
||||
CursorStyle::PointingHand => msg_send![class!(NSCursor), pointingHandCursor],
|
||||
CursorStyle::ResizeLeft => msg_send![class!(NSCursor), resizeLeftCursor],
|
||||
CursorStyle::ResizeRight => msg_send![class!(NSCursor), resizeRightCursor],
|
||||
CursorStyle::ResizeLeftRight => msg_send![class!(NSCursor), resizeLeftRightCursor],
|
||||
CursorStyle::ResizeUp => msg_send![class!(NSCursor), resizeUpCursor],
|
||||
CursorStyle::ResizeDown => msg_send![class!(NSCursor), resizeDownCursor],
|
||||
CursorStyle::ResizeUpDown => msg_send![class!(NSCursor), resizeUpDownCursor],
|
||||
CursorStyle::DisappearingItem => {
|
||||
msg_send![class!(NSCursor), disappearingItemCursor]
|
||||
}
|
||||
CursorStyle::IBeamCursorForVerticalLayout => {
|
||||
msg_send![class!(NSCursor), IBeamCursorForVerticalLayout]
|
||||
}
|
||||
CursorStyle::OperationNotAllowed => {
|
||||
msg_send![class!(NSCursor), operationNotAllowedCursor]
|
||||
}
|
||||
CursorStyle::DragLink => msg_send![class!(NSCursor), dragLinkCursor],
|
||||
CursorStyle::DragCopy => msg_send![class!(NSCursor), dragCopyCursor],
|
||||
CursorStyle::ContextualMenu => msg_send![class!(NSCursor), contextualMenuCursor],
|
||||
};
|
||||
|
||||
let old_cursor: id = msg_send![class!(NSCursor), currentCursor];
|
||||
|
@ -7,6 +7,7 @@ use parking_lot::Mutex;
|
||||
use rand::prelude::*;
|
||||
use std::{
|
||||
future::Future,
|
||||
ops::RangeInclusive,
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
task::{Context, Poll},
|
||||
@ -36,6 +37,7 @@ struct TestDispatcherState {
|
||||
allow_parking: bool,
|
||||
waiting_backtrace: Option<Backtrace>,
|
||||
deprioritized_task_labels: HashSet<TaskLabel>,
|
||||
block_on_ticks: RangeInclusive<usize>,
|
||||
}
|
||||
|
||||
impl TestDispatcher {
|
||||
@ -53,6 +55,7 @@ impl TestDispatcher {
|
||||
allow_parking: false,
|
||||
waiting_backtrace: None,
|
||||
deprioritized_task_labels: Default::default(),
|
||||
block_on_ticks: 0..=1000,
|
||||
};
|
||||
|
||||
TestDispatcher {
|
||||
@ -82,8 +85,8 @@ impl TestDispatcher {
|
||||
}
|
||||
|
||||
pub fn simulate_random_delay(&self) -> impl 'static + Send + Future<Output = ()> {
|
||||
pub struct YieldNow {
|
||||
count: usize,
|
||||
struct YieldNow {
|
||||
pub(crate) count: usize,
|
||||
}
|
||||
|
||||
impl Future for YieldNow {
|
||||
@ -142,6 +145,16 @@ impl TestDispatcher {
|
||||
pub fn rng(&self) -> StdRng {
|
||||
self.state.lock().random.clone()
|
||||
}
|
||||
|
||||
pub fn set_block_on_ticks(&self, range: std::ops::RangeInclusive<usize>) {
|
||||
self.state.lock().block_on_ticks = range;
|
||||
}
|
||||
|
||||
pub fn gen_block_on_ticks(&self) -> usize {
|
||||
let mut lock = self.state.lock();
|
||||
let block_on_ticks = lock.block_on_ticks.clone();
|
||||
lock.random.gen_range(block_on_ticks)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for TestDispatcher {
|
||||
|
@ -1,6 +1,6 @@
|
||||
use crate::{
|
||||
AnyWindowHandle, BackgroundExecutor, CursorStyle, DisplayId, ForegroundExecutor, Platform,
|
||||
PlatformDisplay, PlatformTextSystem, TestDisplay, TestWindow, WindowOptions,
|
||||
AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId, ForegroundExecutor,
|
||||
Platform, PlatformDisplay, PlatformTextSystem, TestDisplay, TestWindow, WindowOptions,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use collections::VecDeque;
|
||||
@ -20,6 +20,7 @@ pub struct TestPlatform {
|
||||
active_window: Arc<Mutex<Option<AnyWindowHandle>>>,
|
||||
active_display: Rc<dyn PlatformDisplay>,
|
||||
active_cursor: Mutex<CursorStyle>,
|
||||
current_clipboard_item: Mutex<Option<ClipboardItem>>,
|
||||
pub(crate) prompts: RefCell<TestPrompts>,
|
||||
weak: Weak<Self>,
|
||||
}
|
||||
@ -39,6 +40,7 @@ impl TestPlatform {
|
||||
active_cursor: Default::default(),
|
||||
active_display: Rc::new(TestDisplay::new()),
|
||||
active_window: Default::default(),
|
||||
current_clipboard_item: Mutex::new(None),
|
||||
weak: weak.clone(),
|
||||
})
|
||||
}
|
||||
@ -236,12 +238,12 @@ impl Platform for TestPlatform {
|
||||
true
|
||||
}
|
||||
|
||||
fn write_to_clipboard(&self, _item: crate::ClipboardItem) {
|
||||
unimplemented!()
|
||||
fn write_to_clipboard(&self, item: ClipboardItem) {
|
||||
*self.current_clipboard_item.lock() = Some(item);
|
||||
}
|
||||
|
||||
fn read_from_clipboard(&self) -> Option<crate::ClipboardItem> {
|
||||
unimplemented!()
|
||||
fn read_from_clipboard(&self) -> Option<ClipboardItem> {
|
||||
self.current_clipboard_item.lock().clone()
|
||||
}
|
||||
|
||||
fn write_credentials(&self, _url: &str, _username: &str, _password: &[u8]) -> Result<()> {
|
||||
|
@ -19,7 +19,7 @@ pub(crate) struct TestWindowHandlers {
|
||||
}
|
||||
|
||||
pub struct TestWindow {
|
||||
bounds: WindowBounds,
|
||||
pub(crate) bounds: WindowBounds,
|
||||
current_scene: Mutex<Option<Scene>>,
|
||||
display: Rc<dyn PlatformDisplay>,
|
||||
pub(crate) window_title: Option<String>,
|
||||
@ -170,7 +170,7 @@ impl PlatformWindow for TestWindow {
|
||||
self.sprite_atlas.clone()
|
||||
}
|
||||
|
||||
fn as_test(&self) -> Option<&TestWindow> {
|
||||
fn as_test(&mut self) -> Option<&mut TestWindow> {
|
||||
Some(self)
|
||||
}
|
||||
}
|
||||
|
@ -198,7 +198,7 @@ impl SceneBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Scene {
|
||||
pub struct Scene {
|
||||
pub shadows: Vec<Shadow>,
|
||||
pub quads: Vec<Quad>,
|
||||
pub paths: Vec<Path<ScaledPixels>>,
|
||||
@ -214,7 +214,7 @@ impl Scene {
|
||||
&self.paths
|
||||
}
|
||||
|
||||
pub fn batches(&self) -> impl Iterator<Item = PrimitiveBatch> {
|
||||
pub(crate) fn batches(&self) -> impl Iterator<Item = PrimitiveBatch> {
|
||||
BatchIterator {
|
||||
shadows: &self.shadows,
|
||||
shadows_start: 0,
|
||||
|
@ -208,8 +208,9 @@ impl TextStyle {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the rounded line height in pixels.
|
||||
pub fn line_height_in_pixels(&self, rem_size: Pixels) -> Pixels {
|
||||
self.line_height.to_pixels(self.font_size, rem_size)
|
||||
self.line_height.to_pixels(self.font_size, rem_size).round()
|
||||
}
|
||||
|
||||
pub fn to_run(&self, len: usize) -> TextRun {
|
||||
|
@ -101,6 +101,125 @@ pub trait Styled: Sized {
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `text`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_text(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::IBeam);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `move`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_move(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::ClosedHand);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `not-allowed`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_not_allowed(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::OperationNotAllowed);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `context-menu`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_context_menu(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::ContextualMenu);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `crosshair`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_crosshair(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::Crosshair);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `vertical-text`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_vertical_text(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::IBeamCursorForVerticalLayout);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `alias`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_alias(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::DragLink);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `copy`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_copy(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::DragCopy);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `no-drop`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_no_drop(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::OperationNotAllowed);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `grab`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_grab(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::OpenHand);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `grabbing`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_grabbing(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::ClosedHand);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `col-resize`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_col_resize(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::ResizeLeftRight);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `row-resize`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_row_resize(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::ResizeUpDown);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `n-resize`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_n_resize(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::ResizeUp);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `e-resize`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_e_resize(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::ResizeRight);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `s-resize`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_s_resize(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::ResizeDown);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets cursor style when hovering over an element to `w-resize`.
|
||||
/// [Docs](https://tailwindcss.com/docs/cursor)
|
||||
fn cursor_w_resize(mut self) -> Self {
|
||||
self.style().mouse_cursor = Some(CursorStyle::ResizeLeft);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the whitespace of the element to `normal`.
|
||||
/// [Docs](https://tailwindcss.com/docs/whitespace#normal)
|
||||
fn whitespace_normal(mut self) -> Self {
|
||||
|
@ -1,6 +1,6 @@
|
||||
use collections::{BTreeMap, BTreeSet};
|
||||
use parking_lot::Mutex;
|
||||
use std::{fmt::Debug, mem, sync::Arc};
|
||||
use std::{cell::Cell, fmt::Debug, mem, rc::Rc, sync::Arc};
|
||||
use util::post_inc;
|
||||
|
||||
pub(crate) struct SubscriberSet<EmitterKey, Callback>(
|
||||
@ -14,11 +14,16 @@ impl<EmitterKey, Callback> Clone for SubscriberSet<EmitterKey, Callback> {
|
||||
}
|
||||
|
||||
struct SubscriberSetState<EmitterKey, Callback> {
|
||||
subscribers: BTreeMap<EmitterKey, Option<BTreeMap<usize, Callback>>>,
|
||||
subscribers: BTreeMap<EmitterKey, Option<BTreeMap<usize, Subscriber<Callback>>>>,
|
||||
dropped_subscribers: BTreeSet<(EmitterKey, usize)>,
|
||||
next_subscriber_id: usize,
|
||||
}
|
||||
|
||||
struct Subscriber<Callback> {
|
||||
active: Rc<Cell<bool>>,
|
||||
callback: Callback,
|
||||
}
|
||||
|
||||
impl<EmitterKey, Callback> SubscriberSet<EmitterKey, Callback>
|
||||
where
|
||||
EmitterKey: 'static + Ord + Clone + Debug,
|
||||
@ -32,16 +37,33 @@ where
|
||||
})))
|
||||
}
|
||||
|
||||
pub fn insert(&self, emitter_key: EmitterKey, callback: Callback) -> Subscription {
|
||||
/// Inserts a new `[Subscription]` for the given `emitter_key`. By default, subscriptions
|
||||
/// are inert, meaning that they won't be listed when calling `[SubscriberSet::remove]` or `[SubscriberSet::retain]`.
|
||||
/// This method returns a tuple of a `[Subscription]` and an `impl FnOnce`, and you can use the latter
|
||||
/// to activate the `[Subscription]`.
|
||||
#[must_use]
|
||||
pub fn insert(
|
||||
&self,
|
||||
emitter_key: EmitterKey,
|
||||
callback: Callback,
|
||||
) -> (Subscription, impl FnOnce()) {
|
||||
let active = Rc::new(Cell::new(false));
|
||||
let mut lock = self.0.lock();
|
||||
let subscriber_id = post_inc(&mut lock.next_subscriber_id);
|
||||
lock.subscribers
|
||||
.entry(emitter_key.clone())
|
||||
.or_default()
|
||||
.get_or_insert_with(|| Default::default())
|
||||
.insert(subscriber_id, callback);
|
||||
.insert(
|
||||
subscriber_id,
|
||||
Subscriber {
|
||||
active: active.clone(),
|
||||
callback,
|
||||
},
|
||||
);
|
||||
let this = self.0.clone();
|
||||
Subscription {
|
||||
|
||||
let subscription = Subscription {
|
||||
unsubscribe: Some(Box::new(move || {
|
||||
let mut lock = this.lock();
|
||||
let Some(subscribers) = lock.subscribers.get_mut(&emitter_key) else {
|
||||
@ -63,7 +85,8 @@ where
|
||||
lock.dropped_subscribers
|
||||
.insert((emitter_key, subscriber_id));
|
||||
})),
|
||||
}
|
||||
};
|
||||
(subscription, move || active.set(true))
|
||||
}
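// Sketch of the two-step contract documented above (it mirrors the call sites
// updated elsewhere in this change; `observers` and `entity_id` stand in for
// any `SubscriberSet` and key):
//
// let (subscription, activate) = observers.insert(entity_id, Box::new(callback));
// cx.defer(move |_| activate()); // or call `activate()` immediately when re-entrancy is safe
// subscription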
|
||||
|
||||
pub fn remove(&self, emitter: &EmitterKey) -> impl IntoIterator<Item = Callback> {
|
||||
@ -73,6 +96,13 @@ where
|
||||
.map(|s| s.into_values())
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter_map(|subscriber| {
|
||||
if subscriber.active.get() {
|
||||
Some(subscriber.callback)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Call the given callback for each subscriber to the given emitter.
|
||||
@ -91,7 +121,13 @@ where
|
||||
return;
|
||||
};
|
||||
|
||||
subscribers.retain(|_, callback| f(callback));
|
||||
subscribers.retain(|_, subscriber| {
|
||||
if subscriber.active.get() {
|
||||
f(&mut subscriber.callback)
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
let mut lock = self.0.lock();
|
||||
|
||||
// Add any new subscribers that were added while invoking the callback.
|
||||
|
@ -1,5 +1,7 @@
|
||||
use crate::TestDispatcher;
|
||||
use crate::{Entity, Subscription, TestAppContext, TestDispatcher};
|
||||
use futures::StreamExt as _;
|
||||
use rand::prelude::*;
|
||||
use smol::channel;
|
||||
use std::{
|
||||
env,
|
||||
panic::{self, RefUnwindSafe},
|
||||
@ -49,3 +51,30 @@ pub fn run_test(
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Observation<T> {
|
||||
rx: channel::Receiver<T>,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl<T: 'static> futures::Stream for Observation<T> {
|
||||
type Item = T;
|
||||
|
||||
fn poll_next(
|
||||
mut self: std::pin::Pin<&mut Self>,
|
||||
cx: &mut std::task::Context<'_>,
|
||||
) -> std::task::Poll<Option<Self::Item>> {
|
||||
self.rx.poll_next_unpin(cx)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn observe<T: 'static>(entity: &impl Entity<T>, cx: &mut TestAppContext) -> Observation<()> {
|
||||
let (tx, rx) = smol::channel::unbounded();
|
||||
let _subscription = cx.update(|cx| {
|
||||
cx.observe(entity, move |_, _| {
|
||||
let _ = smol::block_on(tx.send(()));
|
||||
})
|
||||
});
|
||||
|
||||
Observation { rx, _subscription }
|
||||
}
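// Hedged usage sketch for the new `observe` test helper (the model and the
// surrounding test body are hypothetical); `StreamExt::next` is already
// imported above:
//
// let mut notifications = observe(&model, cx);
// model.update(cx, |_, cx| cx.notify());
// notifications.next().await;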
|
||||
|
@ -72,7 +72,7 @@ impl TextSystem {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn bounding_box(&self, font_id: FontId, font_size: Pixels) -> Result<Bounds<Pixels>> {
|
||||
pub fn bounding_box(&self, font_id: FontId, font_size: Pixels) -> Bounds<Pixels> {
|
||||
self.read_metrics(font_id, |metrics| metrics.bounding_box(font_size))
|
||||
}
|
||||
|
||||
@ -89,9 +89,9 @@ impl TextSystem {
|
||||
let bounds = self
|
||||
.platform_text_system
|
||||
.typographic_bounds(font_id, glyph_id)?;
|
||||
self.read_metrics(font_id, |metrics| {
|
||||
Ok(self.read_metrics(font_id, |metrics| {
|
||||
(bounds / metrics.units_per_em as f32 * font_size.0).map(px)
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn advance(&self, font_id: FontId, font_size: Pixels, ch: char) -> Result<Size<Pixels>> {
|
||||
@ -100,28 +100,28 @@ impl TextSystem {
|
||||
.glyph_for_char(font_id, ch)
|
||||
.ok_or_else(|| anyhow!("glyph not found for character '{}'", ch))?;
|
||||
let result = self.platform_text_system.advance(font_id, glyph_id)?
|
||||
/ self.units_per_em(font_id)? as f32;
|
||||
/ self.units_per_em(font_id) as f32;
|
||||
|
||||
Ok(result * font_size)
|
||||
}
|
||||
|
||||
pub fn units_per_em(&self, font_id: FontId) -> Result<u32> {
|
||||
pub fn units_per_em(&self, font_id: FontId) -> u32 {
|
||||
self.read_metrics(font_id, |metrics| metrics.units_per_em as u32)
|
||||
}
|
||||
|
||||
pub fn cap_height(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> {
|
||||
pub fn cap_height(&self, font_id: FontId, font_size: Pixels) -> Pixels {
|
||||
self.read_metrics(font_id, |metrics| metrics.cap_height(font_size))
|
||||
}
|
||||
|
||||
pub fn x_height(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> {
|
||||
pub fn x_height(&self, font_id: FontId, font_size: Pixels) -> Pixels {
|
||||
self.read_metrics(font_id, |metrics| metrics.x_height(font_size))
|
||||
}
|
||||
|
||||
pub fn ascent(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> {
|
||||
pub fn ascent(&self, font_id: FontId, font_size: Pixels) -> Pixels {
|
||||
self.read_metrics(font_id, |metrics| metrics.ascent(font_size))
|
||||
}
|
||||
|
||||
pub fn descent(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> {
|
||||
pub fn descent(&self, font_id: FontId, font_size: Pixels) -> Pixels {
|
||||
self.read_metrics(font_id, |metrics| metrics.descent(font_size))
|
||||
}
|
||||
|
||||
@ -130,24 +130,24 @@ impl TextSystem {
|
||||
font_id: FontId,
|
||||
font_size: Pixels,
|
||||
line_height: Pixels,
|
||||
) -> Result<Pixels> {
|
||||
let ascent = self.ascent(font_id, font_size)?;
|
||||
let descent = self.descent(font_id, font_size)?;
|
||||
) -> Pixels {
|
||||
let ascent = self.ascent(font_id, font_size);
|
||||
let descent = self.descent(font_id, font_size);
|
||||
let padding_top = (line_height - ascent - descent) / 2.;
|
||||
Ok(padding_top + ascent)
|
||||
padding_top + ascent
|
||||
}
|
||||
|
||||
fn read_metrics<T>(&self, font_id: FontId, read: impl FnOnce(&FontMetrics) -> T) -> Result<T> {
|
||||
fn read_metrics<T>(&self, font_id: FontId, read: impl FnOnce(&FontMetrics) -> T) -> T {
|
||||
let lock = self.font_metrics.upgradable_read();
|
||||
|
||||
if let Some(metrics) = lock.get(&font_id) {
|
||||
Ok(read(metrics))
|
||||
read(metrics)
|
||||
} else {
|
||||
let mut lock = RwLockUpgradableReadGuard::upgrade(lock);
|
||||
let metrics = lock
|
||||
.entry(font_id)
|
||||
.or_insert_with(|| self.platform_text_system.font_metrics(font_id));
|
||||
Ok(read(metrics))
|
||||
read(metrics)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -101,9 +101,7 @@ fn paint_line(
|
||||
let mut glyph_origin = origin;
|
||||
let mut prev_glyph_position = Point::default();
|
||||
for (run_ix, run) in layout.runs.iter().enumerate() {
|
||||
let max_glyph_size = text_system
|
||||
.bounding_box(run.font_id, layout.font_size)?
|
||||
.size;
|
||||
let max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size;
|
||||
|
||||
for (glyph_ix, glyph) in run.glyphs.iter().enumerate() {
|
||||
glyph_origin.x += glyph.position.x - prev_glyph_position.x;
|
||||
|
@ -490,7 +490,7 @@ impl<'a> WindowContext<'a> {
|
||||
let entity_id = entity.entity_id();
|
||||
let entity = entity.downgrade();
|
||||
let window_handle = self.window.handle;
|
||||
self.app.event_listeners.insert(
|
||||
let (subscription, activate) = self.app.event_listeners.insert(
|
||||
entity_id,
|
||||
(
|
||||
TypeId::of::<Evt>(),
|
||||
@ -508,7 +508,9 @@ impl<'a> WindowContext<'a> {
|
||||
.unwrap_or(false)
|
||||
}),
|
||||
),
|
||||
)
|
||||
);
|
||||
self.app.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
/// Create an `AsyncWindowContext`, which has a static lifetime and can be held across
|
||||
@ -1348,6 +1350,8 @@ impl<'a> WindowContext<'a> {
|
||||
.dispatch_tree
|
||||
.dispatch_path(node_id);
|
||||
|
||||
let mut actions: Vec<Box<dyn Action>> = Vec::new();
|
||||
|
||||
// Capture phase
|
||||
let mut context_stack: SmallVec<[KeyContext; 16]> = SmallVec::new();
|
||||
self.propagate_event = true;
|
||||
@ -1382,22 +1386,26 @@ impl<'a> WindowContext<'a> {
|
||||
let node = self.window.current_frame.dispatch_tree.node(*node_id);
|
||||
if !node.context.is_empty() {
|
||||
if let Some(key_down_event) = event.downcast_ref::<KeyDownEvent>() {
|
||||
if let Some(action) = self
|
||||
if let Some(found) = self
|
||||
.window
|
||||
.current_frame
|
||||
.dispatch_tree
|
||||
.dispatch_key(&key_down_event.keystroke, &context_stack)
|
||||
{
|
||||
self.dispatch_action_on_node(*node_id, action);
|
||||
if !self.propagate_event {
|
||||
return;
|
||||
}
|
||||
actions.push(found.boxed_clone())
|
||||
}
|
||||
}
|
||||
|
||||
context_stack.pop();
|
||||
}
|
||||
}
|
||||
|
||||
for action in actions {
|
||||
self.dispatch_action_on_node(node_id, action);
|
||||
if !self.propagate_event {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1425,7 +1433,6 @@ impl<'a> WindowContext<'a> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Bubble phase
|
||||
for node_id in dispatch_path.iter().rev() {
|
||||
let node = self.window.current_frame.dispatch_tree.node(*node_id);
|
||||
@ -1453,10 +1460,12 @@ impl<'a> WindowContext<'a> {
|
||||
f: impl Fn(&mut WindowContext<'_>) + 'static,
|
||||
) -> Subscription {
|
||||
let window_handle = self.window.handle;
|
||||
self.global_observers.insert(
|
||||
let (subscription, activate) = self.global_observers.insert(
|
||||
TypeId::of::<G>(),
|
||||
Box::new(move |cx| window_handle.update(cx, |_, cx| f(cx)).is_ok()),
|
||||
)
|
||||
);
|
||||
self.app.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn activate_window(&self) {
|
||||
@ -1493,9 +1502,30 @@ impl<'a> WindowContext<'a> {
|
||||
|
||||
pub fn bindings_for_action(&self, action: &dyn Action) -> Vec<KeyBinding> {
|
||||
self.window
|
||||
.current_frame
|
||||
.previous_frame
|
||||
.dispatch_tree
|
||||
.bindings_for_action(action)
|
||||
.bindings_for_action(
|
||||
action,
|
||||
&self.window.previous_frame.dispatch_tree.context_stack,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn bindings_for_action_in(
|
||||
&self,
|
||||
action: &dyn Action,
|
||||
focus_handle: &FocusHandle,
|
||||
) -> Vec<KeyBinding> {
|
||||
let dispatch_tree = &self.window.previous_frame.dispatch_tree;
|
||||
|
||||
let Some(node_id) = dispatch_tree.focusable_node_id(focus_handle.id) else {
|
||||
return vec![];
|
||||
};
|
||||
let context_stack = dispatch_tree
|
||||
.dispatch_path(node_id)
|
||||
.into_iter()
|
||||
.map(|node_id| dispatch_tree.node(node_id).context.clone())
|
||||
.collect();
|
||||
dispatch_tree.bindings_for_action(action, &context_stack)
|
||||
}
|
||||
|
||||
pub fn listener_for<V: Render, E>(
|
||||
@ -2096,7 +2126,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
let entity_id = entity.entity_id();
|
||||
let entity = entity.downgrade();
|
||||
let window_handle = self.window.handle;
|
||||
self.app.observers.insert(
|
||||
let (subscription, activate) = self.app.observers.insert(
|
||||
entity_id,
|
||||
Box::new(move |cx| {
|
||||
window_handle
|
||||
@ -2110,7 +2140,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}),
|
||||
)
|
||||
);
|
||||
self.app.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn subscribe<V2, E, Evt>(
|
||||
@ -2127,7 +2159,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
let entity_id = entity.entity_id();
|
||||
let handle = entity.downgrade();
|
||||
let window_handle = self.window.handle;
|
||||
self.app.event_listeners.insert(
|
||||
let (subscription, activate) = self.app.event_listeners.insert(
|
||||
entity_id,
|
||||
(
|
||||
TypeId::of::<Evt>(),
|
||||
@ -2145,7 +2177,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
.unwrap_or(false)
|
||||
}),
|
||||
),
|
||||
)
|
||||
);
|
||||
self.app.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn on_release(
|
||||
@ -2153,13 +2187,15 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
on_release: impl FnOnce(&mut V, &mut WindowContext) + 'static,
|
||||
) -> Subscription {
|
||||
let window_handle = self.window.handle;
|
||||
self.app.release_listeners.insert(
|
||||
let (subscription, activate) = self.app.release_listeners.insert(
|
||||
self.view.model.entity_id,
|
||||
Box::new(move |this, cx| {
|
||||
let this = this.downcast_mut().expect("invalid entity type");
|
||||
let _ = window_handle.update(cx, |_, cx| on_release(this, cx));
|
||||
}),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn observe_release<V2, E>(
|
||||
@ -2175,7 +2211,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
let view = self.view().downgrade();
|
||||
let entity_id = entity.entity_id();
|
||||
let window_handle = self.window.handle;
|
||||
self.app.release_listeners.insert(
|
||||
let (subscription, activate) = self.app.release_listeners.insert(
|
||||
entity_id,
|
||||
Box::new(move |entity, cx| {
|
||||
let entity = entity.downcast_mut().expect("invalid entity type");
|
||||
@ -2183,7 +2219,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
view.update(cx, |this, cx| on_release(this, entity, cx))
|
||||
});
|
||||
}),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn notify(&mut self) {
|
||||
@ -2198,10 +2236,12 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
mut callback: impl FnMut(&mut V, &mut ViewContext<V>) + 'static,
|
||||
) -> Subscription {
|
||||
let view = self.view.downgrade();
|
||||
self.window.bounds_observers.insert(
|
||||
let (subscription, activate) = self.window.bounds_observers.insert(
|
||||
(),
|
||||
Box::new(move |cx| view.update(cx, |view, cx| callback(view, cx)).is_ok()),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn observe_window_activation(
|
||||
@ -2209,10 +2249,12 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
mut callback: impl FnMut(&mut V, &mut ViewContext<V>) + 'static,
|
||||
) -> Subscription {
|
||||
let view = self.view.downgrade();
|
||||
self.window.activation_observers.insert(
|
||||
let (subscription, activate) = self.window.activation_observers.insert(
|
||||
(),
|
||||
Box::new(move |cx| view.update(cx, |view, cx| callback(view, cx)).is_ok()),
|
||||
)
|
||||
);
|
||||
activate();
|
||||
subscription
|
||||
}
|
||||
|
||||
/// Register a listener to be called when the given focus handle receives focus.
|
||||
@ -2225,7 +2267,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
) -> Subscription {
|
||||
let view = self.view.downgrade();
|
||||
let focus_id = handle.id;
|
||||
self.window.focus_listeners.insert(
|
||||
let (subscription, activate) = self.window.focus_listeners.insert(
|
||||
(),
|
||||
Box::new(move |event, cx| {
|
||||
view.update(cx, |view, cx| {
|
||||
@ -2235,7 +2277,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
})
|
||||
.is_ok()
|
||||
}),
|
||||
)
|
||||
);
|
||||
self.app.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
/// Register a listener to be called when the given focus handle or one of its descendants receives focus.
|
||||
@ -2248,7 +2292,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
) -> Subscription {
|
||||
let view = self.view.downgrade();
|
||||
let focus_id = handle.id;
|
||||
self.window.focus_listeners.insert(
|
||||
let (subscription, activate) = self.window.focus_listeners.insert(
|
||||
(),
|
||||
Box::new(move |event, cx| {
|
||||
view.update(cx, |view, cx| {
|
||||
@ -2262,7 +2306,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
})
|
||||
.is_ok()
|
||||
}),
|
||||
)
|
||||
);
|
||||
self.app.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
/// Register a listener to be called when the given focus handle loses focus.
|
||||
@ -2275,7 +2321,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
) -> Subscription {
|
||||
let view = self.view.downgrade();
|
||||
let focus_id = handle.id;
|
||||
self.window.focus_listeners.insert(
|
||||
let (subscription, activate) = self.window.focus_listeners.insert(
|
||||
(),
|
||||
Box::new(move |event, cx| {
|
||||
view.update(cx, |view, cx| {
|
||||
@ -2285,7 +2331,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
})
|
||||
.is_ok()
|
||||
}),
|
||||
)
|
||||
);
|
||||
self.app.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
/// Register a listener to be called when the given focus handle or one of its descendants loses focus.
|
||||
@ -2298,7 +2346,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
) -> Subscription {
|
||||
let view = self.view.downgrade();
|
||||
let focus_id = handle.id;
|
||||
self.window.focus_listeners.insert(
|
||||
let (subscription, activate) = self.window.focus_listeners.insert(
|
||||
(),
|
||||
Box::new(move |event, cx| {
|
||||
view.update(cx, |view, cx| {
|
||||
@ -2312,7 +2360,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
})
|
||||
.is_ok()
|
||||
}),
|
||||
)
|
||||
);
|
||||
self.app.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn spawn<Fut, R>(
|
||||
@ -2343,14 +2393,16 @@ impl<'a, V: 'static> ViewContext<'a, V> {
|
||||
) -> Subscription {
|
||||
let window_handle = self.window.handle;
|
||||
let view = self.view().downgrade();
|
||||
self.global_observers.insert(
|
||||
let (subscription, activate) = self.global_observers.insert(
|
||||
TypeId::of::<G>(),
|
||||
Box::new(move |cx| {
|
||||
window_handle
|
||||
.update(cx, |_, cx| view.update(cx, |view, cx| f(view, cx)).is_ok())
|
||||
.unwrap_or(false)
|
||||
}),
|
||||
)
|
||||
);
|
||||
self.app.defer(move |_| activate());
|
||||
subscription
|
||||
}
|
||||
|
||||
pub fn on_mouse_event<Event: 'static>(
|
||||
@ -2708,6 +2760,7 @@ pub enum ElementId {
|
||||
Integer(usize),
|
||||
Name(SharedString),
|
||||
FocusHandle(FocusId),
|
||||
NamedInteger(SharedString, usize),
|
||||
}
|
||||
|
||||
impl ElementId {
|
||||
@ -2757,3 +2810,9 @@ impl<'a> From<&'a FocusHandle> for ElementId {
|
||||
ElementId::FocusHandle(handle.id)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<(&'static str, EntityId)> for ElementId {
|
||||
fn from((name, id): (&'static str, EntityId)) -> Self {
|
||||
ElementId::NamedInteger(name.into(), id.as_u64() as usize)
|
||||
}
|
||||
}
|
||||
|
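The recurring change in the hunks above is that every listener registry's `insert` now returns the `Subscription` plus an `activate` closure, and activation is deferred with `cx.defer` (or run immediately for release listeners). The sketch below is a minimal, std-only illustration of that pattern; `SubscriberSet`, `Subscription`, and `Listener` here are stand-ins, not the actual gpui types or API.

```rust
// Hypothetical sketch of deferred subscription activation: a listener registered
// while the set is being notified stays inert until `activate` runs, so it never
// fires within the same notification pass that created it.
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

type Listener = Box<dyn FnMut()>;

#[derive(Default)]
struct SubscriberSet {
    next_id: usize,
    entries: Rc<RefCell<HashMap<usize, (bool, Listener)>>>,
}

struct Subscription {
    id: usize,
    entries: Rc<RefCell<HashMap<usize, (bool, Listener)>>>,
}

impl Drop for Subscription {
    fn drop(&mut self) {
        // Dropping the guard unregisters the listener.
        self.entries.borrow_mut().remove(&self.id);
    }
}

impl SubscriberSet {
    /// Returns the guard plus a closure that flips the listener live.
    fn insert(&mut self, listener: Listener) -> (Subscription, impl FnOnce()) {
        self.next_id += 1;
        let id = self.next_id;
        self.entries.borrow_mut().insert(id, (false, listener));
        let entries = self.entries.clone();
        let activate = move || {
            if let Some(entry) = entries.borrow_mut().get_mut(&id) {
                entry.0 = true;
            }
        };
        let subscription = Subscription { id, entries: self.entries.clone() };
        (subscription, activate)
    }

    fn notify(&self) {
        for (_, (active, listener)) in self.entries.borrow_mut().iter_mut() {
            if *active {
                listener();
            }
        }
    }
}

fn main() {
    let mut set = SubscriberSet::default();
    let (_subscription, activate) = set.insert(Box::new(|| println!("notified")));
    set.notify(); // prints nothing: the listener is not yet active
    activate(); // in the diff this call is deferred via `self.app.defer(...)`
    set.notify(); // prints "notified"
}
```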
@ -81,6 +81,7 @@ impl<T> Outline<T> {
|
||||
let mut prev_item_ix = 0;
|
||||
for mut string_match in matches {
|
||||
let outline_match = &self.items[string_match.candidate_id];
|
||||
string_match.string = outline_match.text.clone();
|
||||
|
||||
if is_path_query {
|
||||
let prefix_len = self.path_candidate_prefixes[string_match.candidate_id];
|
||||
|
26
crates/language_selector2/Cargo.toml
Normal file
@ -0,0 +1,26 @@
|
||||
[package]
|
||||
name = "language_selector2"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/language_selector.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
editor = { package = "editor2", path = "../editor2" }
|
||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
||||
language = { package = "language2", path = "../language2" }
|
||||
gpui = { package = "gpui2", path = "../gpui2" }
|
||||
picker = { package = "picker2", path = "../picker2" }
|
||||
project = { package = "project2", path = "../project2" }
|
||||
theme = { package = "theme2", path = "../theme2" }
|
||||
ui = { package = "ui2", path = "../ui2" }
|
||||
settings = { package = "settings2", path = "../settings2" }
|
||||
util = { path = "../util" }
|
||||
workspace = { package = "workspace2", path = "../workspace2" }
|
||||
anyhow.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
editor = { package = "editor2", path = "../editor2", features = ["test-support"] }
|
82
crates/language_selector2/src/active_buffer_language.rs
Normal file
@ -0,0 +1,82 @@
|
||||
use editor::Editor;
|
||||
use gpui::{
|
||||
div, Div, IntoElement, ParentElement, Render, Subscription, View, ViewContext, WeakView,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
use ui::{Button, ButtonCommon, Clickable, Tooltip};
|
||||
use workspace::{item::ItemHandle, StatusItemView, Workspace};
|
||||
|
||||
use crate::LanguageSelector;
|
||||
|
||||
pub struct ActiveBufferLanguage {
|
||||
active_language: Option<Option<Arc<str>>>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_observe_active_editor: Option<Subscription>,
|
||||
}
|
||||
|
||||
impl ActiveBufferLanguage {
|
||||
pub fn new(workspace: &Workspace) -> Self {
|
||||
Self {
|
||||
active_language: None,
|
||||
workspace: workspace.weak_handle(),
|
||||
_observe_active_editor: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn update_language(&mut self, editor: View<Editor>, cx: &mut ViewContext<Self>) {
|
||||
self.active_language = Some(None);
|
||||
|
||||
let editor = editor.read(cx);
|
||||
if let Some((_, buffer, _)) = editor.active_excerpt(cx) {
|
||||
if let Some(language) = buffer.read(cx).language() {
|
||||
self.active_language = Some(Some(language.name()));
|
||||
}
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ActiveBufferLanguage {
|
||||
type Element = Div;
|
||||
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> Div {
|
||||
div().when_some(self.active_language.as_ref(), |el, active_language| {
|
||||
let active_language_text = if let Some(active_language_text) = active_language {
|
||||
active_language_text.to_string()
|
||||
} else {
|
||||
"Unknown".to_string()
|
||||
};
|
||||
|
||||
el.child(
|
||||
Button::new("change-language", active_language_text)
|
||||
.on_click(cx.listener(|this, _, cx| {
|
||||
if let Some(workspace) = this.workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
LanguageSelector::toggle(workspace, cx)
|
||||
});
|
||||
}
|
||||
}))
|
||||
.tooltip(|cx| Tooltip::text("Select Language", cx)),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl StatusItemView for ActiveBufferLanguage {
|
||||
fn set_active_pane_item(
|
||||
&mut self,
|
||||
active_pane_item: Option<&dyn ItemHandle>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let Some(editor) = active_pane_item.and_then(|item| item.act_as::<Editor>(cx)) {
|
||||
self._observe_active_editor = Some(cx.observe(&editor, Self::update_language));
|
||||
self.update_language(editor, cx);
|
||||
} else {
|
||||
self.active_language = None;
|
||||
self._observe_active_editor = None;
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
}
|
232
crates/language_selector2/src/language_selector.rs
Normal file
@ -0,0 +1,232 @@
|
||||
mod active_buffer_language;
|
||||
|
||||
pub use active_buffer_language::ActiveBufferLanguage;
|
||||
use anyhow::anyhow;
|
||||
use editor::Editor;
|
||||
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
|
||||
use gpui::{
|
||||
actions, AppContext, DismissEvent, Div, EventEmitter, FocusHandle, FocusableView, Model,
|
||||
ParentElement, Render, Styled, View, ViewContext, VisualContext, WeakView,
|
||||
};
|
||||
use language::{Buffer, LanguageRegistry};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::Project;
|
||||
use std::sync::Arc;
|
||||
use ui::{v_stack, HighlightedLabel, ListItem, Selectable};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
actions!(Toggle);
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
cx.observe_new_views(LanguageSelector::register).detach();
|
||||
}
|
||||
|
||||
pub struct LanguageSelector {
|
||||
picker: View<Picker<LanguageSelectorDelegate>>,
|
||||
}
|
||||
|
||||
impl LanguageSelector {
|
||||
fn register(workspace: &mut Workspace, _: &mut ViewContext<Workspace>) {
|
||||
workspace.register_action(move |workspace, _: &Toggle, cx| {
|
||||
Self::toggle(workspace, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn toggle(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) -> Option<()> {
|
||||
let registry = workspace.app_state().languages.clone();
|
||||
let (_, buffer, _) = workspace
|
||||
.active_item(cx)?
|
||||
.act_as::<Editor>(cx)?
|
||||
.read(cx)
|
||||
.active_excerpt(cx)?;
|
||||
let project = workspace.project().clone();
|
||||
|
||||
workspace.toggle_modal(cx, move |cx| {
|
||||
LanguageSelector::new(buffer, project, registry, cx)
|
||||
});
|
||||
Some(())
|
||||
}
|
||||
|
||||
fn new(
|
||||
buffer: Model<Buffer>,
|
||||
project: Model<Project>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let delegate = LanguageSelectorDelegate::new(
|
||||
cx.view().downgrade(),
|
||||
buffer,
|
||||
project,
|
||||
language_registry,
|
||||
);
|
||||
|
||||
let picker = cx.build_view(|cx| Picker::new(delegate, cx));
|
||||
Self { picker }
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for LanguageSelector {
|
||||
type Element = Div;
|
||||
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> Self::Element {
|
||||
v_stack().min_w_96().child(self.picker.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl FocusableView for LanguageSelector {
|
||||
fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
|
||||
self.picker.focus_handle(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for LanguageSelector {}
|
||||
|
||||
pub struct LanguageSelectorDelegate {
|
||||
language_selector: WeakView<LanguageSelector>,
|
||||
buffer: Model<Buffer>,
|
||||
project: Model<Project>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
candidates: Vec<StringMatchCandidate>,
|
||||
matches: Vec<StringMatch>,
|
||||
selected_index: usize,
|
||||
}
|
||||
|
||||
impl LanguageSelectorDelegate {
|
||||
fn new(
|
||||
language_selector: WeakView<LanguageSelector>,
|
||||
buffer: Model<Buffer>,
|
||||
project: Model<Project>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
) -> Self {
|
||||
let candidates = language_registry
|
||||
.language_names()
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(candidate_id, name)| StringMatchCandidate::new(candidate_id, name))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Self {
|
||||
language_selector,
|
||||
buffer,
|
||||
project,
|
||||
language_registry,
|
||||
candidates,
|
||||
matches: vec![],
|
||||
selected_index: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for LanguageSelectorDelegate {
|
||||
type ListItem = ListItem;
|
||||
|
||||
fn placeholder_text(&self) -> Arc<str> {
|
||||
"Select a language...".into()
|
||||
}
|
||||
|
||||
fn match_count(&self) -> usize {
|
||||
self.matches.len()
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<Self>>) {
|
||||
if let Some(mat) = self.matches.get(self.selected_index) {
|
||||
let language_name = &self.candidates[mat.candidate_id].string;
|
||||
let language = self.language_registry.language_for_name(language_name);
|
||||
let project = self.project.downgrade();
|
||||
let buffer = self.buffer.downgrade();
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
let language = language.await?;
|
||||
let project = project
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("project was dropped"))?;
|
||||
let buffer = buffer
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("buffer was dropped"))?;
|
||||
project.update(&mut cx, |project, cx| {
|
||||
project.set_language_for_buffer(&buffer, language, cx);
|
||||
})
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
self.dismissed(cx);
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>) {
|
||||
self.language_selector
|
||||
.update(cx, |_, cx| cx.emit(DismissEvent))
|
||||
.log_err();
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_index
|
||||
}
|
||||
|
||||
fn set_selected_index(&mut self, ix: usize, _: &mut ViewContext<Picker<Self>>) {
|
||||
self.selected_index = ix;
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
query: String,
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> gpui::Task<()> {
|
||||
let background = cx.background_executor().clone();
|
||||
let candidates = self.candidates.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let matches = if query.is_empty() {
|
||||
candidates
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, candidate)| StringMatch {
|
||||
candidate_id: index,
|
||||
string: candidate.string,
|
||||
positions: Vec::new(),
|
||||
score: 0.0,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
match_strings(
|
||||
&candidates,
|
||||
&query,
|
||||
false,
|
||||
100,
|
||||
&Default::default(),
|
||||
background,
|
||||
)
|
||||
.await
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let delegate = &mut this.delegate;
|
||||
delegate.matches = matches;
|
||||
delegate.selected_index = delegate
|
||||
.selected_index
|
||||
.min(delegate.matches.len().saturating_sub(1));
|
||||
cx.notify();
|
||||
})
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let mat = &self.matches[ix];
|
||||
let buffer_language_name = self.buffer.read(cx).language().map(|l| l.name());
|
||||
let mut label = mat.string.clone();
|
||||
if buffer_language_name.as_deref() == Some(mat.string.as_str()) {
|
||||
label.push_str(" (current)");
|
||||
}
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.selected(selected)
|
||||
.child(HighlightedLabel::new(label, mat.positions.clone())),
|
||||
)
|
||||
}
|
||||
}
|
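`LanguageSelectorDelegate::update_matches` above keeps every candidate when the query is empty and only runs fuzzy matching otherwise. The snippet below is a small, std-only sketch of that fallback; the substring filter and scoring stand in for the real `fuzzy::match_strings`, and `StringMatch` is simplified.

```rust
// Minimal sketch of the empty-query fallback used by the language picker:
// no query keeps all candidates in their original order with a zero score,
// otherwise candidates are filtered and scored (here: a naive substring match).
#[derive(Debug)]
struct StringMatch {
    candidate_id: usize,
    string: String,
    score: f64,
}

fn update_matches(candidates: &[String], query: &str) -> Vec<StringMatch> {
    if query.is_empty() {
        candidates
            .iter()
            .enumerate()
            .map(|(candidate_id, string)| StringMatch {
                candidate_id,
                string: string.clone(),
                score: 0.0,
            })
            .collect()
    } else {
        // Stand-in for fuzzy::match_strings: case-insensitive substring filter.
        let query = query.to_lowercase();
        candidates
            .iter()
            .enumerate()
            .filter(|(_, string)| string.to_lowercase().contains(&query))
            .map(|(candidate_id, string)| StringMatch {
                candidate_id,
                string: string.clone(),
                score: 1.0 / string.len() as f64, // shorter names rank higher
            })
            .collect()
    }
}

fn main() {
    let candidates = vec!["Rust".to_string(), "Ruby".to_string(), "JSON".to_string()];
    println!("{:?}", update_matches(&candidates, ""));
    println!("{:?}", update_matches(&candidates, "ru"));
}
```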
29
crates/outline2/Cargo.toml
Normal file
@ -0,0 +1,29 @@
|
||||
[package]
|
||||
name = "outline2"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/outline.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
editor = { package = "editor2", path = "../editor2" }
|
||||
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
|
||||
gpui = { package = "gpui2", path = "../gpui2" }
|
||||
ui = { package = "ui2", path = "../ui2" }
|
||||
language = { package = "language2", path = "../language2" }
|
||||
picker = { package = "picker2", path = "../picker2" }
|
||||
settings = { package = "settings2", path = "../settings2" }
|
||||
text = { package = "text2", path = "../text2" }
|
||||
theme = { package = "theme2", path = "../theme2" }
|
||||
workspace = { package = "workspace2", path = "../workspace2" }
|
||||
util = { path = "../util" }
|
||||
|
||||
ordered-float.workspace = true
|
||||
postage.workspace = true
|
||||
smol.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
editor = { package = "editor2", path = "../editor2", features = ["test-support"] }
|
276
crates/outline2/src/outline.rs
Normal file
@ -0,0 +1,276 @@
|
||||
use editor::{
|
||||
display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, Anchor, AnchorRangeExt,
|
||||
DisplayPoint, Editor, ToPoint,
|
||||
};
|
||||
use fuzzy::StringMatch;
|
||||
use gpui::{
|
||||
actions, div, rems, AppContext, DismissEvent, Div, EventEmitter, FocusHandle, FocusableView,
|
||||
FontWeight, ParentElement, Point, Render, Styled, StyledText, Task, TextStyle, View,
|
||||
ViewContext, VisualContext, WeakView, WindowContext,
|
||||
};
|
||||
use language::Outline;
|
||||
use ordered_float::OrderedFloat;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use std::{
|
||||
cmp::{self, Reverse},
|
||||
sync::Arc,
|
||||
};
|
||||
use theme::ActiveTheme;
|
||||
use ui::{v_stack, ListItem, Selectable};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
actions!(Toggle);
|
||||
|
||||
pub fn init(cx: &mut AppContext) {
|
||||
cx.observe_new_views(OutlineView::register).detach();
|
||||
}
|
||||
|
||||
pub fn toggle(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
|
||||
if let Some(editor) = workspace
|
||||
.active_item(cx)
|
||||
.and_then(|item| item.downcast::<Editor>())
|
||||
{
|
||||
let outline = editor
|
||||
.read(cx)
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.snapshot(cx)
|
||||
.outline(Some(&cx.theme().syntax()));
|
||||
|
||||
if let Some(outline) = outline {
|
||||
workspace.toggle_modal(cx, |cx| OutlineView::new(outline, editor, cx));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct OutlineView {
|
||||
picker: View<Picker<OutlineViewDelegate>>,
|
||||
}
|
||||
|
||||
impl FocusableView for OutlineView {
|
||||
fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
|
||||
self.picker.focus_handle(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for OutlineView {}
|
||||
|
||||
impl Render for OutlineView {
|
||||
type Element = Div;
|
||||
|
||||
fn render(&mut self, _cx: &mut ViewContext<Self>) -> Self::Element {
|
||||
v_stack().min_w_96().child(self.picker.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl OutlineView {
|
||||
fn register(workspace: &mut Workspace, _: &mut ViewContext<Workspace>) {
|
||||
workspace.register_action(toggle);
|
||||
}
|
||||
|
||||
fn new(
|
||||
outline: Outline<Anchor>,
|
||||
editor: View<Editor>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> OutlineView {
|
||||
let delegate = OutlineViewDelegate::new(cx.view().downgrade(), outline, editor, cx);
|
||||
let picker = cx.build_view(|cx| Picker::new(delegate, cx));
|
||||
OutlineView { picker }
|
||||
}
|
||||
}
|
||||
|
||||
struct OutlineViewDelegate {
|
||||
outline_view: WeakView<OutlineView>,
|
||||
active_editor: View<Editor>,
|
||||
outline: Outline<Anchor>,
|
||||
selected_match_index: usize,
|
||||
prev_scroll_position: Option<Point<f32>>,
|
||||
matches: Vec<StringMatch>,
|
||||
last_query: String,
|
||||
}
|
||||
|
||||
impl OutlineViewDelegate {
|
||||
fn new(
|
||||
outline_view: WeakView<OutlineView>,
|
||||
outline: Outline<Anchor>,
|
||||
editor: View<Editor>,
|
||||
cx: &mut ViewContext<OutlineView>,
|
||||
) -> Self {
|
||||
Self {
|
||||
outline_view,
|
||||
last_query: Default::default(),
|
||||
matches: Default::default(),
|
||||
selected_match_index: 0,
|
||||
prev_scroll_position: Some(editor.update(cx, |editor, cx| editor.scroll_position(cx))),
|
||||
active_editor: editor,
|
||||
outline,
|
||||
}
|
||||
}
|
||||
|
||||
fn restore_active_editor(&mut self, cx: &mut WindowContext) {
|
||||
self.active_editor.update(cx, |editor, cx| {
|
||||
editor.highlight_rows(None);
|
||||
if let Some(scroll_position) = self.prev_scroll_position {
|
||||
editor.set_scroll_position(scroll_position, cx);
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn set_selected_index(
|
||||
&mut self,
|
||||
ix: usize,
|
||||
navigate: bool,
|
||||
cx: &mut ViewContext<Picker<OutlineViewDelegate>>,
|
||||
) {
|
||||
self.selected_match_index = ix;
|
||||
|
||||
if navigate && !self.matches.is_empty() {
|
||||
let selected_match = &self.matches[self.selected_match_index];
|
||||
let outline_item = &self.outline.items[selected_match.candidate_id];
|
||||
|
||||
self.active_editor.update(cx, |active_editor, cx| {
|
||||
let snapshot = active_editor.snapshot(cx).display_snapshot;
|
||||
let buffer_snapshot = &snapshot.buffer_snapshot;
|
||||
let start = outline_item.range.start.to_point(buffer_snapshot);
|
||||
let end = outline_item.range.end.to_point(buffer_snapshot);
|
||||
let display_rows = start.to_display_point(&snapshot).row()
|
||||
..end.to_display_point(&snapshot).row() + 1;
|
||||
active_editor.highlight_rows(Some(display_rows));
|
||||
active_editor.request_autoscroll(Autoscroll::center(), cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for OutlineViewDelegate {
|
||||
type ListItem = ListItem;
|
||||
|
||||
fn placeholder_text(&self) -> Arc<str> {
|
||||
"Search buffer symbols...".into()
|
||||
}
|
||||
|
||||
fn match_count(&self) -> usize {
|
||||
self.matches.len()
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_match_index
|
||||
}
|
||||
|
||||
fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext<Picker<OutlineViewDelegate>>) {
|
||||
self.set_selected_index(ix, true, cx);
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
query: String,
|
||||
cx: &mut ViewContext<Picker<OutlineViewDelegate>>,
|
||||
) -> Task<()> {
|
||||
let selected_index;
|
||||
if query.is_empty() {
|
||||
self.restore_active_editor(cx);
|
||||
self.matches = self
|
||||
.outline
|
||||
.items
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, _)| StringMatch {
|
||||
candidate_id: index,
|
||||
score: Default::default(),
|
||||
positions: Default::default(),
|
||||
string: Default::default(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
let editor = self.active_editor.read(cx);
|
||||
let cursor_offset = editor.selections.newest::<usize>(cx).head();
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
selected_index = self
|
||||
.outline
|
||||
.items
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(ix, item)| {
|
||||
let range = item.range.to_offset(&buffer);
|
||||
let distance_to_closest_endpoint = cmp::min(
|
||||
(range.start as isize - cursor_offset as isize).abs(),
|
||||
(range.end as isize - cursor_offset as isize).abs(),
|
||||
);
|
||||
let depth = if range.contains(&cursor_offset) {
|
||||
Some(item.depth)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
(ix, depth, distance_to_closest_endpoint)
|
||||
})
|
||||
.max_by_key(|(_, depth, distance)| (*depth, Reverse(*distance)))
|
||||
.map(|(ix, _, _)| ix)
|
||||
.unwrap_or(0);
|
||||
} else {
|
||||
self.matches = smol::block_on(
|
||||
self.outline
|
||||
.search(&query, cx.background_executor().clone()),
|
||||
);
|
||||
selected_index = self
|
||||
.matches
|
||||
.iter()
|
||||
.enumerate()
|
||||
.max_by_key(|(_, m)| OrderedFloat(m.score))
|
||||
.map(|(ix, _)| ix)
|
||||
.unwrap_or(0);
|
||||
}
|
||||
self.last_query = query;
|
||||
self.set_selected_index(selected_index, !self.last_query.is_empty(), cx);
|
||||
Task::ready(())
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _: bool, cx: &mut ViewContext<Picker<OutlineViewDelegate>>) {
|
||||
self.prev_scroll_position.take();
|
||||
|
||||
self.active_editor.update(cx, |active_editor, cx| {
|
||||
if let Some(rows) = active_editor.highlighted_rows() {
|
||||
let snapshot = active_editor.snapshot(cx).display_snapshot;
|
||||
let position = DisplayPoint::new(rows.start, 0).to_point(&snapshot);
|
||||
active_editor.change_selections(Some(Autoscroll::center()), cx, |s| {
|
||||
s.select_ranges([position..position])
|
||||
});
|
||||
active_editor.highlight_rows(None);
|
||||
}
|
||||
});
|
||||
|
||||
self.dismissed(cx);
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, cx: &mut ViewContext<Picker<OutlineViewDelegate>>) {
|
||||
self.outline_view
|
||||
.update(cx, |_, cx| cx.emit(DismissEvent))
|
||||
.log_err();
|
||||
self.restore_active_editor(cx);
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
_: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let mat = &self.matches[ix];
|
||||
let outline_item = &self.outline.items[mat.candidate_id];
|
||||
|
||||
let highlights = gpui::combine_highlights(
|
||||
mat.ranges().map(|range| (range, FontWeight::BOLD.into())),
|
||||
outline_item.highlight_ranges.iter().cloned(),
|
||||
);
|
||||
|
||||
let styled_text = StyledText::new(outline_item.text.clone())
|
||||
.with_highlights(&TextStyle::default(), highlights);
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.selected(selected)
|
||||
.child(div().pl(rems(outline_item.depth as f32)).child(styled_text)),
|
||||
)
|
||||
}
|
||||
}
|
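The initial selection logic in `OutlineViewDelegate::update_matches` prefers the deepest outline item whose range contains the cursor, breaking ties by distance to the nearer range endpoint, via `max_by_key` over `(depth, Reverse(distance))`. Below is a self-contained sketch of that heuristic; `OutlineItem` is a stand-in for the real outline item type.

```rust
// Sketch of the outline view's initial-selection heuristic: enclosing items
// (depth = Some(_)) always outrank non-enclosing ones (depth = None), deeper
// items win among enclosing ones, and `Reverse(distance)` prefers the item
// whose nearer endpoint is closest to the cursor.
use std::cmp::Reverse;
use std::ops::Range;

struct OutlineItem {
    depth: usize,
    range: Range<usize>,
}

fn initially_selected(items: &[OutlineItem], cursor: usize) -> usize {
    items
        .iter()
        .enumerate()
        .map(|(ix, item)| {
            let distance = std::cmp::min(
                (item.range.start as isize - cursor as isize).abs(),
                (item.range.end as isize - cursor as isize).abs(),
            );
            let depth = item.range.contains(&cursor).then_some(item.depth);
            (ix, depth, distance)
        })
        .max_by_key(|(_, depth, distance)| (*depth, Reverse(*distance)))
        .map(|(ix, _, _)| ix)
        .unwrap_or(0)
}

fn main() {
    let items = [
        OutlineItem { depth: 0, range: 0..100 }, // enclosing module
        OutlineItem { depth: 1, range: 10..40 }, // function containing the cursor
        OutlineItem { depth: 1, range: 50..90 }, // later sibling function
    ];
    assert_eq!(initially_selected(&items, 20), 1);
    println!("selected item index: {}", initially_selected(&items, 20));
}
```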
@ -178,6 +178,15 @@ impl<D: PickerDelegate> Picker<D> {
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn query(&self, cx: &AppContext) -> String {
|
||||
self.editor.read(cx).text(cx)
|
||||
}
|
||||
|
||||
pub fn set_query(&self, query: impl Into<Arc<str>>, cx: &mut ViewContext<Self>) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.set_text(query, cx));
|
||||
}
|
||||
}
|
||||
|
||||
impl<D: PickerDelegate> Render for Picker<D> {
|
||||
|
@ -1121,20 +1121,22 @@ impl Project {
|
||||
project_path: impl Into<ProjectPath>,
|
||||
is_directory: bool,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Option<Task<Result<Entry>>> {
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let project_path = project_path.into();
|
||||
let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
|
||||
let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
|
||||
return Task::ready(Ok(None));
|
||||
};
|
||||
if self.is_local() {
|
||||
Some(worktree.update(cx, |worktree, cx| {
|
||||
worktree.update(cx, |worktree, cx| {
|
||||
worktree
|
||||
.as_local_mut()
|
||||
.unwrap()
|
||||
.create_entry(project_path.path, is_directory, cx)
|
||||
}))
|
||||
})
|
||||
} else {
|
||||
let client = self.client.clone();
|
||||
let project_id = self.remote_id().unwrap();
|
||||
Some(cx.spawn_weak(|_, mut cx| async move {
|
||||
cx.spawn_weak(|_, mut cx| async move {
|
||||
let response = client
|
||||
.request(proto::CreateProjectEntry {
|
||||
worktree_id: project_path.worktree_id.to_proto(),
|
||||
@ -1143,19 +1145,20 @@ impl Project {
|
||||
is_directory,
|
||||
})
|
||||
.await?;
|
||||
let entry = response
|
||||
.entry
|
||||
.ok_or_else(|| anyhow!("missing entry in response"))?;
|
||||
worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
}))
|
||||
match response.entry {
|
||||
Some(entry) => worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.map(Some),
|
||||
None => Ok(None),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -1164,8 +1167,10 @@ impl Project {
|
||||
entry_id: ProjectEntryId,
|
||||
new_path: impl Into<Arc<Path>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Option<Task<Result<Entry>>> {
|
||||
let worktree = self.worktree_for_entry(entry_id, cx)?;
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
|
||||
return Task::ready(Ok(None));
|
||||
};
|
||||
let new_path = new_path.into();
|
||||
if self.is_local() {
|
||||
worktree.update(cx, |worktree, cx| {
|
||||
@ -1178,7 +1183,7 @@ impl Project {
|
||||
let client = self.client.clone();
|
||||
let project_id = self.remote_id().unwrap();
|
||||
|
||||
Some(cx.spawn_weak(|_, mut cx| async move {
|
||||
cx.spawn_weak(|_, mut cx| async move {
|
||||
let response = client
|
||||
.request(proto::CopyProjectEntry {
|
||||
project_id,
|
||||
@ -1186,19 +1191,20 @@ impl Project {
|
||||
new_path: new_path.to_string_lossy().into(),
|
||||
})
|
||||
.await?;
|
||||
let entry = response
|
||||
.entry
|
||||
.ok_or_else(|| anyhow!("missing entry in response"))?;
|
||||
worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
}))
|
||||
match response.entry {
|
||||
Some(entry) => worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.map(Some),
|
||||
None => Ok(None),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -1207,8 +1213,10 @@ impl Project {
|
||||
entry_id: ProjectEntryId,
|
||||
new_path: impl Into<Arc<Path>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Option<Task<Result<Entry>>> {
|
||||
let worktree = self.worktree_for_entry(entry_id, cx)?;
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
|
||||
return Task::ready(Ok(None));
|
||||
};
|
||||
let new_path = new_path.into();
|
||||
if self.is_local() {
|
||||
worktree.update(cx, |worktree, cx| {
|
||||
@ -1221,7 +1229,7 @@ impl Project {
|
||||
let client = self.client.clone();
|
||||
let project_id = self.remote_id().unwrap();
|
||||
|
||||
Some(cx.spawn_weak(|_, mut cx| async move {
|
||||
cx.spawn_weak(|_, mut cx| async move {
|
||||
let response = client
|
||||
.request(proto::RenameProjectEntry {
|
||||
project_id,
|
||||
@ -1229,19 +1237,20 @@ impl Project {
|
||||
new_path: new_path.to_string_lossy().into(),
|
||||
})
|
||||
.await?;
|
||||
let entry = response
|
||||
.entry
|
||||
.ok_or_else(|| anyhow!("missing entry in response"))?;
|
||||
worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
}))
|
||||
match response.entry {
|
||||
Some(entry) => worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.map(Some),
|
||||
None => Ok(None),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
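The `create_entry`, `copy_entry`, and `rename_entry` hunks above all make the same signature change: `Option<Task<Result<Entry>>>` becomes `Task<Result<Option<Entry>>>`, so a missing worktree or excluded path is reported inside the task as `Ok(None)` instead of as an absent task. The sketch below illustrates the two caller shapes. It assumes the `anyhow` and `futures` crates (both already in the workspace); `Task` and `Entry` here are simplified stand-ins for `gpui::Task` and the project's `Entry`.

```rust
// Sketch of the return-type change: "nothing to do" moves from the Option
// around the task into the Option inside the task's result.
use anyhow::Result;
use futures::executor::block_on;
use std::future::Future;
use std::pin::Pin;

type Task<T> = Pin<Box<dyn Future<Output = T>>>;

#[derive(Debug)]
struct Entry {
    path: String,
}

fn ready<T: 'static>(value: T) -> Task<T> {
    Box::pin(async move { value })
}

// Old shape: the caller unwraps the Option before it can await anything.
fn create_entry_old(worktree_exists: bool, path: &str) -> Option<Task<Result<Entry>>> {
    if !worktree_exists {
        return None;
    }
    let path = path.to_string();
    Some(ready(Ok(Entry { path })))
}

// New shape: always a task; a missing worktree becomes Ok(None).
fn create_entry_new(worktree_exists: bool, path: &str) -> Task<Result<Option<Entry>>> {
    if !worktree_exists {
        return ready(Ok(None));
    }
    let path = path.to_string();
    ready(Ok(Some(Entry { path })))
}

fn main() -> Result<()> {
    // Old API: handle the Option first, then await the Result.
    let entry = block_on(create_entry_old(true, "a/b.txt").unwrap())?;
    println!("old: {entry:?}");

    // New API: always await, then inspect the inner Option, which is why the
    // updated worktree tests end in `.await.unwrap().unwrap()`.
    let entry = block_on(create_entry_new(true, "a/b.txt"))?;
    println!("new: {entry:?}");
    Ok(())
}
```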
@ -1658,18 +1667,15 @@ impl Project {
|
||||
|
||||
pub fn open_path(
|
||||
&mut self,
|
||||
path: impl Into<ProjectPath>,
|
||||
path: ProjectPath,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
|
||||
let task = self.open_buffer(path, cx);
|
||||
) -> Task<Result<(Option<ProjectEntryId>, AnyModelHandle)>> {
|
||||
let task = self.open_buffer(path.clone(), cx);
|
||||
cx.spawn_weak(|_, cx| async move {
|
||||
let buffer = task.await?;
|
||||
let project_entry_id = buffer
|
||||
.read_with(&cx, |buffer, cx| {
|
||||
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
|
||||
})
|
||||
.ok_or_else(|| anyhow!("no project entry"))?;
|
||||
|
||||
let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
|
||||
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
|
||||
});
|
||||
let buffer: &AnyModelHandle = &buffer;
|
||||
Ok((project_entry_id, buffer.clone()))
|
||||
})
|
||||
@ -1984,8 +1990,10 @@ impl Project {
|
||||
remote_id,
|
||||
);
|
||||
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(file.entry_id, remote_id);
|
||||
if let Some(entry_id) = file.entry_id {
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(entry_id, remote_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2440,24 +2448,25 @@ impl Project {
|
||||
return None;
|
||||
};
|
||||
|
||||
match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
|
||||
Some(_) => {
|
||||
return None;
|
||||
let remote_id = buffer.read(cx).remote_id();
|
||||
if let Some(entry_id) = file.entry_id {
|
||||
match self.local_buffer_ids_by_entry_id.get(&entry_id) {
|
||||
Some(_) => {
|
||||
return None;
|
||||
}
|
||||
None => {
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(entry_id, remote_id);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let remote_id = buffer.read(cx).remote_id();
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(file.entry_id, remote_id);
|
||||
|
||||
self.local_buffer_ids_by_path.insert(
|
||||
ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path.clone(),
|
||||
},
|
||||
remote_id,
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
self.local_buffer_ids_by_path.insert(
|
||||
ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path.clone(),
|
||||
},
|
||||
remote_id,
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -5775,11 +5784,6 @@ impl Project {
|
||||
while let Some(ignored_abs_path) =
|
||||
ignored_paths_to_process.pop_front()
|
||||
{
|
||||
if !query.file_matches(Some(&ignored_abs_path))
|
||||
|| snapshot.is_path_excluded(&ignored_abs_path)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if let Some(fs_metadata) = fs
|
||||
.metadata(&ignored_abs_path)
|
||||
.await
|
||||
@ -5807,6 +5811,13 @@ impl Project {
|
||||
}
|
||||
}
|
||||
} else if !fs_metadata.is_symlink {
|
||||
if !query.file_matches(Some(&ignored_abs_path))
|
||||
|| snapshot.is_path_excluded(
|
||||
ignored_entry.path.to_path_buf(),
|
||||
)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
let matches = if let Some(file) = fs
|
||||
.open_sync(&ignored_abs_path)
|
||||
.await
|
||||
@ -6207,10 +6218,13 @@ impl Project {
|
||||
return;
|
||||
}
|
||||
|
||||
let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
|
||||
let new_file = if let Some(entry) = old_file
|
||||
.entry_id
|
||||
.and_then(|entry_id| snapshot.entry_for_id(entry_id))
|
||||
{
|
||||
File {
|
||||
is_local: true,
|
||||
entry_id: entry.id,
|
||||
entry_id: Some(entry.id),
|
||||
mtime: entry.mtime,
|
||||
path: entry.path.clone(),
|
||||
worktree: worktree_handle.clone(),
|
||||
@ -6219,7 +6233,7 @@ impl Project {
|
||||
} else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
|
||||
File {
|
||||
is_local: true,
|
||||
entry_id: entry.id,
|
||||
entry_id: Some(entry.id),
|
||||
mtime: entry.mtime,
|
||||
path: entry.path.clone(),
|
||||
worktree: worktree_handle.clone(),
|
||||
@ -6249,10 +6263,12 @@ impl Project {
|
||||
);
|
||||
}
|
||||
|
||||
if new_file.entry_id != *entry_id {
|
||||
if new_file.entry_id != Some(*entry_id) {
|
||||
self.local_buffer_ids_by_entry_id.remove(entry_id);
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(new_file.entry_id, buffer_id);
|
||||
if let Some(entry_id) = new_file.entry_id {
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(entry_id, buffer_id);
|
||||
}
|
||||
}
|
||||
|
||||
if new_file != *old_file {
|
||||
@ -6815,7 +6831,7 @@ impl Project {
|
||||
})
|
||||
.await?;
|
||||
Ok(proto::ProjectEntryResponse {
|
||||
entry: Some((&entry).into()),
|
||||
entry: entry.as_ref().map(|e| e.into()),
|
||||
worktree_scan_id: worktree_scan_id as u64,
|
||||
})
|
||||
}
|
||||
@ -6839,11 +6855,10 @@ impl Project {
|
||||
.as_local_mut()
|
||||
.unwrap()
|
||||
.rename_entry(entry_id, new_path, cx)
|
||||
.ok_or_else(|| anyhow!("invalid entry"))
|
||||
})?
|
||||
})
|
||||
.await?;
|
||||
Ok(proto::ProjectEntryResponse {
|
||||
entry: Some((&entry).into()),
|
||||
entry: entry.as_ref().map(|e| e.into()),
|
||||
worktree_scan_id: worktree_scan_id as u64,
|
||||
})
|
||||
}
|
||||
@ -6867,11 +6882,10 @@ impl Project {
|
||||
.as_local_mut()
|
||||
.unwrap()
|
||||
.copy_entry(entry_id, new_path, cx)
|
||||
.ok_or_else(|| anyhow!("invalid entry"))
|
||||
})?
|
||||
})
|
||||
.await?;
|
||||
Ok(proto::ProjectEntryResponse {
|
||||
entry: Some((&entry).into()),
|
||||
entry: entry.as_ref().map(|e| e.into()),
|
||||
worktree_scan_id: worktree_scan_id as u64,
|
||||
})
|
||||
}
|
||||
|
@ -4050,6 +4050,94 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
fs.insert_tree(
|
||||
"/dir",
|
||||
json!({
|
||||
".git": {},
|
||||
".gitignore": "**/target\n/node_modules\n",
|
||||
"target": {
|
||||
"index.txt": "index_key:index_value"
|
||||
},
|
||||
"node_modules": {
|
||||
"eslint": {
|
||||
"index.ts": "const eslint_key = 'eslint value'",
|
||||
"package.json": r#"{ "some_key": "some value" }"#,
|
||||
},
|
||||
"prettier": {
|
||||
"index.ts": "const prettier_key = 'prettier value'",
|
||||
"package.json": r#"{ "other_key": "other value" }"#,
|
||||
},
|
||||
},
|
||||
"package.json": r#"{ "main_key": "main value" }"#,
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
|
||||
|
||||
let query = "key";
|
||||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
.unwrap(),
|
||||
HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
|
||||
"Only one non-ignored file should have the query"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
.unwrap(),
|
||||
HashMap::from_iter([
|
||||
("package.json".to_string(), vec![8..11]),
|
||||
("target/index.txt".to_string(), vec![6..9]),
|
||||
(
|
||||
"node_modules/prettier/package.json".to_string(),
|
||||
vec![9..12]
|
||||
),
|
||||
("node_modules/prettier/index.ts".to_string(), vec![15..18]),
|
||||
("node_modules/eslint/index.ts".to_string(), vec![13..16]),
|
||||
("node_modules/eslint/package.json".to_string(), vec![8..11]),
|
||||
]),
|
||||
"Unrestricted search with ignored directories should find every file with the query"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text(
|
||||
query,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
|
||||
vec![PathMatcher::new("*.ts").unwrap()],
|
||||
)
|
||||
.unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
.unwrap(),
|
||||
HashMap::from_iter([(
|
||||
"node_modules/prettier/package.json".to_string(),
|
||||
vec![9..12]
|
||||
)]),
|
||||
"With search including ignored prettier directory and excluding TS files, only one file should be found"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_glob_literal_prefix() {
|
||||
assert_eq!(glob_literal_prefix("**/*.js"), "");
|
||||
|
@ -371,15 +371,25 @@ impl SearchQuery {
|
||||
pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
|
||||
match file_path {
|
||||
Some(file_path) => {
|
||||
!self
|
||||
.files_to_exclude()
|
||||
.iter()
|
||||
.any(|exclude_glob| exclude_glob.is_match(file_path))
|
||||
&& (self.files_to_include().is_empty()
|
||||
let mut path = file_path.to_path_buf();
|
||||
loop {
|
||||
if self
|
||||
.files_to_exclude()
|
||||
.iter()
|
||||
.any(|exclude_glob| exclude_glob.is_match(&path))
|
||||
{
|
||||
return false;
|
||||
} else if self.files_to_include().is_empty()
|
||||
|| self
|
||||
.files_to_include()
|
||||
.iter()
|
||||
.any(|include_glob| include_glob.is_match(file_path)))
|
||||
.any(|include_glob| include_glob.is_match(&path))
|
||||
{
|
||||
return true;
|
||||
} else if !path.pop() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
None => self.files_to_include().is_empty(),
|
||||
}
|
||||
|
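The reworked `SearchQuery::file_matches` above no longer tests only the file path itself: it walks up through the path's ancestors, so an exclusion or inclusion glob that matches a parent directory also covers every file beneath it. Here is a std-only sketch of that loop; the closure predicates stand in for the real `PathMatcher` globs.

```rust
// Sketch of the ancestor-walking match loop: exclusion wins first, then an
// empty include list or any matching include accepts the path, otherwise the
// last path component is popped and the checks repeat on the parent.
use std::path::Path;

fn file_matches(
    file_path: &Path,
    files_to_exclude: &[&dyn Fn(&Path) -> bool],
    files_to_include: &[&dyn Fn(&Path) -> bool],
) -> bool {
    let mut path = file_path.to_path_buf();
    loop {
        if files_to_exclude.iter().any(|exclude| exclude(&path)) {
            return false;
        } else if files_to_include.is_empty()
            || files_to_include.iter().any(|include| include(&path))
        {
            return true;
        } else if !path.pop() {
            return false;
        }
    }
}

fn main() {
    let exclude_target = |path: &Path| path.ends_with("target");
    let include_prettier = |path: &Path| path.ends_with("node_modules/prettier");
    let excludes: Vec<&dyn Fn(&Path) -> bool> = vec![&exclude_target];
    let includes: Vec<&dyn Fn(&Path) -> bool> = vec![&include_prettier];

    // Rejected: an ancestor ("target") matches the exclusion.
    assert!(!file_matches(Path::new("target/debug/build.log"), &excludes, &includes));
    // Accepted: an ancestor matches the inclusion.
    assert!(file_matches(
        Path::new("node_modules/prettier/package.json"),
        &excludes,
        &includes,
    ));
    println!("ancestor matching behaves as expected");
}
```

The `LocalSnapshot::is_path_excluded` change further down in this diff applies the same pop-the-ancestors idea to worktree scan exclusions.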
@ -960,8 +960,6 @@ impl LocalWorktree {
|
||||
|
||||
cx.spawn(|this, cx| async move {
|
||||
let text = fs.load(&abs_path).await?;
|
||||
let entry = entry.await?;
|
||||
|
||||
let mut index_task = None;
|
||||
let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot());
|
||||
if let Some(repo) = snapshot.repository_for_path(&path) {
|
||||
@ -981,18 +979,43 @@ impl LocalWorktree {
|
||||
None
|
||||
};
|
||||
|
||||
Ok((
|
||||
File {
|
||||
entry_id: entry.id,
|
||||
worktree: this,
|
||||
path: entry.path,
|
||||
mtime: entry.mtime,
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
},
|
||||
text,
|
||||
diff_base,
|
||||
))
|
||||
match entry.await? {
|
||||
Some(entry) => Ok((
|
||||
File {
|
||||
entry_id: Some(entry.id),
|
||||
worktree: this,
|
||||
path: entry.path,
|
||||
mtime: entry.mtime,
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
},
|
||||
text,
|
||||
diff_base,
|
||||
)),
|
||||
None => {
|
||||
let metadata = fs
|
||||
.metadata(&abs_path)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!("Loading metadata for excluded file {abs_path:?}")
|
||||
})?
|
||||
.with_context(|| {
|
||||
format!("Excluded file {abs_path:?} got removed during loading")
|
||||
})?;
|
||||
Ok((
|
||||
File {
|
||||
entry_id: None,
|
||||
worktree: this,
|
||||
path,
|
||||
mtime: metadata.mtime,
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
},
|
||||
text,
|
||||
diff_base,
|
||||
))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@ -1013,17 +1036,37 @@ impl LocalWorktree {
|
||||
let text = buffer.as_rope().clone();
|
||||
let fingerprint = text.fingerprint();
|
||||
let version = buffer.version();
|
||||
let save = self.write_file(path, text, buffer.line_ending(), cx);
|
||||
let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
|
||||
let fs = Arc::clone(&self.fs);
|
||||
let abs_path = self.absolutize(&path);
|
||||
|
||||
cx.as_mut().spawn(|mut cx| async move {
|
||||
let entry = save.await?;
|
||||
|
||||
let (entry_id, mtime, path) = match entry {
|
||||
Some(entry) => (Some(entry.id), entry.mtime, entry.path),
|
||||
None => {
|
||||
let metadata = fs
|
||||
.metadata(&abs_path)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Fetching metadata after saving the excluded buffer {abs_path:?}"
|
||||
)
|
||||
})?
|
||||
.with_context(|| {
|
||||
format!("Excluded buffer {path:?} got removed during saving")
|
||||
})?;
|
||||
(None, metadata.mtime, path)
|
||||
}
|
||||
};
|
||||
|
||||
if has_changed_file {
|
||||
let new_file = Arc::new(File {
|
||||
entry_id: entry.id,
|
||||
entry_id,
|
||||
worktree: handle,
|
||||
path: entry.path,
|
||||
mtime: entry.mtime,
|
||||
path,
|
||||
mtime,
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
});
|
||||
@ -1049,13 +1092,13 @@ impl LocalWorktree {
|
||||
project_id,
|
||||
buffer_id,
|
||||
version: serialize_version(&version),
|
||||
mtime: Some(entry.mtime.into()),
|
||||
mtime: Some(mtime.into()),
|
||||
fingerprint: serialize_fingerprint(fingerprint),
|
||||
})?;
|
||||
}
|
||||
|
||||
buffer_handle.update(&mut cx, |buffer, cx| {
|
||||
buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
|
||||
buffer.did_save(version.clone(), fingerprint, mtime, cx);
|
||||
});
|
||||
|
||||
Ok(())
|
||||
@ -1080,7 +1123,7 @@ impl LocalWorktree {
|
||||
path: impl Into<Arc<Path>>,
|
||||
is_dir: bool,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Task<Result<Entry>> {
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let path = path.into();
|
||||
let lowest_ancestor = self.lowest_ancestor(&path);
|
||||
let abs_path = self.absolutize(&path);
|
||||
@ -1097,7 +1140,7 @@ impl LocalWorktree {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
write.await?;
|
||||
let (result, refreshes) = this.update(&mut cx, |this, cx| {
|
||||
let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new();
|
||||
let mut refreshes = Vec::new();
|
||||
let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
|
||||
for refresh_path in refresh_paths.ancestors() {
|
||||
if refresh_path == Path::new("") {
|
||||
@ -1124,14 +1167,14 @@ impl LocalWorktree {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn write_file(
|
||||
pub(crate) fn write_file(
|
||||
&self,
|
||||
path: impl Into<Arc<Path>>,
|
||||
text: Rope,
|
||||
line_ending: LineEnding,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Task<Result<Entry>> {
|
||||
let path = path.into();
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let path: Arc<Path> = path.into();
|
||||
let abs_path = self.absolutize(&path);
|
||||
let fs = self.fs.clone();
|
||||
let write = cx
|
||||
@ -1190,8 +1233,11 @@ impl LocalWorktree {
|
||||
entry_id: ProjectEntryId,
|
||||
new_path: impl Into<Arc<Path>>,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Option<Task<Result<Entry>>> {
|
||||
let old_path = self.entry_for_id(entry_id)?.path.clone();
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let old_path = match self.entry_for_id(entry_id) {
|
||||
Some(entry) => entry.path.clone(),
|
||||
None => return Task::ready(Ok(None)),
|
||||
};
|
||||
let new_path = new_path.into();
|
||||
let abs_old_path = self.absolutize(&old_path);
|
||||
let abs_new_path = self.absolutize(&new_path);
|
||||
@ -1201,7 +1247,7 @@ impl LocalWorktree {
|
||||
.await
|
||||
});
|
||||
|
||||
Some(cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
rename.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.as_local_mut()
|
||||
@ -1209,7 +1255,7 @@ impl LocalWorktree {
|
||||
.refresh_entry(new_path.clone(), Some(old_path), cx)
|
||||
})
|
||||
.await
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn copy_entry(
|
||||
@ -1217,8 +1263,11 @@ impl LocalWorktree {
|
||||
entry_id: ProjectEntryId,
|
||||
new_path: impl Into<Arc<Path>>,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Option<Task<Result<Entry>>> {
|
||||
let old_path = self.entry_for_id(entry_id)?.path.clone();
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let old_path = match self.entry_for_id(entry_id) {
|
||||
Some(entry) => entry.path.clone(),
|
||||
None => return Task::ready(Ok(None)),
|
||||
};
|
||||
let new_path = new_path.into();
|
||||
let abs_old_path = self.absolutize(&old_path);
|
||||
let abs_new_path = self.absolutize(&new_path);
|
||||
@ -1233,7 +1282,7 @@ impl LocalWorktree {
|
||||
.await
|
||||
});
|
||||
|
||||
Some(cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
copy.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.as_local_mut()
|
||||
@ -1241,7 +1290,7 @@ impl LocalWorktree {
|
||||
.refresh_entry(new_path.clone(), None, cx)
|
||||
})
|
||||
.await
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn expand_entry(
|
||||
@ -1277,7 +1326,10 @@ impl LocalWorktree {
|
||||
path: Arc<Path>,
|
||||
old_path: Option<Arc<Path>>,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Task<Result<Entry>> {
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
if self.is_path_excluded(path.to_path_buf()) {
|
||||
return Task::ready(Ok(None));
|
||||
}
|
||||
let paths = if let Some(old_path) = old_path.as_ref() {
|
||||
vec![old_path.clone(), path.clone()]
|
||||
} else {
|
||||
@ -1286,13 +1338,15 @@ impl LocalWorktree {
|
||||
let mut refresh = self.refresh_entries_for_paths(paths);
|
||||
cx.spawn_weak(move |this, mut cx| async move {
|
||||
refresh.recv().await;
|
||||
this.upgrade(&cx)
|
||||
let new_entry = this
|
||||
.upgrade(&cx)
|
||||
.ok_or_else(|| anyhow!("worktree was dropped"))?
|
||||
.update(&mut cx, |this, _| {
|
||||
this.entry_for_path(path)
|
||||
.cloned()
|
||||
.ok_or_else(|| anyhow!("failed to read path after update"))
|
||||
})
|
||||
})?;
|
||||
Ok(Some(new_entry))
|
||||
})
|
||||
}
|
||||
|
||||
@ -2226,10 +2280,19 @@ impl LocalSnapshot {
|
||||
paths
|
||||
}
|
||||
|
||||
pub fn is_path_excluded(&self, abs_path: &Path) -> bool {
|
||||
self.file_scan_exclusions
|
||||
.iter()
|
||||
.any(|exclude_matcher| exclude_matcher.is_match(abs_path))
|
||||
pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
|
||||
loop {
|
||||
if self
|
||||
.file_scan_exclusions
|
||||
.iter()
|
||||
.any(|exclude_matcher| exclude_matcher.is_match(&path))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
if !path.pop() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2458,8 +2521,7 @@ impl BackgroundScannerState {
|
||||
ids_to_preserve.insert(work_directory_id);
|
||||
} else {
|
||||
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
|
||||
let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
|
||||
|| snapshot.is_path_excluded(&git_dir_abs_path);
|
||||
let git_dir_excluded = snapshot.is_path_excluded(entry.git_dir_path.to_path_buf());
|
||||
if git_dir_excluded
|
||||
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
|
||||
{
|
||||
@ -2666,7 +2728,7 @@ pub struct File {
|
||||
pub worktree: ModelHandle<Worktree>,
|
||||
pub path: Arc<Path>,
|
||||
pub mtime: SystemTime,
|
||||
pub(crate) entry_id: ProjectEntryId,
|
||||
pub(crate) entry_id: Option<ProjectEntryId>,
|
||||
pub(crate) is_local: bool,
|
||||
pub(crate) is_deleted: bool,
|
||||
}
|
||||
@ -2735,7 +2797,7 @@ impl language::File for File {
|
||||
fn to_proto(&self) -> rpc::proto::File {
|
||||
rpc::proto::File {
|
||||
worktree_id: self.worktree.id() as u64,
|
||||
entry_id: self.entry_id.to_proto(),
|
||||
entry_id: self.entry_id.map(|id| id.to_proto()),
|
||||
path: self.path.to_string_lossy().into(),
|
||||
mtime: Some(self.mtime.into()),
|
||||
is_deleted: self.is_deleted,
|
||||
@ -2793,7 +2855,7 @@ impl File {
|
||||
worktree,
|
||||
path: entry.path.clone(),
|
||||
mtime: entry.mtime,
|
||||
entry_id: entry.id,
|
||||
entry_id: Some(entry.id),
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
})
|
||||
@ -2818,7 +2880,7 @@ impl File {
|
||||
worktree,
|
||||
path: Path::new(&proto.path).into(),
|
||||
mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
|
||||
entry_id: ProjectEntryId::from_proto(proto.entry_id),
|
||||
entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
|
||||
is_local: false,
|
||||
is_deleted: proto.is_deleted,
|
||||
})
|
||||
@ -2836,7 +2898,7 @@ impl File {
|
||||
if self.is_deleted {
|
||||
None
|
||||
} else {
|
||||
Some(self.entry_id)
|
||||
self.entry_id
|
||||
}
|
||||
}
|
||||
}
|
||||
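The `File` hunks above make `entry_id` an `Option<ProjectEntryId>`, since a buffer loaded from an excluded path has no worktree entry, and the proto conversion now maps the Option in both directions. The sketch below shows that round trip with simplified stand-ins for `ProjectEntryId` and the proto message.

```rust
// Sketch of the Option-al entry id: excluded files carry entry_id = None, and
// the proto layer serializes/deserializes the Option instead of a bare id.
#[derive(Clone, Copy, Debug, PartialEq)]
struct ProjectEntryId(u64);

impl ProjectEntryId {
    fn to_proto(self) -> u64 {
        self.0
    }
    fn from_proto(id: u64) -> Self {
        ProjectEntryId(id)
    }
}

struct File {
    entry_id: Option<ProjectEntryId>,
    is_deleted: bool,
}

struct ProtoFile {
    entry_id: Option<u64>,
}

impl File {
    fn to_proto(&self) -> ProtoFile {
        ProtoFile {
            entry_id: self.entry_id.map(|id| id.to_proto()),
        }
    }

    fn from_proto(proto: &ProtoFile) -> Self {
        File {
            entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
            is_deleted: false,
        }
    }

    // Mirrors the updated accessor: deleted files report None, otherwise the
    // possibly absent entry id passes through unchanged.
    fn project_entry_id(&self) -> Option<ProjectEntryId> {
        if self.is_deleted {
            None
        } else {
            self.entry_id
        }
    }
}

fn main() {
    let excluded = File { entry_id: None, is_deleted: false };
    let tracked = File { entry_id: Some(ProjectEntryId(7)), is_deleted: false };
    assert_eq!(excluded.to_proto().entry_id, None);
    assert_eq!(tracked.to_proto().entry_id, Some(7));
    let round_trip = File::from_proto(&tracked.to_proto());
    assert_eq!(round_trip.project_entry_id(), Some(ProjectEntryId(7)));
    println!("entry ids round-trip through the proto layer");
}
```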
@ -3338,16 +3400,7 @@ impl BackgroundScanner {
|
||||
return false;
|
||||
}
|
||||
|
||||
// FS events may come for files which parent directory is excluded, need to check ignore those.
|
||||
let mut path_to_test = abs_path.clone();
|
||||
let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
|
||||
|| snapshot.is_path_excluded(&relative_path);
|
||||
while !excluded_file_event && path_to_test.pop() {
|
||||
if snapshot.is_path_excluded(&path_to_test) {
|
||||
excluded_file_event = true;
|
||||
}
|
||||
}
|
||||
if excluded_file_event {
|
||||
if snapshot.is_path_excluded(relative_path.to_path_buf()) {
|
||||
if !is_git_related {
|
||||
log::debug!("ignoring FS event for excluded path {relative_path:?}");
|
||||
}
|
||||
@ -3531,7 +3584,7 @@ impl BackgroundScanner {
|
||||
let state = self.state.lock();
|
||||
let snapshot = &state.snapshot;
|
||||
root_abs_path = snapshot.abs_path().clone();
|
||||
if snapshot.is_path_excluded(&job.abs_path) {
|
||||
if snapshot.is_path_excluded(job.path.to_path_buf()) {
|
||||
log::error!("skipping excluded directory {:?}", job.path);
|
||||
return Ok(());
|
||||
}
|
||||
@ -3603,8 +3656,8 @@ impl BackgroundScanner {
|
||||
|
||||
{
|
||||
let mut state = self.state.lock();
|
||||
if state.snapshot.is_path_excluded(&child_abs_path) {
|
||||
let relative_path = job.path.join(child_name);
|
||||
let relative_path = job.path.join(child_name);
|
||||
if state.snapshot.is_path_excluded(relative_path.clone()) {
|
||||
log::debug!("skipping excluded child entry {relative_path:?}");
|
||||
state.remove_path(&relative_path);
|
||||
continue;
|
||||
|
@ -1052,11 +1052,12 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
||||
&[
|
||||
".git/HEAD",
|
||||
".git/foo",
|
||||
"node_modules",
|
||||
"node_modules/.DS_Store",
|
||||
"node_modules/prettier",
|
||||
"node_modules/prettier/package.json",
|
||||
],
|
||||
&["target", "node_modules"],
|
||||
&["target"],
|
||||
&[
|
||||
".DS_Store",
|
||||
"src/.DS_Store",
|
||||
@ -1106,6 +1107,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
||||
".git/HEAD",
|
||||
".git/foo",
|
||||
".git/new_file",
|
||||
"node_modules",
|
||||
"node_modules/.DS_Store",
|
||||
"node_modules/prettier",
|
||||
"node_modules/prettier/package.json",
|
||||
@ -1114,7 +1116,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
||||
"build_output/new_file",
|
||||
"test_output/new_file",
|
||||
],
|
||||
&["target", "node_modules", "test_output"],
|
||||
&["target", "test_output"],
|
||||
&[
|
||||
".DS_Store",
|
||||
"src/.DS_Store",
|
||||
@ -1174,6 +1176,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
|
||||
.create_entry("a/e".as_ref(), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_dir());
|
||||
|
||||
@ -1222,6 +1225,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_file());
|
||||
|
||||
@ -1257,6 +1261,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_file());
|
||||
|
||||
@ -1275,6 +1280,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
.create_entry("a/b/c/e.txt".as_ref(), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_file());
|
||||
|
||||
@ -1291,6 +1297,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
.create_entry("d/e/f/g.txt".as_ref(), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_file());
|
||||
|
||||
@ -1616,14 +1623,14 @@ fn randomly_mutate_worktree(
|
||||
entry.id.0,
|
||||
new_path
|
||||
);
|
||||
let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
|
||||
let task = worktree.rename_entry(entry.id, new_path, cx);
|
||||
cx.foreground().spawn(async move {
|
||||
task.await?;
|
||||
task.await?.unwrap();
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
let task = if entry.is_dir() {
|
||||
if entry.is_dir() {
|
||||
let child_path = entry.path.join(random_filename(rng));
|
||||
let is_dir = rng.gen_bool(0.3);
|
||||
log::info!(
|
||||
@ -1631,15 +1638,20 @@ fn randomly_mutate_worktree(
|
||||
if is_dir { "dir" } else { "file" },
|
||||
child_path,
|
||||
);
|
||||
worktree.create_entry(child_path, is_dir, cx)
|
||||
let task = worktree.create_entry(child_path, is_dir, cx);
|
||||
cx.foreground().spawn(async move {
|
||||
task.await?;
|
||||
Ok(())
|
||||
})
|
||||
} else {
|
||||
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
|
||||
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
|
||||
};
|
||||
cx.foreground().spawn(async move {
|
||||
task.await?;
|
||||
Ok(())
|
||||
})
|
||||
let task =
|
||||
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
|
||||
cx.foreground().spawn(async move {
|
||||
task.await?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1151,20 +1151,22 @@ impl Project {
project_path: impl Into<ProjectPath>,
is_directory: bool,
cx: &mut ModelContext<Self>,
) -> Option<Task<Result<Entry>>> {
) -> Task<Result<Option<Entry>>> {
let project_path = project_path.into();
let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
return Task::ready(Ok(None));
};
if self.is_local() {
Some(worktree.update(cx, |worktree, cx| {
worktree.update(cx, |worktree, cx| {
worktree
.as_local_mut()
.unwrap()
.create_entry(project_path.path, is_directory, cx)
}))
})
} else {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
Some(cx.spawn(move |_, mut cx| async move {
cx.spawn(move |_, mut cx| async move {
let response = client
.request(proto::CreateProjectEntry {
worktree_id: project_path.worktree_id.to_proto(),
@ -1173,19 +1175,20 @@ impl Project {
is_directory,
})
.await?;
let entry = response
.entry
.ok_or_else(|| anyhow!("missing entry in response"))?;
worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})?
.await
}))
match response.entry {
Some(entry) => worktree
.update(&mut cx, |worktree, cx| {
worktree.as_remote_mut().unwrap().insert_entry(
entry,
response.worktree_scan_id as usize,
cx,
)
})?
.await
.map(Some),
None => Ok(None),
}
})
}
}

@ -1194,8 +1197,10 @@ impl Project {
|
||||
entry_id: ProjectEntryId,
|
||||
new_path: impl Into<Arc<Path>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Option<Task<Result<Entry>>> {
|
||||
let worktree = self.worktree_for_entry(entry_id, cx)?;
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
|
||||
return Task::ready(Ok(None));
|
||||
};
|
||||
let new_path = new_path.into();
|
||||
if self.is_local() {
|
||||
worktree.update(cx, |worktree, cx| {
|
||||
@ -1208,7 +1213,7 @@ impl Project {
|
||||
let client = self.client.clone();
|
||||
let project_id = self.remote_id().unwrap();
|
||||
|
||||
Some(cx.spawn(move |_, mut cx| async move {
|
||||
cx.spawn(move |_, mut cx| async move {
|
||||
let response = client
|
||||
.request(proto::CopyProjectEntry {
|
||||
project_id,
|
||||
@ -1216,19 +1221,20 @@ impl Project {
|
||||
new_path: new_path.to_string_lossy().into(),
|
||||
})
|
||||
.await?;
|
||||
let entry = response
|
||||
.entry
|
||||
.ok_or_else(|| anyhow!("missing entry in response"))?;
|
||||
worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await
|
||||
}))
|
||||
match response.entry {
|
||||
Some(entry) => worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await
|
||||
.map(Some),
|
||||
None => Ok(None),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -1237,8 +1243,10 @@ impl Project {
|
||||
entry_id: ProjectEntryId,
|
||||
new_path: impl Into<Arc<Path>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Option<Task<Result<Entry>>> {
|
||||
let worktree = self.worktree_for_entry(entry_id, cx)?;
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
|
||||
return Task::ready(Ok(None));
|
||||
};
|
||||
let new_path = new_path.into();
|
||||
if self.is_local() {
|
||||
worktree.update(cx, |worktree, cx| {
|
||||
@ -1251,7 +1259,7 @@ impl Project {
|
||||
let client = self.client.clone();
|
||||
let project_id = self.remote_id().unwrap();
|
||||
|
||||
Some(cx.spawn(move |_, mut cx| async move {
|
||||
cx.spawn(move |_, mut cx| async move {
|
||||
let response = client
|
||||
.request(proto::RenameProjectEntry {
|
||||
project_id,
|
||||
@ -1259,19 +1267,20 @@ impl Project {
|
||||
new_path: new_path.to_string_lossy().into(),
|
||||
})
|
||||
.await?;
|
||||
let entry = response
|
||||
.entry
|
||||
.ok_or_else(|| anyhow!("missing entry in response"))?;
|
||||
worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await
|
||||
}))
|
||||
match response.entry {
|
||||
Some(entry) => worktree
|
||||
.update(&mut cx, |worktree, cx| {
|
||||
worktree.as_remote_mut().unwrap().insert_entry(
|
||||
entry,
|
||||
response.worktree_scan_id as usize,
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await
|
||||
.map(Some),
|
||||
None => Ok(None),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -1688,17 +1697,15 @@ impl Project {
|
||||
|
||||
pub fn open_path(
|
||||
&mut self,
|
||||
path: impl Into<ProjectPath>,
|
||||
path: ProjectPath,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<(ProjectEntryId, AnyModel)>> {
|
||||
let task = self.open_buffer(path, cx);
|
||||
cx.spawn(move |_, mut cx| async move {
|
||||
) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
|
||||
let task = self.open_buffer(path.clone(), cx);
|
||||
cx.spawn(move |_, cx| async move {
|
||||
let buffer = task.await?;
|
||||
let project_entry_id = buffer
|
||||
.update(&mut cx, |buffer, cx| {
|
||||
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
|
||||
})?
|
||||
.ok_or_else(|| anyhow!("no project entry"))?;
|
||||
let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
|
||||
File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
|
||||
})?;
|
||||
|
||||
let buffer: &AnyModel = &buffer;
|
||||
Ok((project_entry_id, buffer.clone()))
|
||||
@ -2017,8 +2024,10 @@ impl Project {
|
||||
remote_id,
|
||||
);
|
||||
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(file.entry_id, remote_id);
|
||||
if let Some(entry_id) = file.entry_id {
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(entry_id, remote_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2473,24 +2482,25 @@ impl Project {
|
||||
return None;
|
||||
};
|
||||
|
||||
match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
|
||||
Some(_) => {
|
||||
return None;
|
||||
let remote_id = buffer.read(cx).remote_id();
|
||||
if let Some(entry_id) = file.entry_id {
|
||||
match self.local_buffer_ids_by_entry_id.get(&entry_id) {
|
||||
Some(_) => {
|
||||
return None;
|
||||
}
|
||||
None => {
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(entry_id, remote_id);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let remote_id = buffer.read(cx).remote_id();
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(file.entry_id, remote_id);
|
||||
|
||||
self.local_buffer_ids_by_path.insert(
|
||||
ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path.clone(),
|
||||
},
|
||||
remote_id,
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
self.local_buffer_ids_by_path.insert(
|
||||
ProjectPath {
|
||||
worktree_id: file.worktree_id(cx),
|
||||
path: file.path.clone(),
|
||||
},
|
||||
remote_id,
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -5844,11 +5854,6 @@ impl Project {
|
||||
while let Some(ignored_abs_path) =
|
||||
ignored_paths_to_process.pop_front()
|
||||
{
|
||||
if !query.file_matches(Some(&ignored_abs_path))
|
||||
|| snapshot.is_path_excluded(&ignored_abs_path)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if let Some(fs_metadata) = fs
|
||||
.metadata(&ignored_abs_path)
|
||||
.await
|
||||
@ -5876,6 +5881,13 @@ impl Project {
|
||||
}
|
||||
}
|
||||
} else if !fs_metadata.is_symlink {
|
||||
if !query.file_matches(Some(&ignored_abs_path))
|
||||
|| snapshot.is_path_excluded(
|
||||
ignored_entry.path.to_path_buf(),
|
||||
)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
let matches = if let Some(file) = fs
|
||||
.open_sync(&ignored_abs_path)
|
||||
.await
|
||||
@ -6277,10 +6289,13 @@ impl Project {
|
||||
return;
|
||||
}
|
||||
|
||||
let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
|
||||
let new_file = if let Some(entry) = old_file
|
||||
.entry_id
|
||||
.and_then(|entry_id| snapshot.entry_for_id(entry_id))
|
||||
{
|
||||
File {
|
||||
is_local: true,
|
||||
entry_id: entry.id,
|
||||
entry_id: Some(entry.id),
|
||||
mtime: entry.mtime,
|
||||
path: entry.path.clone(),
|
||||
worktree: worktree_handle.clone(),
|
||||
@ -6289,7 +6304,7 @@ impl Project {
|
||||
} else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
|
||||
File {
|
||||
is_local: true,
|
||||
entry_id: entry.id,
|
||||
entry_id: Some(entry.id),
|
||||
mtime: entry.mtime,
|
||||
path: entry.path.clone(),
|
||||
worktree: worktree_handle.clone(),
|
||||
@ -6319,10 +6334,12 @@ impl Project {
|
||||
);
|
||||
}
|
||||
|
||||
if new_file.entry_id != *entry_id {
|
||||
if new_file.entry_id != Some(*entry_id) {
|
||||
self.local_buffer_ids_by_entry_id.remove(entry_id);
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(new_file.entry_id, buffer_id);
|
||||
if let Some(entry_id) = new_file.entry_id {
|
||||
self.local_buffer_ids_by_entry_id
|
||||
.insert(entry_id, buffer_id);
|
||||
}
|
||||
}
|
||||
|
||||
if new_file != *old_file {
|
||||
@ -6889,7 +6906,7 @@ impl Project {
|
||||
})?
|
||||
.await?;
|
||||
Ok(proto::ProjectEntryResponse {
|
||||
entry: Some((&entry).into()),
|
||||
entry: entry.as_ref().map(|e| e.into()),
|
||||
worktree_scan_id: worktree_scan_id as u64,
|
||||
})
|
||||
}
|
||||
@ -6913,11 +6930,10 @@ impl Project {
|
||||
.as_local_mut()
|
||||
.unwrap()
|
||||
.rename_entry(entry_id, new_path, cx)
|
||||
.ok_or_else(|| anyhow!("invalid entry"))
|
||||
})??
|
||||
})?
|
||||
.await?;
|
||||
Ok(proto::ProjectEntryResponse {
|
||||
entry: Some((&entry).into()),
|
||||
entry: entry.as_ref().map(|e| e.into()),
|
||||
worktree_scan_id: worktree_scan_id as u64,
|
||||
})
|
||||
}
|
||||
@ -6941,11 +6957,10 @@ impl Project {
|
||||
.as_local_mut()
|
||||
.unwrap()
|
||||
.copy_entry(entry_id, new_path, cx)
|
||||
.ok_or_else(|| anyhow!("invalid entry"))
|
||||
})??
|
||||
})?
|
||||
.await?;
|
||||
Ok(proto::ProjectEntryResponse {
|
||||
entry: Some((&entry).into()),
|
||||
entry: entry.as_ref().map(|e| e.into()),
|
||||
worktree_scan_id: worktree_scan_id as u64,
|
||||
})
|
||||
}
|
||||
|
@ -4182,6 +4182,94 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
"/dir",
|
||||
json!({
|
||||
".git": {},
|
||||
".gitignore": "**/target\n/node_modules\n",
|
||||
"target": {
|
||||
"index.txt": "index_key:index_value"
|
||||
},
|
||||
"node_modules": {
|
||||
"eslint": {
|
||||
"index.ts": "const eslint_key = 'eslint value'",
|
||||
"package.json": r#"{ "some_key": "some value" }"#,
|
||||
},
|
||||
"prettier": {
|
||||
"index.ts": "const prettier_key = 'prettier value'",
|
||||
"package.json": r#"{ "other_key": "other value" }"#,
|
||||
},
|
||||
},
|
||||
"package.json": r#"{ "main_key": "main value" }"#,
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
|
||||
|
||||
let query = "key";
|
||||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
.unwrap(),
|
||||
HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
|
||||
"Only one non-ignored file should have the query"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
.unwrap(),
|
||||
HashMap::from_iter([
|
||||
("package.json".to_string(), vec![8..11]),
|
||||
("target/index.txt".to_string(), vec![6..9]),
|
||||
(
|
||||
"node_modules/prettier/package.json".to_string(),
|
||||
vec![9..12]
|
||||
),
|
||||
("node_modules/prettier/index.ts".to_string(), vec![15..18]),
|
||||
("node_modules/eslint/index.ts".to_string(), vec![13..16]),
|
||||
("node_modules/eslint/package.json".to_string(), vec![8..11]),
|
||||
]),
|
||||
"Unrestricted search with ignored directories should find every file with the query"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
search(
|
||||
&project,
|
||||
SearchQuery::text(
|
||||
query,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
|
||||
vec![PathMatcher::new("*.ts").unwrap()],
|
||||
)
|
||||
.unwrap(),
|
||||
cx
|
||||
)
|
||||
.await
|
||||
.unwrap(),
|
||||
HashMap::from_iter([(
|
||||
"node_modules/prettier/package.json".to_string(),
|
||||
vec![9..12]
|
||||
)]),
|
||||
"With search including ignored prettier directory and excluding TS files, only one file should be found"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_glob_literal_prefix() {
|
||||
assert_eq!(glob_literal_prefix("**/*.js"), "");
|
||||
|
@ -371,15 +371,25 @@ impl SearchQuery {
pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
match file_path {
Some(file_path) => {
!self
.files_to_exclude()
.iter()
.any(|exclude_glob| exclude_glob.is_match(file_path))
&& (self.files_to_include().is_empty()
let mut path = file_path.to_path_buf();
loop {
if self
.files_to_exclude()
.iter()
.any(|exclude_glob| exclude_glob.is_match(&path))
{
return false;
} else if self.files_to_include().is_empty()
|| self
.files_to_include()
.iter()
.any(|include_glob| include_glob.is_match(file_path)))
.any(|include_glob| include_glob.is_match(&path))
{
return true;
} else if !path.pop() {
return false;
}
}
}
None => self.files_to_include().is_empty(),
}
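The rewritten file_matches above checks the candidate path and each of its ancestors against the exclusion and inclusion matchers. A standalone sketch of that ancestor walk, using util::paths::PathMatcher the same way the surrounding tests do (the free function and its wiring are illustrative, not part of this change):

use std::path::Path;
use util::paths::PathMatcher;

// Illustrative ancestor walk: a path is rejected if it or any parent matches an
// exclusion, and accepted if it or any parent matches an inclusion (or none are set).
fn matches_with_ancestors(path: &Path, include: &[PathMatcher], exclude: &[PathMatcher]) -> bool {
    let mut path = path.to_path_buf();
    loop {
        if exclude.iter().any(|glob| glob.is_match(&path)) {
            return false;
        } else if include.is_empty() || include.iter().any(|glob| glob.is_match(&path)) {
            return true;
        } else if !path.pop() {
            return false;
        }
    }
}

With this walk, an exclusion such as "**/node_modules" also rejects "node_modules/prettier/index.ts", because popping the path eventually reaches "node_modules".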
|
@ -958,8 +958,6 @@ impl LocalWorktree {
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let text = fs.load(&abs_path).await?;
|
||||
let entry = entry.await?;
|
||||
|
||||
let mut index_task = None;
|
||||
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
|
||||
if let Some(repo) = snapshot.repository_for_path(&path) {
|
||||
@ -982,18 +980,43 @@ impl LocalWorktree {
|
||||
let worktree = this
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("worktree was dropped"))?;
|
||||
Ok((
|
||||
File {
|
||||
entry_id: entry.id,
|
||||
worktree,
|
||||
path: entry.path,
|
||||
mtime: entry.mtime,
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
},
|
||||
text,
|
||||
diff_base,
|
||||
))
|
||||
match entry.await? {
|
||||
Some(entry) => Ok((
|
||||
File {
|
||||
entry_id: Some(entry.id),
|
||||
worktree,
|
||||
path: entry.path,
|
||||
mtime: entry.mtime,
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
},
|
||||
text,
|
||||
diff_base,
|
||||
)),
|
||||
None => {
|
||||
let metadata = fs
|
||||
.metadata(&abs_path)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!("Loading metadata for excluded file {abs_path:?}")
|
||||
})?
|
||||
.with_context(|| {
|
||||
format!("Excluded file {abs_path:?} got removed during loading")
|
||||
})?;
|
||||
Ok((
|
||||
File {
|
||||
entry_id: None,
|
||||
worktree,
|
||||
path,
|
||||
mtime: metadata.mtime,
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
},
|
||||
text,
|
||||
diff_base,
|
||||
))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@ -1013,18 +1036,38 @@ impl LocalWorktree {
|
||||
let text = buffer.as_rope().clone();
|
||||
let fingerprint = text.fingerprint();
|
||||
let version = buffer.version();
|
||||
let save = self.write_file(path, text, buffer.line_ending(), cx);
|
||||
let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
|
||||
let fs = Arc::clone(&self.fs);
|
||||
let abs_path = self.absolutize(&path);
|
||||
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
let entry = save.await?;
|
||||
let this = this.upgrade().context("worktree dropped")?;
|
||||
|
||||
let (entry_id, mtime, path) = match entry {
|
||||
Some(entry) => (Some(entry.id), entry.mtime, entry.path),
|
||||
None => {
|
||||
let metadata = fs
|
||||
.metadata(&abs_path)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Fetching metadata after saving the excluded buffer {abs_path:?}"
|
||||
)
|
||||
})?
|
||||
.with_context(|| {
|
||||
format!("Excluded buffer {path:?} got removed during saving")
|
||||
})?;
|
||||
(None, metadata.mtime, path)
|
||||
}
|
||||
};
|
||||
|
||||
if has_changed_file {
|
||||
let new_file = Arc::new(File {
|
||||
entry_id: entry.id,
|
||||
entry_id,
|
||||
worktree: this,
|
||||
path: entry.path,
|
||||
mtime: entry.mtime,
|
||||
path,
|
||||
mtime,
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
});
|
||||
@ -1050,13 +1093,13 @@ impl LocalWorktree {
|
||||
project_id,
|
||||
buffer_id,
|
||||
version: serialize_version(&version),
|
||||
mtime: Some(entry.mtime.into()),
|
||||
mtime: Some(mtime.into()),
|
||||
fingerprint: serialize_fingerprint(fingerprint),
|
||||
})?;
|
||||
}
|
||||
|
||||
buffer_handle.update(&mut cx, |buffer, cx| {
|
||||
buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
|
||||
buffer.did_save(version.clone(), fingerprint, mtime, cx);
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
@ -1081,7 +1124,7 @@ impl LocalWorktree {
|
||||
path: impl Into<Arc<Path>>,
|
||||
is_dir: bool,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Task<Result<Entry>> {
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let path = path.into();
|
||||
let lowest_ancestor = self.lowest_ancestor(&path);
|
||||
let abs_path = self.absolutize(&path);
|
||||
@ -1098,7 +1141,7 @@ impl LocalWorktree {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
write.await?;
|
||||
let (result, refreshes) = this.update(&mut cx, |this, cx| {
|
||||
let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new();
|
||||
let mut refreshes = Vec::new();
|
||||
let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
|
||||
for refresh_path in refresh_paths.ancestors() {
|
||||
if refresh_path == Path::new("") {
|
||||
@ -1125,14 +1168,14 @@ impl LocalWorktree {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn write_file(
|
||||
pub(crate) fn write_file(
|
||||
&self,
|
||||
path: impl Into<Arc<Path>>,
|
||||
text: Rope,
|
||||
line_ending: LineEnding,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Task<Result<Entry>> {
|
||||
let path = path.into();
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let path: Arc<Path> = path.into();
|
||||
let abs_path = self.absolutize(&path);
|
||||
let fs = self.fs.clone();
|
||||
let write = cx
|
||||
@ -1191,8 +1234,11 @@ impl LocalWorktree {
|
||||
entry_id: ProjectEntryId,
|
||||
new_path: impl Into<Arc<Path>>,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Option<Task<Result<Entry>>> {
|
||||
let old_path = self.entry_for_id(entry_id)?.path.clone();
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let old_path = match self.entry_for_id(entry_id) {
|
||||
Some(entry) => entry.path.clone(),
|
||||
None => return Task::ready(Ok(None)),
|
||||
};
|
||||
let new_path = new_path.into();
|
||||
let abs_old_path = self.absolutize(&old_path);
|
||||
let abs_new_path = self.absolutize(&new_path);
|
||||
@ -1202,7 +1248,7 @@ impl LocalWorktree {
|
||||
.await
|
||||
});
|
||||
|
||||
Some(cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
rename.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.as_local_mut()
|
||||
@ -1210,7 +1256,7 @@ impl LocalWorktree {
|
||||
.refresh_entry(new_path.clone(), Some(old_path), cx)
|
||||
})?
|
||||
.await
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn copy_entry(
|
||||
@ -1218,8 +1264,11 @@ impl LocalWorktree {
|
||||
entry_id: ProjectEntryId,
|
||||
new_path: impl Into<Arc<Path>>,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Option<Task<Result<Entry>>> {
|
||||
let old_path = self.entry_for_id(entry_id)?.path.clone();
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
let old_path = match self.entry_for_id(entry_id) {
|
||||
Some(entry) => entry.path.clone(),
|
||||
None => return Task::ready(Ok(None)),
|
||||
};
|
||||
let new_path = new_path.into();
|
||||
let abs_old_path = self.absolutize(&old_path);
|
||||
let abs_new_path = self.absolutize(&new_path);
|
||||
@ -1234,7 +1283,7 @@ impl LocalWorktree {
|
||||
.await
|
||||
});
|
||||
|
||||
Some(cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
copy.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.as_local_mut()
|
||||
@ -1242,7 +1291,7 @@ impl LocalWorktree {
|
||||
.refresh_entry(new_path.clone(), None, cx)
|
||||
})?
|
||||
.await
|
||||
}))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn expand_entry(
|
||||
@ -1278,7 +1327,10 @@ impl LocalWorktree {
|
||||
path: Arc<Path>,
|
||||
old_path: Option<Arc<Path>>,
|
||||
cx: &mut ModelContext<Worktree>,
|
||||
) -> Task<Result<Entry>> {
|
||||
) -> Task<Result<Option<Entry>>> {
|
||||
if self.is_path_excluded(path.to_path_buf()) {
|
||||
return Task::ready(Ok(None));
|
||||
}
|
||||
let paths = if let Some(old_path) = old_path.as_ref() {
|
||||
vec![old_path.clone(), path.clone()]
|
||||
} else {
|
||||
@ -1287,11 +1339,12 @@ impl LocalWorktree {
|
||||
let mut refresh = self.refresh_entries_for_paths(paths);
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
refresh.recv().await;
|
||||
this.update(&mut cx, |this, _| {
|
||||
let new_entry = this.update(&mut cx, |this, _| {
|
||||
this.entry_for_path(path)
|
||||
.cloned()
|
||||
.ok_or_else(|| anyhow!("failed to read path after update"))
|
||||
})?
|
||||
})??;
|
||||
Ok(Some(new_entry))
|
||||
})
|
||||
}
|
||||
|
||||
@ -2222,10 +2275,19 @@ impl LocalSnapshot {
paths
}

pub fn is_path_excluded(&self, abs_path: &Path) -> bool {
self.file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(abs_path))
pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
loop {
if self
.file_scan_exclusions
.iter()
.any(|exclude_matcher| exclude_matcher.is_match(&path))
{
return true;
}
if !path.pop() {
return false;
}
}
}
}

@ -2455,8 +2517,7 @@ impl BackgroundScannerState {
|
||||
ids_to_preserve.insert(work_directory_id);
|
||||
} else {
|
||||
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
|
||||
let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
|
||||
|| snapshot.is_path_excluded(&git_dir_abs_path);
|
||||
let git_dir_excluded = snapshot.is_path_excluded(entry.git_dir_path.to_path_buf());
|
||||
if git_dir_excluded
|
||||
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
|
||||
{
|
||||
@ -2663,7 +2724,7 @@ pub struct File {
|
||||
pub worktree: Model<Worktree>,
|
||||
pub path: Arc<Path>,
|
||||
pub mtime: SystemTime,
|
||||
pub(crate) entry_id: ProjectEntryId,
|
||||
pub(crate) entry_id: Option<ProjectEntryId>,
|
||||
pub(crate) is_local: bool,
|
||||
pub(crate) is_deleted: bool,
|
||||
}
|
||||
@ -2732,7 +2793,7 @@ impl language::File for File {
|
||||
fn to_proto(&self) -> rpc::proto::File {
|
||||
rpc::proto::File {
|
||||
worktree_id: self.worktree.entity_id().as_u64(),
|
||||
entry_id: self.entry_id.to_proto(),
|
||||
entry_id: self.entry_id.map(|id| id.to_proto()),
|
||||
path: self.path.to_string_lossy().into(),
|
||||
mtime: Some(self.mtime.into()),
|
||||
is_deleted: self.is_deleted,
|
||||
@ -2790,7 +2851,7 @@ impl File {
|
||||
worktree,
|
||||
path: entry.path.clone(),
|
||||
mtime: entry.mtime,
|
||||
entry_id: entry.id,
|
||||
entry_id: Some(entry.id),
|
||||
is_local: true,
|
||||
is_deleted: false,
|
||||
})
|
||||
@ -2815,7 +2876,7 @@ impl File {
|
||||
worktree,
|
||||
path: Path::new(&proto.path).into(),
|
||||
mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
|
||||
entry_id: ProjectEntryId::from_proto(proto.entry_id),
|
||||
entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
|
||||
is_local: false,
|
||||
is_deleted: proto.is_deleted,
|
||||
})
|
||||
@ -2833,7 +2894,7 @@ impl File {
|
||||
if self.is_deleted {
|
||||
None
|
||||
} else {
|
||||
Some(self.entry_id)
|
||||
self.entry_id
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3329,16 +3390,7 @@ impl BackgroundScanner {
|
||||
return false;
|
||||
}
|
||||
|
||||
// FS events may come for files which parent directory is excluded, need to check ignore those.
|
||||
let mut path_to_test = abs_path.clone();
|
||||
let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
|
||||
|| snapshot.is_path_excluded(&relative_path);
|
||||
while !excluded_file_event && path_to_test.pop() {
|
||||
if snapshot.is_path_excluded(&path_to_test) {
|
||||
excluded_file_event = true;
|
||||
}
|
||||
}
|
||||
if excluded_file_event {
|
||||
if snapshot.is_path_excluded(relative_path.to_path_buf()) {
|
||||
if !is_git_related {
|
||||
log::debug!("ignoring FS event for excluded path {relative_path:?}");
|
||||
}
|
||||
@ -3522,7 +3574,7 @@ impl BackgroundScanner {
|
||||
let state = self.state.lock();
|
||||
let snapshot = &state.snapshot;
|
||||
root_abs_path = snapshot.abs_path().clone();
|
||||
if snapshot.is_path_excluded(&job.abs_path) {
|
||||
if snapshot.is_path_excluded(job.path.to_path_buf()) {
|
||||
log::error!("skipping excluded directory {:?}", job.path);
|
||||
return Ok(());
|
||||
}
|
||||
@ -3593,9 +3645,9 @@ impl BackgroundScanner {
|
||||
}
|
||||
|
||||
{
|
||||
let relative_path = job.path.join(child_name);
|
||||
let mut state = self.state.lock();
|
||||
if state.snapshot.is_path_excluded(&child_abs_path) {
|
||||
let relative_path = job.path.join(child_name);
|
||||
if state.snapshot.is_path_excluded(relative_path.clone()) {
|
||||
log::debug!("skipping excluded child entry {relative_path:?}");
|
||||
state.remove_path(&relative_path);
|
||||
continue;
|
||||
|
@ -1055,11 +1055,12 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
||||
&[
|
||||
".git/HEAD",
|
||||
".git/foo",
|
||||
"node_modules",
|
||||
"node_modules/.DS_Store",
|
||||
"node_modules/prettier",
|
||||
"node_modules/prettier/package.json",
|
||||
],
|
||||
&["target", "node_modules"],
|
||||
&["target"],
|
||||
&[
|
||||
".DS_Store",
|
||||
"src/.DS_Store",
|
||||
@ -1109,6 +1110,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
||||
".git/HEAD",
|
||||
".git/foo",
|
||||
".git/new_file",
|
||||
"node_modules",
|
||||
"node_modules/.DS_Store",
|
||||
"node_modules/prettier",
|
||||
"node_modules/prettier/package.json",
|
||||
@ -1117,7 +1119,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
||||
"build_output/new_file",
|
||||
"test_output/new_file",
|
||||
],
|
||||
&["target", "node_modules", "test_output"],
|
||||
&["target", "test_output"],
|
||||
&[
|
||||
".DS_Store",
|
||||
"src/.DS_Store",
|
||||
@ -1177,6 +1179,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
|
||||
.create_entry("a/e".as_ref(), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_dir());
|
||||
|
||||
@ -1226,6 +1229,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_file());
|
||||
|
||||
@ -1261,6 +1265,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_file());
|
||||
|
||||
@ -1279,6 +1284,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
.create_entry("a/b/c/e.txt".as_ref(), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_file());
|
||||
|
||||
@ -1295,6 +1301,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||
.create_entry("d/e/f/g.txt".as_ref(), false, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
assert!(entry.is_file());
|
||||
|
||||
@ -1620,14 +1627,14 @@ fn randomly_mutate_worktree(
|
||||
entry.id.0,
|
||||
new_path
|
||||
);
|
||||
let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
|
||||
let task = worktree.rename_entry(entry.id, new_path, cx);
|
||||
cx.background_executor().spawn(async move {
|
||||
task.await?;
|
||||
task.await?.unwrap();
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
let task = if entry.is_dir() {
|
||||
if entry.is_dir() {
|
||||
let child_path = entry.path.join(random_filename(rng));
|
||||
let is_dir = rng.gen_bool(0.3);
|
||||
log::info!(
|
||||
@ -1635,15 +1642,20 @@ fn randomly_mutate_worktree(
|
||||
if is_dir { "dir" } else { "file" },
|
||||
child_path,
|
||||
);
|
||||
worktree.create_entry(child_path, is_dir, cx)
|
||||
let task = worktree.create_entry(child_path, is_dir, cx);
|
||||
cx.background_executor().spawn(async move {
|
||||
task.await?;
|
||||
Ok(())
|
||||
})
|
||||
} else {
|
||||
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
|
||||
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
|
||||
};
|
||||
cx.background_executor().spawn(async move {
|
||||
task.await?;
|
||||
Ok(())
|
||||
})
|
||||
let task =
|
||||
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
|
||||
cx.background_executor().spawn(async move {
|
||||
task.await?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -621,7 +621,7 @@ impl ProjectPanel {
|
||||
edited_entry_id = NEW_ENTRY_ID;
|
||||
edit_task = self.project.update(cx, |project, cx| {
|
||||
project.create_entry((worktree_id, &new_path), is_dir, cx)
|
||||
})?;
|
||||
});
|
||||
} else {
|
||||
let new_path = if let Some(parent) = entry.path.clone().parent() {
|
||||
parent.join(&filename)
|
||||
@ -635,7 +635,7 @@ impl ProjectPanel {
|
||||
edited_entry_id = entry.id;
|
||||
edit_task = self.project.update(cx, |project, cx| {
|
||||
project.rename_entry(entry.id, new_path.as_path(), cx)
|
||||
})?;
|
||||
});
|
||||
};
|
||||
|
||||
edit_state.processing_filename = Some(filename);
|
||||
@ -648,21 +648,22 @@ impl ProjectPanel {
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
let new_entry = new_entry?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(selection) = &mut this.selection {
|
||||
if selection.entry_id == edited_entry_id {
|
||||
selection.worktree_id = worktree_id;
|
||||
selection.entry_id = new_entry.id;
|
||||
this.expand_to_selection(cx);
|
||||
if let Some(new_entry) = new_entry? {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(selection) = &mut this.selection {
|
||||
if selection.entry_id == edited_entry_id {
|
||||
selection.worktree_id = worktree_id;
|
||||
selection.entry_id = new_entry.id;
|
||||
this.expand_to_selection(cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
this.update_visible_entries(None, cx);
|
||||
if is_new_entry && !is_dir {
|
||||
this.open_entry(new_entry.id, true, cx);
|
||||
}
|
||||
cx.notify();
|
||||
})?;
|
||||
this.update_visible_entries(None, cx);
|
||||
if is_new_entry && !is_dir {
|
||||
this.open_entry(new_entry.id, true, cx);
|
||||
}
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
Ok(())
|
||||
}))
|
||||
}
|
||||
@ -935,15 +936,17 @@ impl ProjectPanel {
|
||||
}
|
||||
|
||||
if clipboard_entry.is_cut() {
|
||||
if let Some(task) = self.project.update(cx, |project, cx| {
|
||||
project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
|
||||
}) {
|
||||
task.detach_and_log_err(cx)
|
||||
}
|
||||
} else if let Some(task) = self.project.update(cx, |project, cx| {
|
||||
project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
|
||||
}) {
|
||||
task.detach_and_log_err(cx)
|
||||
self.project
|
||||
.update(cx, |project, cx| {
|
||||
project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
|
||||
})
|
||||
.detach_and_log_err(cx)
|
||||
} else {
|
||||
self.project
|
||||
.update(cx, |project, cx| {
|
||||
project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
|
||||
})
|
||||
.detach_and_log_err(cx)
|
||||
}
|
||||
}
|
||||
None
|
||||
@ -1026,7 +1029,7 @@ impl ProjectPanel {
|
||||
let mut new_path = destination_path.to_path_buf();
|
||||
new_path.push(entry_path.path.file_name()?);
|
||||
if new_path != entry_path.path.as_ref() {
|
||||
let task = project.rename_entry(entry_to_move, new_path, cx)?;
|
||||
let task = project.rename_entry(entry_to_move, new_path, cx);
|
||||
cx.foreground().spawn(task).detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
|
@ -397,7 +397,6 @@ impl ProjectPanel {
|
||||
menu = menu.action(
|
||||
"Add Folder to Project",
|
||||
Box::new(workspace::AddFolderToProject),
|
||||
cx,
|
||||
);
|
||||
if is_root {
|
||||
menu = menu.entry(
|
||||
@ -412,35 +411,35 @@ impl ProjectPanel {
|
||||
}
|
||||
|
||||
menu = menu
|
||||
.action("New File", Box::new(NewFile), cx)
|
||||
.action("New Folder", Box::new(NewDirectory), cx)
|
||||
.action("New File", Box::new(NewFile))
|
||||
.action("New Folder", Box::new(NewDirectory))
|
||||
.separator()
|
||||
.action("Cut", Box::new(Cut), cx)
|
||||
.action("Copy", Box::new(Copy), cx);
|
||||
.action("Cut", Box::new(Cut))
|
||||
.action("Copy", Box::new(Copy));
|
||||
|
||||
if let Some(clipboard_entry) = self.clipboard_entry {
|
||||
if clipboard_entry.worktree_id() == worktree_id {
|
||||
menu = menu.action("Paste", Box::new(Paste), cx);
|
||||
menu = menu.action("Paste", Box::new(Paste));
|
||||
}
|
||||
}
|
||||
|
||||
menu = menu
|
||||
.separator()
|
||||
.action("Copy Path", Box::new(CopyPath), cx)
|
||||
.action("Copy Relative Path", Box::new(CopyRelativePath), cx)
|
||||
.action("Copy Path", Box::new(CopyPath))
|
||||
.action("Copy Relative Path", Box::new(CopyRelativePath))
|
||||
.separator()
|
||||
.action("Reveal in Finder", Box::new(RevealInFinder), cx);
|
||||
.action("Reveal in Finder", Box::new(RevealInFinder));
|
||||
|
||||
if is_dir {
|
||||
menu = menu
|
||||
.action("Open in Terminal", Box::new(OpenInTerminal), cx)
|
||||
.action("Search Inside", Box::new(NewSearchInDirectory), cx)
|
||||
.action("Open in Terminal", Box::new(OpenInTerminal))
|
||||
.action("Search Inside", Box::new(NewSearchInDirectory))
|
||||
}
|
||||
|
||||
menu = menu.separator().action("Rename", Box::new(Rename), cx);
|
||||
menu = menu.separator().action("Rename", Box::new(Rename));
|
||||
|
||||
if !is_root {
|
||||
menu = menu.action("Delete", Box::new(Delete), cx);
|
||||
menu = menu.action("Delete", Box::new(Delete));
|
||||
}
|
||||
|
||||
menu
|
||||
@ -611,7 +610,7 @@ impl ProjectPanel {
|
||||
edited_entry_id = NEW_ENTRY_ID;
|
||||
edit_task = self.project.update(cx, |project, cx| {
|
||||
project.create_entry((worktree_id, &new_path), is_dir, cx)
|
||||
})?;
|
||||
});
|
||||
} else {
|
||||
let new_path = if let Some(parent) = entry.path.clone().parent() {
|
||||
parent.join(&filename)
|
||||
@ -625,7 +624,7 @@ impl ProjectPanel {
|
||||
edited_entry_id = entry.id;
|
||||
edit_task = self.project.update(cx, |project, cx| {
|
||||
project.rename_entry(entry.id, new_path.as_path(), cx)
|
||||
})?;
|
||||
});
|
||||
};
|
||||
|
||||
edit_state.processing_filename = Some(filename);
|
||||
@ -638,21 +637,22 @@ impl ProjectPanel {
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
let new_entry = new_entry?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(selection) = &mut this.selection {
|
||||
if selection.entry_id == edited_entry_id {
|
||||
selection.worktree_id = worktree_id;
|
||||
selection.entry_id = new_entry.id;
|
||||
this.expand_to_selection(cx);
|
||||
if let Some(new_entry) = new_entry? {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(selection) = &mut this.selection {
|
||||
if selection.entry_id == edited_entry_id {
|
||||
selection.worktree_id = worktree_id;
|
||||
selection.entry_id = new_entry.id;
|
||||
this.expand_to_selection(cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
this.update_visible_entries(None, cx);
|
||||
if is_new_entry && !is_dir {
|
||||
this.open_entry(new_entry.id, true, cx);
|
||||
}
|
||||
cx.notify();
|
||||
})?;
|
||||
this.update_visible_entries(None, cx);
|
||||
if is_new_entry && !is_dir {
|
||||
this.open_entry(new_entry.id, true, cx);
|
||||
}
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
Ok(())
|
||||
}))
|
||||
}
|
||||
@ -932,15 +932,17 @@ impl ProjectPanel {
|
||||
}
|
||||
|
||||
if clipboard_entry.is_cut() {
|
||||
if let Some(task) = self.project.update(cx, |project, cx| {
|
||||
project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
|
||||
}) {
|
||||
task.detach_and_log_err(cx);
|
||||
}
|
||||
} else if let Some(task) = self.project.update(cx, |project, cx| {
|
||||
project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
|
||||
}) {
|
||||
task.detach_and_log_err(cx);
|
||||
self.project
|
||||
.update(cx, |project, cx| {
|
||||
project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
|
||||
})
|
||||
.detach_and_log_err(cx)
|
||||
} else {
|
||||
self.project
|
||||
.update(cx, |project, cx| {
|
||||
project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
|
||||
})
|
||||
.detach_and_log_err(cx)
|
||||
}
|
||||
|
||||
Some(())
|
||||
@ -1026,7 +1028,7 @@ impl ProjectPanel {
|
||||
// let mut new_path = destination_path.to_path_buf();
|
||||
// new_path.push(entry_path.path.file_name()?);
|
||||
// if new_path != entry_path.path.as_ref() {
|
||||
// let task = project.rename_entry(entry_to_move, new_path, cx)?;
|
||||
// let task = project.rename_entry(entry_to_move, new_path, cx);
|
||||
// cx.foreground_executor().spawn(task).detach_and_log_err(cx);
|
||||
// }
|
||||
|
||||
|
@ -430,7 +430,7 @@ message ExpandProjectEntryResponse {
}

message ProjectEntryResponse {
Entry entry = 1;
optional Entry entry = 1;
uint64 worktree_scan_id = 2;
}

@ -1357,7 +1357,7 @@ message User {

message File {
uint64 worktree_id = 1;
uint64 entry_id = 2;
optional uint64 entry_id = 2;
string path = 3;
Timestamp mtime = 4;
bool is_deleted = 5;

@ -9,4 +9,4 @@ pub use notification::*;
pub use peer::*;
mod macros;

pub const PROTOCOL_VERSION: u32 = 66;
pub const PROTOCOL_VERSION: u32 = 67;

@ -430,7 +430,7 @@ message ExpandProjectEntryResponse {
}

message ProjectEntryResponse {
Entry entry = 1;
optional Entry entry = 1;
uint64 worktree_scan_id = 2;
}

@ -1357,7 +1357,7 @@ message User {

message File {
uint64 worktree_id = 1;
uint64 entry_id = 2;
optional uint64 entry_id = 2;
string path = 3;
Timestamp mtime = 4;
bool is_deleted = 5;

@ -9,4 +9,4 @@ pub use notification::*;
pub use peer::*;
mod macros;

pub const PROTOCOL_VERSION: u32 = 66;
pub const PROTOCOL_VERSION: u32 = 67;
|
||||
|
69
crates/semantic_index2/Cargo.toml
Normal file
69
crates/semantic_index2/Cargo.toml
Normal file
@ -0,0 +1,69 @@
|
||||
[package]
|
||||
name = "semantic_index2"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
path = "src/semantic_index.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
ai = { package = "ai2", path = "../ai2" }
|
||||
collections = { path = "../collections" }
|
||||
gpui = { package = "gpui2", path = "../gpui2" }
|
||||
language = { package = "language2", path = "../language2" }
|
||||
project = { package = "project2", path = "../project2" }
|
||||
workspace = { package = "workspace2", path = "../workspace2" }
|
||||
util = { path = "../util" }
|
||||
rpc = { package = "rpc2", path = "../rpc2" }
|
||||
settings = { package = "settings2", path = "../settings2" }
|
||||
anyhow.workspace = true
|
||||
postage.workspace = true
|
||||
futures.workspace = true
|
||||
ordered-float.workspace = true
|
||||
smol.workspace = true
|
||||
rusqlite.workspace = true
|
||||
log.workspace = true
|
||||
tree-sitter.workspace = true
|
||||
lazy_static.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
async-trait.workspace = true
|
||||
tiktoken-rs.workspace = true
|
||||
parking_lot.workspace = true
|
||||
rand.workspace = true
|
||||
schemars.workspace = true
|
||||
globset.workspace = true
|
||||
sha1 = "0.10.5"
|
||||
ndarray = { version = "0.15.0" }
|
||||
|
||||
[dev-dependencies]
|
||||
ai = { package = "ai2", path = "../ai2", features = ["test-support"] }
|
||||
collections = { path = "../collections", features = ["test-support"] }
|
||||
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
|
||||
language = { package = "language2", path = "../language2", features = ["test-support"] }
|
||||
project = { package = "project2", path = "../project2", features = ["test-support"] }
|
||||
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
|
||||
workspace = { package = "workspace2", path = "../workspace2", features = ["test-support"] }
|
||||
settings = { package = "settings2", path = "../settings2", features = ["test-support"]}
|
||||
rust-embed = { version = "8.0", features = ["include-exclude"] }
|
||||
client = { package = "client2", path = "../client2" }
|
||||
node_runtime = { path = "../node_runtime"}
|
||||
|
||||
pretty_assertions.workspace = true
|
||||
rand.workspace = true
|
||||
unindent.workspace = true
|
||||
tempdir.workspace = true
|
||||
ctor.workspace = true
|
||||
env_logger.workspace = true
|
||||
|
||||
tree-sitter-typescript.workspace = true
|
||||
tree-sitter-json.workspace = true
|
||||
tree-sitter-rust.workspace = true
|
||||
tree-sitter-toml.workspace = true
|
||||
tree-sitter-cpp.workspace = true
|
||||
tree-sitter-elixir.workspace = true
|
||||
tree-sitter-lua.workspace = true
|
||||
tree-sitter-ruby.workspace = true
|
||||
tree-sitter-php.workspace = true
|
20
crates/semantic_index2/README.md
Normal file
20
crates/semantic_index2/README.md
Normal file
@ -0,0 +1,20 @@
|

# Semantic Index

## Evaluation

### Metrics

nDCG@k:
- "The value of NDCG is determined by comparing the relevance of the items returned by the search engine to the relevance of the items that a hypothetical 'ideal' search engine would return."
- "The relevance of a result is represented by a score (also known as a 'grade') that is assigned to the search query. The scores of these results are then discounted based on their position in the search results -- did they get recommended first or last?"

MRR@k:
- "Mean reciprocal rank quantifies the rank of the first relevant item found in the recommendation list."

MAP@k:
- "Mean average precision averages the precision@k metric at each relevant item position in the recommendation list."

Resources:
- [Evaluating recommendation metrics](https://www.shaped.ai/blog/evaluating-recommendation-systems-map-mmr-ndcg)
- [Math Walkthrough](https://towardsdatascience.com/demystifying-ndcg-bee3be58cfe0)
114
crates/semantic_index2/eval/gpt-engineer.json
Normal file
114
crates/semantic_index2/eval/gpt-engineer.json
Normal file
@ -0,0 +1,114 @@
|
||||
{
|
||||
"repo": "https://github.com/AntonOsika/gpt-engineer.git",
|
||||
"commit": "7735a6445bae3611c62f521e6464c67c957f87c2",
|
||||
"assertions": [
|
||||
{
|
||||
"query": "How do I contribute to this project?",
|
||||
"matches": [
|
||||
".github/CONTRIBUTING.md:1",
|
||||
"ROADMAP.md:48"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "What version of the openai package is active?",
|
||||
"matches": [
|
||||
"pyproject.toml:14"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "Ask user for clarification",
|
||||
"matches": [
|
||||
"gpt_engineer/steps.py:69"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "generate tests for python code",
|
||||
"matches": [
|
||||
"gpt_engineer/steps.py:153"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "get item from database based on key",
|
||||
"matches": [
|
||||
"gpt_engineer/db.py:42",
|
||||
"gpt_engineer/db.py:68"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "prompt user to select files",
|
||||
"matches": [
|
||||
"gpt_engineer/file_selector.py:171",
|
||||
"gpt_engineer/file_selector.py:306",
|
||||
"gpt_engineer/file_selector.py:289",
|
||||
"gpt_engineer/file_selector.py:234"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "send to rudderstack",
|
||||
"matches": [
|
||||
"gpt_engineer/collect.py:11",
|
||||
"gpt_engineer/collect.py:38"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "parse code blocks from chat messages",
|
||||
"matches": [
|
||||
"gpt_engineer/chat_to_files.py:10",
|
||||
"docs/intro/chat_parsing.md:1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "how do I use the docker cli?",
|
||||
"matches": [
|
||||
"docker/README.md:1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "ask the user if the code ran successfully?",
|
||||
"matches": [
|
||||
"gpt_engineer/learning.py:54"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "how is consent granted by the user?",
|
||||
"matches": [
|
||||
"gpt_engineer/learning.py:107",
|
||||
"gpt_engineer/learning.py:130",
|
||||
"gpt_engineer/learning.py:152"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "what are all the different steps the agent can take?",
|
||||
"matches": [
|
||||
"docs/intro/steps_module.md:1",
|
||||
"gpt_engineer/steps.py:391"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "ask the user for clarification?",
|
||||
"matches": [
|
||||
"gpt_engineer/steps.py:69"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "what models are available?",
|
||||
"matches": [
|
||||
"gpt_engineer/ai.py:315",
|
||||
"gpt_engineer/ai.py:341",
|
||||
"docs/open-models.md:1"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "what is the current focus of the project?",
|
||||
"matches": [
|
||||
"ROADMAP.md:11"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "does the agent know how to fix code?",
|
||||
"matches": [
|
||||
"gpt_engineer/steps.py:367"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
104
crates/semantic_index2/eval/tree-sitter.json
Normal file
104
crates/semantic_index2/eval/tree-sitter.json
Normal file
@ -0,0 +1,104 @@
|
||||
{
|
||||
"repo": "https://github.com/tree-sitter/tree-sitter.git",
|
||||
"commit": "46af27796a76c72d8466627d499f2bca4af958ee",
|
||||
"assertions": [
|
||||
{
|
||||
"query": "What attributes are available for the tags configuration struct?",
|
||||
"matches": [
|
||||
"tags/src/lib.rs:24"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "create a new tag configuration",
|
||||
"matches": [
|
||||
"tags/src/lib.rs:119"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "generate tags based on config",
|
||||
"matches": [
|
||||
"tags/src/lib.rs:261"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "match on ts quantifier in rust",
|
||||
"matches": [
|
||||
"lib/binding_rust/lib.rs:139"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "cli command to generate tags",
|
||||
"matches": [
|
||||
"cli/src/tags.rs:10"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "what version of the tree-sitter-tags package is active?",
|
||||
"matches": [
|
||||
"tags/Cargo.toml:4"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "Insert a new parse state",
|
||||
"matches": [
|
||||
"cli/src/generate/build_tables/build_parse_table.rs:153"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "Handle conflict when numerous actions occur on the same symbol",
|
||||
"matches": [
|
||||
"cli/src/generate/build_tables/build_parse_table.rs:363",
|
||||
"cli/src/generate/build_tables/build_parse_table.rs:442"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "Match based on associativity of actions",
|
||||
"matches": [
|
||||
"cri/src/generate/build_tables/build_parse_table.rs:542"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "Format token set display",
|
||||
"matches": [
|
||||
"cli/src/generate/build_tables/item.rs:246"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "extract choices from rule",
|
||||
"matches": [
|
||||
"cli/src/generate/prepare_grammar/flatten_grammar.rs:124"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "How do we identify if a symbol is being used?",
|
||||
"matches": [
|
||||
"cli/src/generate/prepare_grammar/flatten_grammar.rs:175"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "How do we launch the playground?",
|
||||
"matches": [
|
||||
"cli/src/playground.rs:46"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "How do we test treesitter query matches in rust?",
|
||||
"matches": [
|
||||
"cli/src/query_testing.rs:152",
|
||||
"cli/src/tests/query_test.rs:781",
|
||||
"cli/src/tests/query_test.rs:2163",
|
||||
"cli/src/tests/query_test.rs:3781",
|
||||
"cli/src/tests/query_test.rs:887"
|
||||
]
|
||||
},
|
||||
{
|
||||
"query": "What does the CLI do?",
|
||||
"matches": [
|
||||
"cli/README.md:10",
|
||||
"cli/loader/README.md:3",
|
||||
"docs/section-5-implementation.md:14",
|
||||
"docs/section-5-implementation.md:18"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
603
crates/semantic_index2/src/db.rs
Normal file
603
crates/semantic_index2/src/db.rs
Normal file
@ -0,0 +1,603 @@
|
||||
use crate::{
|
||||
parsing::{Span, SpanDigest},
|
||||
SEMANTIC_INDEX_VERSION,
|
||||
};
|
||||
use ai::embedding::Embedding;
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use collections::HashMap;
|
||||
use futures::channel::oneshot;
|
||||
use gpui::BackgroundExecutor;
|
||||
use ndarray::{Array1, Array2};
|
||||
use ordered_float::OrderedFloat;
|
||||
use project::Fs;
|
||||
use rpc::proto::Timestamp;
|
||||
use rusqlite::params;
|
||||
use rusqlite::types::Value;
|
||||
use std::{
|
||||
future::Future,
|
||||
ops::Range,
|
||||
path::{Path, PathBuf},
|
||||
rc::Rc,
|
||||
sync::Arc,
|
||||
time::SystemTime,
|
||||
};
|
||||
use util::{paths::PathMatcher, TryFutureExt};
|
||||
|
||||
pub fn argsort<T: Ord>(data: &[T]) -> Vec<usize> {
|
||||
let mut indices = (0..data.len()).collect::<Vec<_>>();
|
||||
indices.sort_by_key(|&i| &data[i]);
|
||||
indices.reverse();
|
||||
indices
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct FileRecord {
|
||||
pub id: usize,
|
||||
pub relative_path: String,
|
||||
pub mtime: Timestamp,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct VectorDatabase {
|
||||
path: Arc<Path>,
|
||||
transactions:
|
||||
smol::channel::Sender<Box<dyn 'static + Send + FnOnce(&mut rusqlite::Connection)>>,
|
||||
}
|
||||
|
||||
impl VectorDatabase {
|
||||
pub async fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
path: Arc<Path>,
|
||||
executor: BackgroundExecutor,
|
||||
) -> Result<Self> {
|
||||
if let Some(db_directory) = path.parent() {
|
||||
fs.create_dir(db_directory).await?;
|
||||
}
|
||||
|
||||
let (transactions_tx, transactions_rx) = smol::channel::unbounded::<
|
||||
Box<dyn 'static + Send + FnOnce(&mut rusqlite::Connection)>,
|
||||
>();
|
||||
executor
|
||||
.spawn({
|
||||
let path = path.clone();
|
||||
async move {
|
||||
let mut connection = rusqlite::Connection::open(&path)?;
|
||||
|
||||
connection.pragma_update(None, "journal_mode", "wal")?;
|
||||
connection.pragma_update(None, "synchronous", "normal")?;
|
||||
connection.pragma_update(None, "cache_size", 1000000)?;
|
||||
connection.pragma_update(None, "temp_store", "MEMORY")?;
|
||||
|
||||
while let Ok(transaction) = transactions_rx.recv().await {
|
||||
transaction(&mut connection);
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
})
|
||||
.detach();
|
||||
let this = Self {
|
||||
transactions: transactions_tx,
|
||||
path,
|
||||
};
|
||||
this.initialize_database().await?;
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
pub fn path(&self) -> &Arc<Path> {
|
||||
&self.path
|
||||
}
|
||||
|
||||
fn transact<F, T>(&self, f: F) -> impl Future<Output = Result<T>>
|
||||
where
|
||||
F: 'static + Send + FnOnce(&rusqlite::Transaction) -> Result<T>,
|
||||
T: 'static + Send,
|
||||
{
|
||||
let (tx, rx) = oneshot::channel();
|
||||
let transactions = self.transactions.clone();
|
||||
async move {
|
||||
if transactions
|
||||
.send(Box::new(|connection| {
|
||||
let result = connection
|
||||
.transaction()
|
||||
.map_err(|err| anyhow!(err))
|
||||
.and_then(|transaction| {
|
||||
let result = f(&transaction)?;
|
||||
transaction.commit()?;
|
||||
Ok(result)
|
||||
});
|
||||
let _ = tx.send(result);
|
||||
}))
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
return Err(anyhow!("connection was dropped"))?;
|
||||
}
|
||||
rx.await?
|
||||
}
|
||||
}
|
||||
|
||||
fn initialize_database(&self) -> impl Future<Output = Result<()>> {
|
||||
self.transact(|db| {
|
||||
rusqlite::vtab::array::load_module(&db)?;
|
||||
|
||||
// Delete existing tables, if SEMANTIC_INDEX_VERSION is bumped
|
||||
let version_query = db.prepare("SELECT version from semantic_index_config");
|
||||
let version = version_query
|
||||
.and_then(|mut query| query.query_row([], |row| Ok(row.get::<_, i64>(0)?)));
|
||||
if version.map_or(false, |version| version == SEMANTIC_INDEX_VERSION as i64) {
|
||||
log::trace!("vector database schema up to date");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
log::trace!("vector database schema out of date. updating...");
|
||||
// We renamed the `documents` table to `spans`, so we want to drop
|
||||
// `documents` without recreating it if it exists.
|
||||
db.execute("DROP TABLE IF EXISTS documents", [])
|
||||
.context("failed to drop 'documents' table")?;
|
||||
db.execute("DROP TABLE IF EXISTS spans", [])
|
||||
.context("failed to drop 'spans' table")?;
|
||||
db.execute("DROP TABLE IF EXISTS files", [])
|
||||
.context("failed to drop 'files' table")?;
|
||||
db.execute("DROP TABLE IF EXISTS worktrees", [])
|
||||
.context("failed to drop 'worktrees' table")?;
|
||||
db.execute("DROP TABLE IF EXISTS semantic_index_config", [])
|
||||
.context("failed to drop 'semantic_index_config' table")?;
|
||||
|
||||
// Initialize Vector Databasing Tables
|
||||
db.execute(
|
||||
"CREATE TABLE semantic_index_config (
|
||||
version INTEGER NOT NULL
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"INSERT INTO semantic_index_config (version) VALUES (?1)",
|
||||
params![SEMANTIC_INDEX_VERSION],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"CREATE TABLE worktrees (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
absolute_path VARCHAR NOT NULL
|
||||
);
|
||||
CREATE UNIQUE INDEX worktrees_absolute_path ON worktrees (absolute_path);
|
||||
",
|
||||
[],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"CREATE TABLE files (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
worktree_id INTEGER NOT NULL,
|
||||
relative_path VARCHAR NOT NULL,
|
||||
mtime_seconds INTEGER NOT NULL,
|
||||
mtime_nanos INTEGER NOT NULL,
|
||||
FOREIGN KEY(worktree_id) REFERENCES worktrees(id) ON DELETE CASCADE
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"CREATE UNIQUE INDEX files_worktree_id_and_relative_path ON files (worktree_id, relative_path)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"CREATE TABLE spans (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
file_id INTEGER NOT NULL,
|
||||
start_byte INTEGER NOT NULL,
|
||||
end_byte INTEGER NOT NULL,
|
||||
name VARCHAR NOT NULL,
|
||||
embedding BLOB NOT NULL,
|
||||
digest BLOB NOT NULL,
|
||||
FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
db.execute(
|
||||
"CREATE INDEX spans_digest ON spans (digest)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
log::trace!("vector database initialized with updated schema.");
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn delete_file(
|
||||
&self,
|
||||
worktree_id: i64,
|
||||
delete_path: Arc<Path>,
|
||||
) -> impl Future<Output = Result<()>> {
|
||||
self.transact(move |db| {
|
||||
db.execute(
|
||||
"DELETE FROM files WHERE worktree_id = ?1 AND relative_path = ?2",
|
||||
params![worktree_id, delete_path.to_str()],
|
||||
)?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn insert_file(
|
||||
&self,
|
||||
worktree_id: i64,
|
||||
path: Arc<Path>,
|
||||
mtime: SystemTime,
|
||||
spans: Vec<Span>,
|
||||
) -> impl Future<Output = Result<()>> {
|
||||
self.transact(move |db| {
|
||||
// Return the existing ID, if both the file and mtime match
|
||||
let mtime = Timestamp::from(mtime);
|
||||
|
||||
db.execute(
|
||||
"
|
||||
REPLACE INTO files
|
||||
(worktree_id, relative_path, mtime_seconds, mtime_nanos)
|
||||
VALUES (?1, ?2, ?3, ?4)
|
||||
",
|
||||
params![worktree_id, path.to_str(), mtime.seconds, mtime.nanos],
|
||||
)?;
|
||||
|
||||
let file_id = db.last_insert_rowid();
|
||||
|
||||
let mut query = db.prepare(
|
||||
"
|
||||
INSERT INTO spans
|
||||
(file_id, start_byte, end_byte, name, embedding, digest)
|
||||
VALUES (?1, ?2, ?3, ?4, ?5, ?6)
|
||||
",
|
||||
)?;
|
||||
|
||||
for span in spans {
|
||||
query.execute(params![
|
||||
file_id,
|
||||
span.range.start.to_string(),
|
||||
span.range.end.to_string(),
|
||||
span.name,
|
||||
span.embedding,
|
||||
span.digest
|
||||
])?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}

    pub fn worktree_previously_indexed(
        &self,
        worktree_root_path: &Path,
    ) -> impl Future<Output = Result<bool>> {
        let worktree_root_path = worktree_root_path.to_string_lossy().into_owned();
        self.transact(move |db| {
            let mut worktree_query =
                db.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
            let worktree_id = worktree_query
                .query_row(params![worktree_root_path], |row| Ok(row.get::<_, i64>(0)?));

            if worktree_id.is_ok() {
                return Ok(true);
            } else {
                return Ok(false);
            }
        })
    }

    pub fn embeddings_for_digests(
        &self,
        digests: Vec<SpanDigest>,
    ) -> impl Future<Output = Result<HashMap<SpanDigest, Embedding>>> {
        self.transact(move |db| {
            let mut query = db.prepare(
                "
                SELECT digest, embedding
                FROM spans
                WHERE digest IN rarray(?)
                ",
            )?;
            let mut embeddings_by_digest = HashMap::default();
            let digests = Rc::new(
                digests
                    .into_iter()
                    .map(|p| Value::Blob(p.0.to_vec()))
                    .collect::<Vec<_>>(),
            );
            let rows = query.query_map(params![digests], |row| {
                Ok((row.get::<_, SpanDigest>(0)?, row.get::<_, Embedding>(1)?))
            })?;

            for row in rows {
                if let Ok(row) = row {
                    embeddings_by_digest.insert(row.0, row.1);
                }
            }

            Ok(embeddings_by_digest)
        })
    }

    pub fn embeddings_for_files(
        &self,
        worktree_id_file_paths: HashMap<i64, Vec<Arc<Path>>>,
    ) -> impl Future<Output = Result<HashMap<SpanDigest, Embedding>>> {
        self.transact(move |db| {
            let mut query = db.prepare(
                "
                SELECT digest, embedding
                FROM spans
                LEFT JOIN files ON files.id = spans.file_id
                WHERE files.worktree_id = ? AND files.relative_path IN rarray(?)
                ",
            )?;
            let mut embeddings_by_digest = HashMap::default();
            for (worktree_id, file_paths) in worktree_id_file_paths {
                let file_paths = Rc::new(
                    file_paths
                        .into_iter()
                        .map(|p| Value::Text(p.to_string_lossy().into_owned()))
                        .collect::<Vec<_>>(),
                );
                let rows = query.query_map(params![worktree_id, file_paths], |row| {
                    Ok((row.get::<_, SpanDigest>(0)?, row.get::<_, Embedding>(1)?))
                })?;

                for row in rows {
                    if let Ok(row) = row {
                        embeddings_by_digest.insert(row.0, row.1);
                    }
                }
            }

            Ok(embeddings_by_digest)
        })
    }

    pub fn find_or_create_worktree(
        &self,
        worktree_root_path: Arc<Path>,
    ) -> impl Future<Output = Result<i64>> {
        self.transact(move |db| {
            let mut worktree_query =
                db.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
            let worktree_id = worktree_query
                .query_row(params![worktree_root_path.to_string_lossy()], |row| {
                    Ok(row.get::<_, i64>(0)?)
                });

            if worktree_id.is_ok() {
                return Ok(worktree_id?);
            }

            // If worktree_id is Err, insert new worktree
            db.execute(
                "INSERT into worktrees (absolute_path) VALUES (?1)",
                params![worktree_root_path.to_string_lossy()],
            )?;
            Ok(db.last_insert_rowid())
        })
    }
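// Illustrative sketch (not from this commit): the same "select, insert only on a miss"
// pattern can also be written with rusqlite's `OptionalExtension`, which turns the
// "no rows" error into `None` instead of branching on `is_ok()`. Function and table
// names below mirror the method above but are examples only.
fn find_or_create(db: &rusqlite::Connection, absolute_path: &str) -> rusqlite::Result<i64> {
    use rusqlite::{params, OptionalExtension};

    let existing: Option<i64> = db
        .query_row(
            "SELECT id FROM worktrees WHERE absolute_path = ?1",
            params![absolute_path],
            |row| row.get(0),
        )
        .optional()?;

    if let Some(id) = existing {
        return Ok(id);
    }

    db.execute(
        "INSERT INTO worktrees (absolute_path) VALUES (?1)",
        params![absolute_path],
    )?;
    Ok(db.last_insert_rowid())
}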

    pub fn get_file_mtimes(
        &self,
        worktree_id: i64,
    ) -> impl Future<Output = Result<HashMap<PathBuf, SystemTime>>> {
        self.transact(move |db| {
            let mut statement = db.prepare(
                "
                SELECT relative_path, mtime_seconds, mtime_nanos
                FROM files
                WHERE worktree_id = ?1
                ORDER BY relative_path",
            )?;
            let mut result: HashMap<PathBuf, SystemTime> = HashMap::default();
            for row in statement.query_map(params![worktree_id], |row| {
                Ok((
                    row.get::<_, String>(0)?.into(),
                    Timestamp {
                        seconds: row.get(1)?,
                        nanos: row.get(2)?,
                    }
                    .into(),
                ))
            })? {
                let row = row?;
                result.insert(row.0, row.1);
            }
            Ok(result)
        })
    }

    pub fn top_k_search(
        &self,
        query_embedding: &Embedding,
        limit: usize,
        file_ids: &[i64],
    ) -> impl Future<Output = Result<Vec<(i64, OrderedFloat<f32>)>>> {
        let file_ids = file_ids.to_vec();
        let query = query_embedding.clone().0;
        let query = Array1::from_vec(query);
        self.transact(move |db| {
            let mut query_statement = db.prepare(
                "
                SELECT
                    id, embedding
                FROM
                    spans
                WHERE
                    file_id IN rarray(?)
                ",
            )?;

            let deserialized_rows = query_statement
                .query_map(params![ids_to_sql(&file_ids)], |row| {
                    Ok((row.get::<_, usize>(0)?, row.get::<_, Embedding>(1)?))
                })?
                .filter_map(|row| row.ok())
                .collect::<Vec<(usize, Embedding)>>();

            if deserialized_rows.len() == 0 {
                return Ok(Vec::new());
            }

            // Get Length of Embeddings Returned
            let embedding_len = deserialized_rows[0].1 .0.len();

            let batch_n = 1000;
            let mut batches = Vec::new();
            let mut batch_ids = Vec::new();
            let mut batch_embeddings: Vec<f32> = Vec::new();
            deserialized_rows.iter().for_each(|(id, embedding)| {
                batch_ids.push(id);
                batch_embeddings.extend(&embedding.0);

                if batch_ids.len() == batch_n {
                    let embeddings = std::mem::take(&mut batch_embeddings);
                    let ids = std::mem::take(&mut batch_ids);
                    let array =
                        Array2::from_shape_vec((ids.len(), embedding_len.clone()), embeddings);
                    match array {
                        Ok(array) => {
                            batches.push((ids, array));
                        }
                        Err(err) => log::error!("Failed to deserialize to ndarray: {:?}", err),
                    }
                }
            });

            if batch_ids.len() > 0 {
                let array = Array2::from_shape_vec(
                    (batch_ids.len(), embedding_len),
                    batch_embeddings.clone(),
                );
                match array {
                    Ok(array) => {
                        batches.push((batch_ids.clone(), array));
                    }
                    Err(err) => log::error!("Failed to deserialize to ndarray: {:?}", err),
                }
            }

            let mut ids: Vec<usize> = Vec::new();
            let mut results = Vec::new();
            for (batch_ids, array) in batches {
                let scores = array
                    .dot(&query.t())
                    .to_vec()
                    .iter()
                    .map(|score| OrderedFloat(*score))
                    .collect::<Vec<OrderedFloat<f32>>>();
                results.extend(scores);
                ids.extend(batch_ids);
            }

            let sorted_idx = argsort(&results);
            let mut sorted_results = Vec::new();
            let last_idx = limit.min(sorted_idx.len());
            for idx in &sorted_idx[0..last_idx] {
                sorted_results.push((ids[*idx] as i64, results[*idx]))
            }

            Ok(sorted_results)
        })
    }
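// Illustrative sketch (not from this commit): the scoring step above stacks candidate
// embeddings into an `ndarray::Array2`, takes a matrix-vector dot product against the
// query, then ranks by score. A standalone version of that idea; the function name is
// made up, and the real `argsort` helper lives elsewhere in this crate.
fn rank_by_dot_product(query: Vec<f32>, rows: Vec<(i64, Vec<f32>)>, limit: usize) -> Vec<(i64, f32)> {
    use ndarray::{Array1, Array2};

    let dim = query.len();
    let ids: Vec<i64> = rows.iter().map(|(id, _)| *id).collect();
    let flat: Vec<f32> = rows.iter().flat_map(|(_, e)| e.iter().copied()).collect();

    // One row per candidate embedding; panics if any embedding has the wrong length.
    let matrix = Array2::from_shape_vec((ids.len(), dim), flat).expect("ragged embeddings");
    let query = Array1::from_vec(query);
    let scores = matrix.dot(&query); // one score per candidate

    // Descending argsort over the scores, then keep the top `limit`.
    let mut order: Vec<usize> = (0..ids.len()).collect();
    order.sort_by(|&a, &b| scores[b].total_cmp(&scores[a]));
    order
        .into_iter()
        .take(limit)
        .map(|i| (ids[i], scores[i]))
        .collect()
}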

    pub fn retrieve_included_file_ids(
        &self,
        worktree_ids: &[i64],
        includes: &[PathMatcher],
        excludes: &[PathMatcher],
    ) -> impl Future<Output = Result<Vec<i64>>> {
        let worktree_ids = worktree_ids.to_vec();
        let includes = includes.to_vec();
        let excludes = excludes.to_vec();
        self.transact(move |db| {
            let mut file_query = db.prepare(
                "
                SELECT
                    id, relative_path
                FROM
                    files
                WHERE
                    worktree_id IN rarray(?)
                ",
            )?;

            let mut file_ids = Vec::<i64>::new();
            let mut rows = file_query.query([ids_to_sql(&worktree_ids)])?;

            while let Some(row) = rows.next()? {
                let file_id = row.get(0)?;
                let relative_path = row.get_ref(1)?.as_str()?;
                let included =
                    includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
                let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
                if included && !excluded {
                    file_ids.push(file_id);
                }
            }

            anyhow::Ok(file_ids)
        })
    }

    pub fn spans_for_ids(
        &self,
        ids: &[i64],
    ) -> impl Future<Output = Result<Vec<(i64, PathBuf, Range<usize>)>>> {
        let ids = ids.to_vec();
        self.transact(move |db| {
            let mut statement = db.prepare(
                "
                SELECT
                    spans.id,
                    files.worktree_id,
                    files.relative_path,
                    spans.start_byte,
                    spans.end_byte
                FROM
                    spans, files
                WHERE
                    spans.file_id = files.id AND
                    spans.id in rarray(?)
                ",
            )?;

            let result_iter = statement.query_map(params![ids_to_sql(&ids)], |row| {
                Ok((
                    row.get::<_, i64>(0)?,
                    row.get::<_, i64>(1)?,
                    row.get::<_, String>(2)?.into(),
                    row.get(3)?..row.get(4)?,
                ))
            })?;

            let mut values_by_id = HashMap::<i64, (i64, PathBuf, Range<usize>)>::default();
            for row in result_iter {
                let (id, worktree_id, path, range) = row?;
                values_by_id.insert(id, (worktree_id, path, range));
            }

            let mut results = Vec::with_capacity(ids.len());
            for id in &ids {
                let value = values_by_id
                    .remove(id)
                    .ok_or(anyhow!("missing span id {}", id))?;
                results.push(value);
            }

            Ok(results)
        })
    }
}

fn ids_to_sql(ids: &[i64]) -> Rc<Vec<rusqlite::types::Value>> {
    Rc::new(
        ids.iter()
            .copied()
            .map(|v| rusqlite::types::Value::from(v))
            .collect::<Vec<_>>(),
    )
}
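// Illustrative sketch (not from this commit): the `rarray(?)` calls in the queries above
// rely on rusqlite's "array" feature, where an `Rc<Vec<Value>>` parameter is exposed as a
// table-valued function. A minimal standalone use of that pattern, with an example table:
fn select_in_id_set() -> rusqlite::Result<Vec<i64>> {
    use rusqlite::types::Value;
    use std::rc::Rc;

    let conn = rusqlite::Connection::open_in_memory()?;
    // The rarray() table-valued function must be registered on each connection.
    rusqlite::vtab::array::load_module(&conn)?;

    conn.execute_batch(
        "CREATE TABLE files (id INTEGER PRIMARY KEY);
         INSERT INTO files (id) VALUES (1), (2), (3), (4);",
    )?;

    let wanted: Rc<Vec<Value>> = Rc::new(vec![Value::from(2i64), Value::from(4i64)]);
    let mut stmt = conn.prepare("SELECT id FROM files WHERE id IN rarray(?1)")?;
    let ids = stmt
        .query_map([wanted], |row| row.get::<_, i64>(0))?
        .collect::<rusqlite::Result<Vec<_>>>()?;
    Ok(ids) // yields [2, 4]
}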
169
crates/semantic_index2/src/embedding_queue.rs
Normal file
169
crates/semantic_index2/src/embedding_queue.rs
Normal file
@ -0,0 +1,169 @@
use crate::{parsing::Span, JobHandle};
use ai::embedding::EmbeddingProvider;
use gpui::BackgroundExecutor;
use parking_lot::Mutex;
use smol::channel;
use std::{mem, ops::Range, path::Path, sync::Arc, time::SystemTime};

#[derive(Clone)]
pub struct FileToEmbed {
    pub worktree_id: i64,
    pub path: Arc<Path>,
    pub mtime: SystemTime,
    pub spans: Vec<Span>,
    pub job_handle: JobHandle,
}

impl std::fmt::Debug for FileToEmbed {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("FileToEmbed")
            .field("worktree_id", &self.worktree_id)
            .field("path", &self.path)
            .field("mtime", &self.mtime)
            .field("spans", &self.spans)
            .finish_non_exhaustive()
    }
}

impl PartialEq for FileToEmbed {
    fn eq(&self, other: &Self) -> bool {
        self.worktree_id == other.worktree_id
            && self.path == other.path
            && self.mtime == other.mtime
            && self.spans == other.spans
    }
}

pub struct EmbeddingQueue {
    embedding_provider: Arc<dyn EmbeddingProvider>,
    pending_batch: Vec<FileFragmentToEmbed>,
    executor: BackgroundExecutor,
    pending_batch_token_count: usize,
    finished_files_tx: channel::Sender<FileToEmbed>,
    finished_files_rx: channel::Receiver<FileToEmbed>,
}

#[derive(Clone)]
pub struct FileFragmentToEmbed {
    file: Arc<Mutex<FileToEmbed>>,
    span_range: Range<usize>,
}

impl EmbeddingQueue {
    pub fn new(
        embedding_provider: Arc<dyn EmbeddingProvider>,
        executor: BackgroundExecutor,
    ) -> Self {
        let (finished_files_tx, finished_files_rx) = channel::unbounded();
        Self {
            embedding_provider,
            executor,
            pending_batch: Vec::new(),
            pending_batch_token_count: 0,
            finished_files_tx,
            finished_files_rx,
        }
    }

    pub fn push(&mut self, file: FileToEmbed) {
        if file.spans.is_empty() {
            self.finished_files_tx.try_send(file).unwrap();
            return;
        }

        let file = Arc::new(Mutex::new(file));

        self.pending_batch.push(FileFragmentToEmbed {
            file: file.clone(),
            span_range: 0..0,
        });

        let mut fragment_range = &mut self.pending_batch.last_mut().unwrap().span_range;
        for (ix, span) in file.lock().spans.iter().enumerate() {
            let span_token_count = if span.embedding.is_none() {
                span.token_count
            } else {
                0
            };

            let next_token_count = self.pending_batch_token_count + span_token_count;
            if next_token_count > self.embedding_provider.max_tokens_per_batch() {
                let range_end = fragment_range.end;
                self.flush();
                self.pending_batch.push(FileFragmentToEmbed {
                    file: file.clone(),
                    span_range: range_end..range_end,
                });
                fragment_range = &mut self.pending_batch.last_mut().unwrap().span_range;
            }

            fragment_range.end = ix + 1;
            self.pending_batch_token_count += span_token_count;
        }
    }

    pub fn flush(&mut self) {
        let batch = mem::take(&mut self.pending_batch);
        self.pending_batch_token_count = 0;
        if batch.is_empty() {
            return;
        }

        let finished_files_tx = self.finished_files_tx.clone();
        let embedding_provider = self.embedding_provider.clone();

        self.executor
            .spawn(async move {
                let mut spans = Vec::new();
                for fragment in &batch {
                    let file = fragment.file.lock();
                    spans.extend(
                        file.spans[fragment.span_range.clone()]
                            .iter()
                            .filter(|d| d.embedding.is_none())
                            .map(|d| d.content.clone()),
                    );
                }

                // If there are no spans left to embed, send each file to the finished
                // channel once its last fragment is dropped.
                if spans.is_empty() {
                    for fragment in batch.clone() {
                        if let Some(file) = Arc::into_inner(fragment.file) {
                            finished_files_tx.try_send(file.into_inner()).unwrap();
                        }
                    }
                    return;
                };

                match embedding_provider.embed_batch(spans).await {
                    Ok(embeddings) => {
                        let mut embeddings = embeddings.into_iter();
                        for fragment in batch {
                            for span in &mut fragment.file.lock().spans[fragment.span_range.clone()]
                                .iter_mut()
                                .filter(|d| d.embedding.is_none())
                            {
                                if let Some(embedding) = embeddings.next() {
                                    span.embedding = Some(embedding);
                                } else {
                                    log::error!("number of embeddings != number of documents");
                                }
                            }

                            if let Some(file) = Arc::into_inner(fragment.file) {
                                finished_files_tx.try_send(file.into_inner()).unwrap();
                            }
                        }
                    }
                    Err(error) => {
                        log::error!("{:?}", error);
                    }
                }
            })
            .detach();
    }

    pub fn finished_files(&self) -> channel::Receiver<FileToEmbed> {
        self.finished_files_rx.clone()
    }
}
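// Illustrative sketch (not from this commit): the queue above groups work by a token
// budget rather than by item count, splitting a file's spans across batches when the
// budget would overflow. The same batching idea in miniature, over plain token counts;
// the function name and return shape are made up for the example.
fn batch_by_token_budget(token_counts: &[usize], max_tokens_per_batch: usize) -> Vec<Vec<usize>> {
    let mut batches: Vec<Vec<usize>> = Vec::new();
    let mut current: Vec<usize> = Vec::new();
    let mut current_tokens = 0;

    for (ix, &tokens) in token_counts.iter().enumerate() {
        if !current.is_empty() && current_tokens + tokens > max_tokens_per_batch {
            // Budget exceeded: seal the current batch and start a new one.
            batches.push(std::mem::take(&mut current));
            current_tokens = 0;
        }
        current.push(ix); // indices play the role of span ranges here
        current_tokens += tokens;
    }
    if !current.is_empty() {
        batches.push(current);
    }
    batches
    // e.g. batch_by_token_budget(&[400, 700, 300], 1000) == vec![vec![0], vec![1, 2]]
}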
414
crates/semantic_index2/src/parsing.rs
Normal file
414
crates/semantic_index2/src/parsing.rs
Normal file
@ -0,0 +1,414 @@
use ai::{
    embedding::{Embedding, EmbeddingProvider},
    models::TruncationDirection,
};
use anyhow::{anyhow, Result};
use language::{Grammar, Language};
use rusqlite::{
    types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef},
    ToSql,
};
use sha1::{Digest, Sha1};
use std::{
    borrow::Cow,
    cmp::{self, Reverse},
    collections::HashSet,
    ops::Range,
    path::Path,
    sync::Arc,
};
use tree_sitter::{Parser, QueryCursor};

#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct SpanDigest(pub [u8; 20]);

impl FromSql for SpanDigest {
    fn column_result(value: ValueRef) -> FromSqlResult<Self> {
        let blob = value.as_blob()?;
        let bytes =
            blob.try_into()
                .map_err(|_| rusqlite::types::FromSqlError::InvalidBlobSize {
                    expected_size: 20,
                    blob_size: blob.len(),
                })?;
        return Ok(SpanDigest(bytes));
    }
}

impl ToSql for SpanDigest {
    fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
        self.0.to_sql()
    }
}

impl From<&'_ str> for SpanDigest {
    fn from(value: &'_ str) -> Self {
        let mut sha1 = Sha1::new();
        sha1.update(value);
        Self(sha1.finalize().into())
    }
}

#[derive(Debug, PartialEq, Clone)]
pub struct Span {
    pub name: String,
    pub range: Range<usize>,
    pub content: String,
    pub embedding: Option<Embedding>,
    pub digest: SpanDigest,
    pub token_count: usize,
}

const CODE_CONTEXT_TEMPLATE: &str =
    "The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```";
const ENTIRE_FILE_TEMPLATE: &str =
    "The below snippet is from file '<path>'\n\n```<language>\n<item>\n```";
const MARKDOWN_CONTEXT_TEMPLATE: &str = "The below file contents is from file '<path>'\n\n<item>";
pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] = &[
    "TOML", "YAML", "CSS", "HEEX", "ERB", "SVELTE", "HTML", "Scheme",
];

pub struct CodeContextRetriever {
    pub parser: Parser,
    pub cursor: QueryCursor,
    pub embedding_provider: Arc<dyn EmbeddingProvider>,
}

// Every match has an item; this represents the fundamental treesitter symbol and anchors the search.
// Every match has one or more 'name' captures. These indicate the display range of the item for deduplication.
// If there are preceding comments, we track this with a context capture.
// If there is a piece that should be collapsed in hierarchical queries, we capture it with a collapse capture.
// If there is a piece that should be kept inside a collapsed node, we capture it with a keep capture.
#[derive(Debug, Clone)]
pub struct CodeContextMatch {
    pub start_col: usize,
    pub item_range: Option<Range<usize>>,
    pub name_range: Option<Range<usize>>,
    pub context_ranges: Vec<Range<usize>>,
    pub collapse_ranges: Vec<Range<usize>>,
}
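// Illustrative sketch (not from this commit): the capture roles described above come from
// per-language tree-sitter embedding queries stored in the language configuration. A
// minimal standalone query against Rust source using only an item and a name capture;
// the query text and function name here are examples, not Zed's actual embedding query.
fn collect_function_items(source: &str) -> Vec<(String, std::ops::Range<usize>)> {
    use tree_sitter::{Parser, Query, QueryCursor};

    let language = tree_sitter_rust::language();
    let query = Query::new(language, "(function_item name: (identifier) @name) @item")
        .expect("query should parse");
    let item_ix = query.capture_index_for_name("item").unwrap();
    let name_ix = query.capture_index_for_name("name").unwrap();

    let mut parser = Parser::new();
    parser.set_language(language).unwrap();
    let tree = parser.parse(source, None).expect("parsing failed");

    let mut cursor = QueryCursor::new();
    let mut items = Vec::new();
    for mat in cursor.matches(&query, tree.root_node(), source.as_bytes()) {
        let mut name = String::new();
        let mut range = 0..0;
        for capture in mat.captures {
            if capture.index == name_ix {
                name = source[capture.node.byte_range()].to_string();
            } else if capture.index == item_ix {
                range = capture.node.byte_range();
            }
        }
        items.push((name, range));
    }
    items
}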

impl CodeContextRetriever {
    pub fn new(embedding_provider: Arc<dyn EmbeddingProvider>) -> Self {
        Self {
            parser: Parser::new(),
            cursor: QueryCursor::new(),
            embedding_provider,
        }
    }

    fn parse_entire_file(
        &self,
        relative_path: Option<&Path>,
        language_name: Arc<str>,
        content: &str,
    ) -> Result<Vec<Span>> {
        let document_span = ENTIRE_FILE_TEMPLATE
            .replace(
                "<path>",
                &relative_path.map_or(Cow::Borrowed("untitled"), |path| path.to_string_lossy()),
            )
            .replace("<language>", language_name.as_ref())
            .replace("<item>", &content);
        let digest = SpanDigest::from(document_span.as_str());
        let model = self.embedding_provider.base_model();
        let document_span = model.truncate(
            &document_span,
            model.capacity()?,
            ai::models::TruncationDirection::End,
        )?;
        let token_count = model.count_tokens(&document_span)?;

        Ok(vec![Span {
            range: 0..content.len(),
            content: document_span,
            embedding: Default::default(),
            name: language_name.to_string(),
            digest,
            token_count,
        }])
    }

    fn parse_markdown_file(
        &self,
        relative_path: Option<&Path>,
        content: &str,
    ) -> Result<Vec<Span>> {
        let document_span = MARKDOWN_CONTEXT_TEMPLATE
            .replace(
                "<path>",
                &relative_path.map_or(Cow::Borrowed("untitled"), |path| path.to_string_lossy()),
            )
            .replace("<item>", &content);
        let digest = SpanDigest::from(document_span.as_str());

        let model = self.embedding_provider.base_model();
        let document_span = model.truncate(
            &document_span,
            model.capacity()?,
            ai::models::TruncationDirection::End,
        )?;
        let token_count = model.count_tokens(&document_span)?;

        Ok(vec![Span {
            range: 0..content.len(),
            content: document_span,
            embedding: None,
            name: "Markdown".to_string(),
            digest,
            token_count,
        }])
    }

    fn get_matches_in_file(
        &mut self,
        content: &str,
        grammar: &Arc<Grammar>,
    ) -> Result<Vec<CodeContextMatch>> {
        let embedding_config = grammar
            .embedding_config
            .as_ref()
            .ok_or_else(|| anyhow!("no embedding queries"))?;
        self.parser.set_language(grammar.ts_language).unwrap();

        let tree = self
            .parser
            .parse(&content, None)
            .ok_or_else(|| anyhow!("parsing failed"))?;

        let mut captures: Vec<CodeContextMatch> = Vec::new();
        let mut collapse_ranges: Vec<Range<usize>> = Vec::new();
        let mut keep_ranges: Vec<Range<usize>> = Vec::new();
        for mat in self.cursor.matches(
            &embedding_config.query,
            tree.root_node(),
            content.as_bytes(),
        ) {
            let mut start_col = 0;
            let mut item_range: Option<Range<usize>> = None;
            let mut name_range: Option<Range<usize>> = None;
            let mut context_ranges: Vec<Range<usize>> = Vec::new();
            collapse_ranges.clear();
            keep_ranges.clear();
            for capture in mat.captures {
                if capture.index == embedding_config.item_capture_ix {
                    item_range = Some(capture.node.byte_range());
                    start_col = capture.node.start_position().column;
                } else if Some(capture.index) == embedding_config.name_capture_ix {
                    name_range = Some(capture.node.byte_range());
                } else if Some(capture.index) == embedding_config.context_capture_ix {
                    context_ranges.push(capture.node.byte_range());
                } else if Some(capture.index) == embedding_config.collapse_capture_ix {
                    collapse_ranges.push(capture.node.byte_range());
                } else if Some(capture.index) == embedding_config.keep_capture_ix {
                    keep_ranges.push(capture.node.byte_range());
                }
            }

            captures.push(CodeContextMatch {
                start_col,
                item_range,
                name_range,
                context_ranges,
                collapse_ranges: subtract_ranges(&collapse_ranges, &keep_ranges),
            });
        }
        Ok(captures)
    }

    pub fn parse_file_with_template(
        &mut self,
        relative_path: Option<&Path>,
        content: &str,
        language: Arc<Language>,
    ) -> Result<Vec<Span>> {
        let language_name = language.name();

        if PARSEABLE_ENTIRE_FILE_TYPES.contains(&language_name.as_ref()) {
            return self.parse_entire_file(relative_path, language_name, &content);
        } else if ["Markdown", "Plain Text"].contains(&language_name.as_ref()) {
            return self.parse_markdown_file(relative_path, &content);
        }

        let mut spans = self.parse_file(content, language)?;
        for span in &mut spans {
            let document_content = CODE_CONTEXT_TEMPLATE
                .replace(
                    "<path>",
                    &relative_path.map_or(Cow::Borrowed("untitled"), |path| path.to_string_lossy()),
                )
                .replace("<language>", language_name.as_ref())
                .replace("<item>", &span.content);

            let model = self.embedding_provider.base_model();
            let document_content = model.truncate(
                &document_content,
                model.capacity()?,
                TruncationDirection::End,
            )?;
            let token_count = model.count_tokens(&document_content)?;

            span.content = document_content;
            span.token_count = token_count;
        }
        Ok(spans)
    }

    pub fn parse_file(&mut self, content: &str, language: Arc<Language>) -> Result<Vec<Span>> {
        let grammar = language
            .grammar()
            .ok_or_else(|| anyhow!("no grammar for language"))?;

        // Iterate through query matches
        let matches = self.get_matches_in_file(content, grammar)?;

        let language_scope = language.default_scope();
        let placeholder = language_scope.collapsed_placeholder();

        let mut spans = Vec::new();
        let mut collapsed_ranges_within = Vec::new();
        let mut parsed_name_ranges = HashSet::new();
        for (i, context_match) in matches.iter().enumerate() {
            // Items which are collapsible but not embeddable have no item range
            let item_range = if let Some(item_range) = context_match.item_range.clone() {
                item_range
            } else {
                continue;
            };

            // Checks for deduplication
            let name;
            if let Some(name_range) = context_match.name_range.clone() {
                name = content
                    .get(name_range.clone())
                    .map_or(String::new(), |s| s.to_string());
                if parsed_name_ranges.contains(&name_range) {
                    continue;
                }
                parsed_name_ranges.insert(name_range);
            } else {
                name = String::new();
            }

            collapsed_ranges_within.clear();
            'outer: for remaining_match in &matches[(i + 1)..] {
                for collapsed_range in &remaining_match.collapse_ranges {
                    if item_range.start <= collapsed_range.start
                        && item_range.end >= collapsed_range.end
                    {
                        collapsed_ranges_within.push(collapsed_range.clone());
                    } else {
                        break 'outer;
                    }
                }
            }

            collapsed_ranges_within.sort_by_key(|r| (r.start, Reverse(r.end)));

            let mut span_content = String::new();
            for context_range in &context_match.context_ranges {
                add_content_from_range(
                    &mut span_content,
                    content,
                    context_range.clone(),
                    context_match.start_col,
                );
                span_content.push_str("\n");
            }

            let mut offset = item_range.start;
            for collapsed_range in &collapsed_ranges_within {
                if collapsed_range.start > offset {
                    add_content_from_range(
                        &mut span_content,
                        content,
                        offset..collapsed_range.start,
                        context_match.start_col,
                    );
                    offset = collapsed_range.start;
                }

                if collapsed_range.end > offset {
                    span_content.push_str(placeholder);
                    offset = collapsed_range.end;
                }
            }

            if offset < item_range.end {
                add_content_from_range(
                    &mut span_content,
                    content,
                    offset..item_range.end,
                    context_match.start_col,
                );
            }

            let sha1 = SpanDigest::from(span_content.as_str());
            spans.push(Span {
                name,
                content: span_content,
                range: item_range.clone(),
                embedding: None,
                digest: sha1,
                token_count: 0,
            })
        }

        return Ok(spans);
    }
}

pub(crate) fn subtract_ranges(
    ranges: &[Range<usize>],
    ranges_to_subtract: &[Range<usize>],
) -> Vec<Range<usize>> {
    let mut result = Vec::new();

    let mut ranges_to_subtract = ranges_to_subtract.iter().peekable();

    for range in ranges {
        let mut offset = range.start;

        while offset < range.end {
            if let Some(range_to_subtract) = ranges_to_subtract.peek() {
                if offset < range_to_subtract.start {
                    let next_offset = cmp::min(range_to_subtract.start, range.end);
                    result.push(offset..next_offset);
                    offset = next_offset;
                } else {
                    let next_offset = cmp::min(range_to_subtract.end, range.end);
                    offset = next_offset;
                }

                if offset >= range_to_subtract.end {
                    ranges_to_subtract.next();
                }
            } else {
                result.push(offset..range.end);
                offset = range.end;
            }
        }
    }

    result
}

fn add_content_from_range(
    output: &mut String,
    content: &str,
    range: Range<usize>,
    start_col: usize,
) {
    for mut line in content.get(range.clone()).unwrap_or("").lines() {
        for _ in 0..start_col {
            if line.starts_with(' ') {
                line = &line[1..];
            } else {
                break;
            }
        }
        output.push_str(line);
        output.push('\n');
    }
    output.pop();
}
1280
crates/semantic_index2/src/semantic_index.rs
Normal file
1280
crates/semantic_index2/src/semantic_index.rs
Normal file
File diff suppressed because it is too large
28
crates/semantic_index2/src/semantic_index_settings.rs
Normal file
28
crates/semantic_index2/src/semantic_index_settings.rs
Normal file
@ -0,0 +1,28 @@
use anyhow;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;

#[derive(Deserialize, Debug)]
pub struct SemanticIndexSettings {
    pub enabled: bool,
}

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct SemanticIndexSettingsContent {
    pub enabled: Option<bool>,
}

impl Settings for SemanticIndexSettings {
    const KEY: Option<&'static str> = Some("semantic_index");

    type FileContent = SemanticIndexSettingsContent;

    fn load(
        default_value: &Self::FileContent,
        user_values: &[&Self::FileContent],
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        Self::load_via_json_merge(default_value, user_values)
    }
}
1697
crates/semantic_index2/src/semantic_index_tests.rs
Normal file
1697
crates/semantic_index2/src/semantic_index_tests.rs
Normal file
File diff suppressed because it is too large
@ -1,4 +1,5 @@
mod auto_height_editor;
mod cursor;
mod focus;
mod kitchen_sink;
mod picker;
@ -7,6 +8,7 @@ mod text;
mod z_index;

pub use auto_height_editor::*;
pub use cursor::*;
pub use focus::*;
pub use kitchen_sink::*;
pub use picker::*;
111
crates/storybook2/src/stories/cursor.rs
Normal file
111
crates/storybook2/src/stories/cursor.rs
Normal file
@ -0,0 +1,111 @@
use gpui::{Div, Render, Stateful};
use story::Story;
use ui::prelude::*;

pub struct CursorStory;

impl Render for CursorStory {
    type Element = Div;

    fn render(&mut self, _cx: &mut ViewContext<Self>) -> Self::Element {
        let all_cursors: [(&str, Box<dyn Fn(Stateful<Div>) -> Stateful<Div>>); 19] = [
            (
                "cursor_default",
                Box::new(|el: Stateful<Div>| el.cursor_default()),
            ),
            (
                "cursor_pointer",
                Box::new(|el: Stateful<Div>| el.cursor_pointer()),
            ),
            (
                "cursor_text",
                Box::new(|el: Stateful<Div>| el.cursor_text()),
            ),
            (
                "cursor_move",
                Box::new(|el: Stateful<Div>| el.cursor_move()),
            ),
            (
                "cursor_not_allowed",
                Box::new(|el: Stateful<Div>| el.cursor_not_allowed()),
            ),
            (
                "cursor_context_menu",
                Box::new(|el: Stateful<Div>| el.cursor_context_menu()),
            ),
            (
                "cursor_crosshair",
                Box::new(|el: Stateful<Div>| el.cursor_crosshair()),
            ),
            (
                "cursor_vertical_text",
                Box::new(|el: Stateful<Div>| el.cursor_vertical_text()),
            ),
            (
                "cursor_alias",
                Box::new(|el: Stateful<Div>| el.cursor_alias()),
            ),
            (
                "cursor_copy",
                Box::new(|el: Stateful<Div>| el.cursor_copy()),
            ),
            (
                "cursor_no_drop",
                Box::new(|el: Stateful<Div>| el.cursor_no_drop()),
            ),
            (
                "cursor_grab",
                Box::new(|el: Stateful<Div>| el.cursor_grab()),
            ),
            (
                "cursor_grabbing",
                Box::new(|el: Stateful<Div>| el.cursor_grabbing()),
            ),
            (
                "cursor_col_resize",
                Box::new(|el: Stateful<Div>| el.cursor_col_resize()),
            ),
            (
                "cursor_row_resize",
                Box::new(|el: Stateful<Div>| el.cursor_row_resize()),
            ),
            (
                "cursor_n_resize",
                Box::new(|el: Stateful<Div>| el.cursor_n_resize()),
            ),
            (
                "cursor_e_resize",
                Box::new(|el: Stateful<Div>| el.cursor_e_resize()),
            ),
            (
                "cursor_s_resize",
                Box::new(|el: Stateful<Div>| el.cursor_s_resize()),
            ),
            (
                "cursor_w_resize",
                Box::new(|el: Stateful<Div>| el.cursor_w_resize()),
            ),
        ];

        Story::container()
            .flex()
            .gap_1()
            .child(Story::title("cursor"))
            .children(all_cursors.map(|(name, apply_cursor)| {
                div().gap_1().flex().text_color(gpui::white()).child(
                    div()
                        .flex()
                        .items_center()
                        .justify_center()
                        .id(name)
                        .map(apply_cursor)
                        .w_64()
                        .h_8()
                        .bg(gpui::red())
                        .active(|style| style.bg(gpui::green()))
                        .text_sm()
                        .child(Story::label(name)),
                )
            }))
    }
}
@ -17,6 +17,7 @@ pub enum ComponentStory {
    Button,
    Checkbox,
    ContextMenu,
    Cursor,
    Disclosure,
    Focus,
    Icon,
@ -40,6 +41,7 @@ impl ComponentStory {
            Self::Button => cx.build_view(|_| ui::ButtonStory).into(),
            Self::Checkbox => cx.build_view(|_| ui::CheckboxStory).into(),
            Self::ContextMenu => cx.build_view(|_| ui::ContextMenuStory).into(),
            Self::Cursor => cx.build_view(|_| crate::stories::CursorStory).into(),
            Self::Disclosure => cx.build_view(|_| ui::DisclosureStory).into(),
            Self::Focus => FocusStory::view(cx).into(),
            Self::Icon => cx.build_view(|_| ui::IconStory).into(),
@ -1132,6 +1132,7 @@ mod tests {
            })
        })
        .await
        .unwrap()
        .unwrap();

        (wt, entry)
@ -300,11 +300,8 @@ impl TerminalView {
        cx: &mut ViewContext<Self>,
    ) {
        self.context_menu = Some(ContextMenu::build(cx, |menu, cx| {
            menu.action("Clear", Box::new(Clear), cx).action(
                "Close",
                Box::new(CloseActiveItem { save_intent: None }),
                cx,
            )
            menu.action("Clear", Box::new(Clear))
                .action("Close", Box::new(CloseActiveItem { save_intent: None }))
        }));
        // todo!()
        // self.context_menu
@ -1172,6 +1169,7 @@ mod tests {
            })
        })
        .await
        .unwrap()
        .unwrap();

        (wt, entry)
@ -52,13 +52,13 @@ pub(crate) fn one_dark() -> Theme {
        element_hover: hsla(225.0 / 360., 11.8 / 100., 26.7 / 100., 1.0),
        element_active: hsla(220.0 / 360., 11.8 / 100., 20.0 / 100., 1.0),
        element_selected: hsla(224.0 / 360., 11.3 / 100., 26.1 / 100., 1.0),
        element_disabled: hsla(224.0 / 360., 11.3 / 100., 26.1 / 100., 1.0),
        element_disabled: SystemColors::default().transparent,
        drop_target_background: hsla(220.0 / 360., 8.3 / 100., 21.4 / 100., 1.0),
        ghost_element_background: SystemColors::default().transparent,
        ghost_element_hover: hsla(225.0 / 360., 11.8 / 100., 26.7 / 100., 1.0),
        ghost_element_active: hsla(220.0 / 360., 11.8 / 100., 20.0 / 100., 1.0),
        ghost_element_selected: hsla(224.0 / 360., 11.3 / 100., 26.1 / 100., 1.0),
        ghost_element_disabled: hsla(224.0 / 360., 11.3 / 100., 26.1 / 100., 1.0),
        ghost_element_disabled: SystemColors::default().transparent,
        text: hsla(221. / 360., 11. / 100., 86. / 100., 1.0),
        text_muted: hsla(218.0 / 360., 7. / 100., 46. / 100., 1.0),
        text_placeholder: hsla(220.0 / 360., 6.6 / 100., 44.5 / 100., 1.0),
Some files were not shown because too many files have changed in this diff