Mirror of https://github.com/zed-industries/zed.git (synced 2024-12-28 07:22:17 +03:00)

Commit 37ef28a3bf: Merge branch 'main' into storybook
Cargo.lock (generated): 72 changes
@@ -1453,9 +1453,10 @@ dependencies = [
 
 [[package]]
 name = "collab"
-version = "0.19.0"
+version = "0.20.0"
 dependencies = [
  "anyhow",
+ "async-trait",
  "async-tungstenite",
  "audio",
  "axum",
@@ -3570,7 +3571,7 @@ dependencies = [
  "gif",
  "jpeg-decoder",
  "num-iter",
- "num-rational",
+ "num-rational 0.3.2",
  "num-traits",
  "png",
  "scoped_threadpool",
@@ -4613,6 +4614,7 @@ dependencies = [
  "anyhow",
+ "async-compression",
  "async-tar",
  "async-trait",
  "futures 0.3.28",
  "gpui",
  "log",
@@ -4662,6 +4664,31 @@ dependencies = [
  "winapi 0.3.9",
 ]
 
+[[package]]
+name = "num"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8536030f9fea7127f841b45bb6243b27255787fb4eb83958aa1ef9d2fdc0c36"
+dependencies = [
+ "num-bigint 0.2.6",
+ "num-complex",
+ "num-integer",
+ "num-iter",
+ "num-rational 0.2.4",
+ "num-traits",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304"
+dependencies = [
+ "autocfg",
+ "num-integer",
+ "num-traits",
+]
+
 [[package]]
 name = "num-bigint"
 version = "0.4.4"
@@ -4690,6 +4717,16 @@ dependencies = [
  "zeroize",
 ]
 
+[[package]]
+name = "num-complex"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6b19411a9719e753aff12e5187b74d60d3dc449ec3f4dc21e3989c3f554bc95"
+dependencies = [
+ "autocfg",
+ "num-traits",
+]
+
 [[package]]
 name = "num-derive"
 version = "0.3.3"
@@ -4722,6 +4759,18 @@ dependencies = [
  "num-traits",
 ]
 
+[[package]]
+name = "num-rational"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c000134b5dbf44adc5cb772486d335293351644b801551abe8f75c84cfa4aef"
+dependencies = [
+ "autocfg",
+ "num-bigint 0.2.6",
+ "num-integer",
+ "num-traits",
+]
+
 [[package]]
 name = "num-rational"
 version = "0.3.2"
@@ -5038,6 +5087,17 @@ dependencies = [
  "windows-targets 0.48.5",
 ]
 
+[[package]]
+name = "parse_duration"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7037e5e93e0172a5a96874380bf73bc6ecef022e26fa25f2be26864d6b3ba95d"
+dependencies = [
+ "lazy_static",
+ "num",
+ "regex",
+]
+
 [[package]]
 name = "password-hash"
 version = "0.2.3"
@@ -6666,6 +6726,7 @@ dependencies = [
  "anyhow",
  "async-trait",
  "bincode",
  "collections",
  "ctor",
  "editor",
  "env_logger 0.9.3",
@@ -6678,6 +6739,7 @@ dependencies = [
  "log",
  "matrixmultiply",
  "parking_lot 0.11.2",
+ "parse_duration",
  "picker",
  "postage",
  "pretty_assertions",
@@ -7009,7 +7071,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8eb4ea60fb301dc81dfc113df680571045d375ab7345d171c5dc7d7e13107a80"
 dependencies = [
  "chrono",
- "num-bigint",
+ "num-bigint 0.4.4",
  "num-traits",
  "thiserror",
 ]
@@ -7241,7 +7303,7 @@ dependencies = [
  "log",
  "md-5",
  "memchr",
- "num-bigint",
+ "num-bigint 0.4.4",
  "once_cell",
  "paste",
  "percent-encoding",
@@ -9720,7 +9782,7 @@ dependencies = [
 
 [[package]]
 name = "zed"
-version = "0.103.0"
+version = "0.104.0"
 dependencies = [
  "activity_indicator",
  "ai",
@@ -515,6 +515,17 @@
       "enter": "editor::ConfirmCodeAction"
     }
   },
+  {
+    "context": "Editor && (showing_code_actions || showing_completions)",
+    "bindings": {
+      "up": "editor::ContextMenuPrev",
+      "ctrl-p": "editor::ContextMenuPrev",
+      "down": "editor::ContextMenuNext",
+      "ctrl-n": "editor::ContextMenuNext",
+      "pageup": "editor::ContextMenuFirst",
+      "pagedown": "editor::ContextMenuLast"
+    }
+  },
   // Custom bindings
   {
     "bindings": {
@@ -371,6 +371,7 @@
       "Replace"
     ],
     "s": "vim::Substitute",
+    "shift-s": "vim::SubstituteLine",
     "> >": "editor::Indent",
     "< <": "editor::Outdent",
     "ctrl-pagedown": "pane::ActivateNextItem",
@@ -446,6 +447,7 @@
     }
   ],
   "s": "vim::Substitute",
+  "shift-s": "vim::SubstituteLine",
   "c": "vim::Substitute",
   "~": "vim::ChangeCase",
   "shift-i": [
@@ -273,7 +273,13 @@ impl ActiveCall {
             .borrow_mut()
             .take()
             .ok_or_else(|| anyhow!("no incoming call"))?;
-        Self::report_call_event_for_room("decline incoming", call.room_id, None, &self.client, cx);
+        Self::report_call_event_for_room(
+            "decline incoming",
+            Some(call.room_id),
+            None,
+            &self.client,
+            cx,
+        );
         self.client.send(proto::DeclineCall {
             room_id: call.room_id,
         })?;
@@ -404,21 +410,19 @@ impl ActiveCall {
     }
 
     fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
-        if let Some(room) = self.room() {
-            let room = room.read(cx);
-            Self::report_call_event_for_room(
-                operation,
-                room.id(),
-                room.channel_id(),
-                &self.client,
-                cx,
-            )
-        }
+        let (room_id, channel_id) = match self.room() {
+            Some(room) => {
+                let room = room.read(cx);
+                (Some(room.id()), room.channel_id())
+            }
+            None => (None, None),
+        };
+        Self::report_call_event_for_room(operation, room_id, channel_id, &self.client, cx)
     }
 
     pub fn report_call_event_for_room(
         operation: &'static str,
-        room_id: u64,
+        room_id: Option<u64>,
         channel_id: Option<u64>,
         client: &Arc<Client>,
         cx: &AppContext,
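The two hunks above make the telemetry room id optional, so that events like "decline incoming" can still be reported once no room is active. A minimal, self-contained sketch of that Option-based pattern; `CallEvent` and `report_call_event` here are hypothetical stand-ins, not the real zed types:

```rust
// Sketch only: `CallEvent` stands in for the real ClickhouseEvent::Call variant.
#[derive(Debug)]
struct CallEvent {
    operation: &'static str,
    room_id: Option<u64>,
    channel_id: Option<u64>,
}

fn report_call_event(operation: &'static str, room: Option<(u64, Option<u64>)>) -> CallEvent {
    // With no active room, both ids are reported as None instead of the
    // event being dropped entirely.
    let (room_id, channel_id) = match room {
        Some((room_id, channel_id)) => (Some(room_id), channel_id),
        None => (None, None),
    };
    CallEvent { operation, room_id, channel_id }
}

fn main() {
    println!("{:?}", report_call_event("decline incoming", None));
    println!("{:?}", report_call_event("join", Some((42, Some(7)))));
}
```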
@@ -10,6 +10,7 @@ pub(crate) fn init(client: &Arc<Client>) {
     client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
     client.add_model_message_handler(ChannelBuffer::handle_add_channel_buffer_collaborator);
     client.add_model_message_handler(ChannelBuffer::handle_remove_channel_buffer_collaborator);
+    client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer_collaborator);
 }
 
 pub struct ChannelBuffer {
@@ -17,6 +18,7 @@ pub struct ChannelBuffer {
     connected: bool,
     collaborators: Vec<proto::Collaborator>,
     buffer: ModelHandle<language::Buffer>,
+    buffer_epoch: u64,
     client: Arc<Client>,
     subscription: Option<client::Subscription>,
 }
@@ -73,6 +75,7 @@ impl ChannelBuffer {
 
             Self {
                 buffer,
+                buffer_epoch: response.epoch,
                 client,
                 connected: true,
                 collaborators,
@@ -82,6 +85,26 @@ impl ChannelBuffer {
         }))
     }
 
+    pub(crate) fn replace_collaborators(
+        &mut self,
+        collaborators: Vec<proto::Collaborator>,
+        cx: &mut ModelContext<Self>,
+    ) {
+        for old_collaborator in &self.collaborators {
+            if !collaborators
+                .iter()
+                .any(|c| c.replica_id == old_collaborator.replica_id)
+            {
+                self.buffer.update(cx, |buffer, cx| {
+                    buffer.remove_peer(old_collaborator.replica_id as u16, cx)
+                });
+            }
+        }
+        self.collaborators = collaborators;
+        cx.emit(Event::CollaboratorsChanged);
+        cx.notify();
+    }
+
     async fn handle_update_channel_buffer(
         this: ModelHandle<Self>,
         update_channel_buffer: TypedEnvelope<proto::UpdateChannelBuffer>,
@@ -149,6 +172,26 @@ impl ChannelBuffer {
         Ok(())
     }
 
+    async fn handle_update_channel_buffer_collaborator(
+        this: ModelHandle<Self>,
+        message: TypedEnvelope<proto::UpdateChannelBufferCollaborator>,
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<()> {
+        this.update(&mut cx, |this, cx| {
+            for collaborator in &mut this.collaborators {
+                if collaborator.peer_id == message.payload.old_peer_id {
+                    collaborator.peer_id = message.payload.new_peer_id;
+                    break;
+                }
+            }
+            cx.emit(Event::CollaboratorsChanged);
+            cx.notify();
+        });
+
+        Ok(())
+    }
+
     fn on_buffer_update(
         &mut self,
         _: ModelHandle<language::Buffer>,
@@ -166,6 +209,10 @@ impl ChannelBuffer {
         }
     }
 
+    pub fn epoch(&self) -> u64 {
+        self.buffer_epoch
+    }
+
     pub fn buffer(&self) -> ModelHandle<language::Buffer> {
         self.buffer.clone()
     }
@@ -179,6 +226,7 @@ impl ChannelBuffer {
     }
 
     pub(crate) fn disconnect(&mut self, cx: &mut ModelContext<Self>) {
+        log::info!("channel buffer {} disconnected", self.channel.id);
         if self.connected {
             self.connected = false;
             self.subscription.take();
@@ -1,13 +1,15 @@
 use crate::channel_buffer::ChannelBuffer;
 use anyhow::{anyhow, Result};
-use client::{Client, Status, Subscription, User, UserId, UserStore};
+use client::{Client, Subscription, User, UserId, UserStore};
 use collections::{hash_map, HashMap, HashSet};
 use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
-use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
+use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
 use rpc::{proto, TypedEnvelope};
-use std::sync::Arc;
+use std::{mem, sync::Arc, time::Duration};
 use util::ResultExt;
 
+pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
+
 pub type ChannelId = u64;
 
 pub struct ChannelStore {
@@ -22,7 +24,8 @@ pub struct ChannelStore {
     client: Arc<Client>,
     user_store: ModelHandle<UserStore>,
     _rpc_subscription: Subscription,
-    _watch_connection_status: Task<()>,
+    _watch_connection_status: Task<Option<()>>,
+    disconnect_channel_buffers_task: Option<Task<()>>,
     _update_channels: Task<()>,
 }
 
@@ -67,24 +70,20 @@ impl ChannelStore {
         let rpc_subscription =
             client.add_message_handler(cx.handle(), Self::handle_update_channels);
 
-        let (update_channels_tx, mut update_channels_rx) = mpsc::unbounded();
         let mut connection_status = client.status();
+        let (update_channels_tx, mut update_channels_rx) = mpsc::unbounded();
         let watch_connection_status = cx.spawn_weak(|this, mut cx| async move {
             while let Some(status) = connection_status.next().await {
-                if !status.is_connected() {
-                    if let Some(this) = this.upgrade(&cx) {
-                        this.update(&mut cx, |this, cx| {
-                            if matches!(status, Status::ConnectionLost | Status::SignedOut) {
-                                this.handle_disconnect(cx);
-                            } else {
-                                this.disconnect_buffers(cx);
-                            }
-                        });
-                    } else {
-                        break;
-                    }
-                }
+                let this = this.upgrade(&cx)?;
+                if status.is_connected() {
+                    this.update(&mut cx, |this, cx| this.handle_connect(cx))
+                        .await
+                        .log_err()?;
+                } else {
+                    this.update(&mut cx, |this, cx| this.handle_disconnect(cx));
+                }
             }
+            Some(())
         });
 
         Self {
@@ -100,6 +99,7 @@ impl ChannelStore {
             user_store,
             _rpc_subscription: rpc_subscription,
             _watch_connection_status: watch_connection_status,
+            disconnect_channel_buffers_task: None,
             _update_channels: cx.spawn_weak(|this, mut cx| async move {
                 while let Some(update_channels) = update_channels_rx.next().await {
                     if let Some(this) = this.upgrade(&cx) {
@@ -152,6 +152,15 @@ impl ChannelStore {
         self.channels_by_id.get(&channel_id)
     }
 
+    pub fn has_open_channel_buffer(&self, channel_id: ChannelId, cx: &AppContext) -> bool {
+        if let Some(buffer) = self.opened_buffers.get(&channel_id) {
+            if let OpenedChannelBuffer::Open(buffer) = buffer {
+                return buffer.upgrade(cx).is_some();
+            }
+        }
+        false
+    }
+
     pub fn open_channel_buffer(
         &mut self,
         channel_id: ChannelId,
@@ -482,8 +491,106 @@ impl ChannelStore {
         Ok(())
     }
 
-    fn handle_disconnect(&mut self, cx: &mut ModelContext<'_, ChannelStore>) {
-        self.disconnect_buffers(cx);
+    fn handle_connect(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+        self.disconnect_channel_buffers_task.take();
+
+        let mut buffer_versions = Vec::new();
+        for buffer in self.opened_buffers.values() {
+            if let OpenedChannelBuffer::Open(buffer) = buffer {
+                if let Some(buffer) = buffer.upgrade(cx) {
+                    let channel_buffer = buffer.read(cx);
+                    let buffer = channel_buffer.buffer().read(cx);
+                    buffer_versions.push(proto::ChannelBufferVersion {
+                        channel_id: channel_buffer.channel().id,
+                        epoch: channel_buffer.epoch(),
+                        version: language::proto::serialize_version(&buffer.version()),
+                    });
+                }
+            }
+        }
+
+        if buffer_versions.is_empty() {
+            return Task::ready(Ok(()));
+        }
+
+        let response = self.client.request(proto::RejoinChannelBuffers {
+            buffers: buffer_versions,
+        });
+
+        cx.spawn(|this, mut cx| async move {
+            let mut response = response.await?;
+
+            this.update(&mut cx, |this, cx| {
+                this.opened_buffers.retain(|_, buffer| match buffer {
+                    OpenedChannelBuffer::Open(channel_buffer) => {
+                        let Some(channel_buffer) = channel_buffer.upgrade(cx) else {
+                            return false;
+                        };
+
+                        channel_buffer.update(cx, |channel_buffer, cx| {
+                            let channel_id = channel_buffer.channel().id;
+                            if let Some(remote_buffer) = response
+                                .buffers
+                                .iter_mut()
+                                .find(|buffer| buffer.channel_id == channel_id)
+                            {
+                                let channel_id = channel_buffer.channel().id;
+                                let remote_version =
+                                    language::proto::deserialize_version(&remote_buffer.version);
+
+                                channel_buffer.replace_collaborators(
+                                    mem::take(&mut remote_buffer.collaborators),
+                                    cx,
+                                );
+
+                                let operations = channel_buffer
+                                    .buffer()
+                                    .update(cx, |buffer, cx| {
+                                        let outgoing_operations =
+                                            buffer.serialize_ops(Some(remote_version), cx);
+                                        let incoming_operations =
+                                            mem::take(&mut remote_buffer.operations)
+                                                .into_iter()
+                                                .map(language::proto::deserialize_operation)
+                                                .collect::<Result<Vec<_>>>()?;
+                                        buffer.apply_ops(incoming_operations, cx)?;
+                                        anyhow::Ok(outgoing_operations)
+                                    })
+                                    .log_err();
+
+                                if let Some(operations) = operations {
+                                    let client = this.client.clone();
+                                    cx.background()
+                                        .spawn(async move {
+                                            let operations = operations.await;
+                                            for chunk in
+                                                language::proto::split_operations(operations)
+                                            {
+                                                client
+                                                    .send(proto::UpdateChannelBuffer {
+                                                        channel_id,
+                                                        operations: chunk,
+                                                    })
+                                                    .ok();
+                                            }
+                                        })
+                                        .detach();
+                                    return true;
+                                }
+                            }
+
+                            channel_buffer.disconnect(cx);
+                            false
+                        })
+                    }
+                    OpenedChannelBuffer::Loading(_) => true,
+                });
+            });
+            anyhow::Ok(())
+        })
+    }
+
+    fn handle_disconnect(&mut self, cx: &mut ModelContext<Self>) {
         self.channels_by_id.clear();
         self.channel_invitations.clear();
         self.channel_participants.clear();
@@ -491,16 +598,23 @@ impl ChannelStore {
         self.channel_paths.clear();
         self.outgoing_invites.clear();
         cx.notify();
-    }
 
-    fn disconnect_buffers(&mut self, cx: &mut ModelContext<ChannelStore>) {
-        for (_, buffer) in self.opened_buffers.drain() {
-            if let OpenedChannelBuffer::Open(buffer) = buffer {
-                if let Some(buffer) = buffer.upgrade(cx) {
-                    buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
+        self.disconnect_channel_buffers_task.get_or_insert_with(|| {
+            cx.spawn_weak(|this, mut cx| async move {
+                cx.background().timer(RECONNECT_TIMEOUT).await;
+                if let Some(this) = this.upgrade(&cx) {
+                    this.update(&mut cx, |this, cx| {
+                        for (_, buffer) in this.opened_buffers.drain() {
+                            if let OpenedChannelBuffer::Open(buffer) = buffer {
+                                if let Some(buffer) = buffer.upgrade(cx) {
+                                    buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
+                                }
+                            }
+                        }
+                    });
                 }
-            }
-        }
+            })
+        });
     }
 
     pub(crate) fn update_channels(
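The channel-store hunks above replace immediate buffer teardown with a deferred one: `handle_disconnect` starts a `RECONNECT_TIMEOUT` timer task, and a successful reconnect cancels it by dropping the task handle (`disconnect_channel_buffers_task.take()`). A rough sketch of the same cancel-on-reconnect idea, assuming a plain thread and an atomic flag in place of gpui's `spawn_weak` tasks:

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;

const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);

// Sketch only: the real code cancels by dropping a gpui Task; here we
// approximate that with a shared cancellation flag.
struct ChannelStore {
    disconnect_cancelled: Option<Arc<AtomicBool>>,
}

impl ChannelStore {
    fn handle_disconnect(&mut self) {
        // Start the teardown timer only once per disconnect.
        if self.disconnect_cancelled.is_none() {
            let cancelled = Arc::new(AtomicBool::new(false));
            self.disconnect_cancelled = Some(cancelled.clone());
            thread::spawn(move || {
                thread::sleep(RECONNECT_TIMEOUT);
                if !cancelled.load(Ordering::SeqCst) {
                    println!("timed out: dropping open channel buffers");
                }
            });
        }
    }

    fn handle_connect(&mut self) {
        // Reconnected in time: cancel the pending teardown, analogous to
        // `self.disconnect_channel_buffers_task.take()` above.
        if let Some(cancelled) = self.disconnect_cancelled.take() {
            cancelled.store(true, Ordering::SeqCst);
        }
    }
}

fn main() {
    let mut store = ChannelStore { disconnect_cancelled: None };
    store.handle_disconnect();
    store.handle_connect(); // reconnects before the timer fires
}
```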
@@ -1011,9 +1011,9 @@ impl Client {
         credentials: &Credentials,
         cx: &AsyncAppContext,
     ) -> Task<Result<Connection, EstablishConnectionError>> {
-        let is_preview = cx.read(|cx| {
+        let use_preview_server = cx.read(|cx| {
             if cx.has_global::<ReleaseChannel>() {
-                *cx.global::<ReleaseChannel>() == ReleaseChannel::Preview
+                *cx.global::<ReleaseChannel>() != ReleaseChannel::Stable
             } else {
                 false
             }
@@ -1028,7 +1028,7 @@ impl Client {
 
         let http = self.http.clone();
         cx.background().spawn(async move {
-            let mut rpc_url = Self::get_rpc_url(http, is_preview).await?;
+            let mut rpc_url = Self::get_rpc_url(http, use_preview_server).await?;
             let rpc_host = rpc_url
                 .host_str()
                 .zip(rpc_url.port_or_known_default())
@@ -73,7 +73,7 @@ pub enum ClickhouseEvent {
     },
     Call {
         operation: &'static str,
-        room_id: u64,
+        room_id: Option<u64>,
         channel_id: Option<u64>,
     },
 }
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
 default-run = "collab"
 edition = "2021"
 name = "collab"
-version = "0.19.0"
+version = "0.20.0"
 publish = false
 
 [[bin]]
@@ -80,6 +80,7 @@ theme = { path = "../theme" }
 workspace = { path = "../workspace", features = ["test-support"] }
 collab_ui = { path = "../collab_ui", features = ["test-support"] }
 
+async-trait.workspace = true
 ctor.workspace = true
 env_logger.workspace = true
 indoc.workspace = true
@@ -435,6 +435,12 @@ pub struct ChannelsForUser {
     pub channels_with_admin_privileges: HashSet<ChannelId>,
 }
 
+#[derive(Debug)]
+pub struct RejoinedChannelBuffer {
+    pub buffer: proto::RejoinedChannelBuffer,
+    pub old_connection_id: ConnectionId,
+}
+
 #[derive(Clone)]
 pub struct JoinRoom {
     pub room: proto::Room,
@@ -498,6 +504,11 @@ pub struct RefreshedRoom {
     pub canceled_calls_to_user_ids: Vec<UserId>,
 }
 
+pub struct RefreshedChannelBuffer {
+    pub connection_ids: Vec<ConnectionId>,
+    pub removed_collaborators: Vec<proto::RemoveChannelBufferCollaborator>,
+}
+
 pub struct Project {
     pub collaborators: Vec<ProjectCollaborator>,
     pub worktrees: BTreeMap<u64, Worktree>,
@@ -10,8 +10,6 @@ impl Database {
         connection: ConnectionId,
     ) -> Result<proto::JoinChannelBufferResponse> {
         self.transaction(|tx| async move {
-            let tx = tx;
-
             self.check_user_is_channel_member(channel_id, user_id, &tx)
                 .await?;
 
@@ -70,7 +68,6 @@ impl Database {
                 .await?;
             collaborators.push(collaborator);
 
-            // Assemble the buffer state
             let (base_text, operations) = self.get_buffer_state(&buffer, &tx).await?;
 
             Ok(proto::JoinChannelBufferResponse {
@@ -78,6 +75,7 @@ impl Database {
                 replica_id: replica_id.to_proto() as u32,
                 base_text,
                 operations,
+                epoch: buffer.epoch as u64,
                 collaborators: collaborators
                     .into_iter()
                    .map(|collaborator| proto::Collaborator {
@@ -91,6 +89,154 @@ impl Database {
         .await
     }
 
+    pub async fn rejoin_channel_buffers(
+        &self,
+        buffers: &[proto::ChannelBufferVersion],
+        user_id: UserId,
+        connection_id: ConnectionId,
+    ) -> Result<Vec<RejoinedChannelBuffer>> {
+        self.transaction(|tx| async move {
+            let mut results = Vec::new();
+            for client_buffer in buffers {
+                let channel_id = ChannelId::from_proto(client_buffer.channel_id);
+                if self
+                    .check_user_is_channel_member(channel_id, user_id, &*tx)
+                    .await
+                    .is_err()
+                {
+                    log::info!("user is not a member of channel");
+                    continue;
+                }
+
+                let buffer = self.get_channel_buffer(channel_id, &*tx).await?;
+                let mut collaborators = channel_buffer_collaborator::Entity::find()
+                    .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
+                    .all(&*tx)
+                    .await?;
+
+                // If the buffer epoch hasn't changed since the client lost
+                // connection, then the client's buffer can be synchronized with
+                // the server's buffer.
+                if buffer.epoch as u64 != client_buffer.epoch {
+                    log::info!("can't rejoin buffer, epoch has changed");
+                    continue;
+                }
+
+                // Find the collaborator record for this user's previous lost
+                // connection. Update it with the new connection id.
+                let server_id = ServerId(connection_id.owner_id as i32);
+                let Some(self_collaborator) = collaborators.iter_mut().find(|c| {
+                    c.user_id == user_id
+                        && (c.connection_lost || c.connection_server_id != server_id)
+                }) else {
+                    log::info!("can't rejoin buffer, no previous collaborator found");
+                    continue;
+                };
+                let old_connection_id = self_collaborator.connection();
+                *self_collaborator = channel_buffer_collaborator::ActiveModel {
+                    id: ActiveValue::Unchanged(self_collaborator.id),
+                    connection_id: ActiveValue::Set(connection_id.id as i32),
+                    connection_server_id: ActiveValue::Set(ServerId(connection_id.owner_id as i32)),
+                    connection_lost: ActiveValue::Set(false),
+                    ..Default::default()
+                }
+                .update(&*tx)
+                .await?;
+
+                let client_version = version_from_wire(&client_buffer.version);
+                let serialization_version = self
+                    .get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &*tx)
+                    .await?;
+
+                let mut rows = buffer_operation::Entity::find()
+                    .filter(
+                        buffer_operation::Column::BufferId
+                            .eq(buffer.id)
+                            .and(buffer_operation::Column::Epoch.eq(buffer.epoch)),
+                    )
+                    .stream(&*tx)
+                    .await?;
+
+                // Find the server's version vector and any operations
+                // that the client has not seen.
+                let mut server_version = clock::Global::new();
+                let mut operations = Vec::new();
+                while let Some(row) = rows.next().await {
+                    let row = row?;
+                    let timestamp = clock::Lamport {
+                        replica_id: row.replica_id as u16,
+                        value: row.lamport_timestamp as u32,
+                    };
+                    server_version.observe(timestamp);
+                    if !client_version.observed(timestamp) {
+                        operations.push(proto::Operation {
+                            variant: Some(operation_from_storage(row, serialization_version)?),
+                        })
+                    }
+                }
+
+                results.push(RejoinedChannelBuffer {
+                    old_connection_id,
+                    buffer: proto::RejoinedChannelBuffer {
+                        channel_id: client_buffer.channel_id,
+                        version: version_to_wire(&server_version),
+                        operations,
+                        collaborators: collaborators
+                            .into_iter()
+                            .map(|collaborator| proto::Collaborator {
+                                peer_id: Some(collaborator.connection().into()),
+                                user_id: collaborator.user_id.to_proto(),
+                                replica_id: collaborator.replica_id.0 as u32,
+                            })
+                            .collect(),
+                    },
+                });
+            }
+
+            Ok(results)
+        })
+        .await
+    }
+
+    pub async fn clear_stale_channel_buffer_collaborators(
+        &self,
+        channel_id: ChannelId,
+        server_id: ServerId,
+    ) -> Result<RefreshedChannelBuffer> {
+        self.transaction(|tx| async move {
+            let collaborators = channel_buffer_collaborator::Entity::find()
+                .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
+                .all(&*tx)
+                .await?;
+
+            let mut connection_ids = Vec::new();
+            let mut removed_collaborators = Vec::new();
+            let mut collaborator_ids_to_remove = Vec::new();
+            for collaborator in &collaborators {
+                if !collaborator.connection_lost && collaborator.connection_server_id == server_id {
+                    connection_ids.push(collaborator.connection());
+                } else {
+                    removed_collaborators.push(proto::RemoveChannelBufferCollaborator {
+                        channel_id: channel_id.to_proto(),
+                        peer_id: Some(collaborator.connection().into()),
+                    });
+                    collaborator_ids_to_remove.push(collaborator.id);
+                }
+            }
+
+            channel_buffer_collaborator::Entity::delete_many()
+                .filter(channel_buffer_collaborator::Column::Id.is_in(collaborator_ids_to_remove))
+                .exec(&*tx)
+                .await?;
+
+            Ok(RefreshedChannelBuffer {
+                connection_ids,
+                removed_collaborators,
+            })
+        })
+        .await
+    }
+
     pub async fn leave_channel_buffer(
         &self,
         channel_id: ChannelId,
@@ -103,6 +249,39 @@ impl Database {
         .await
     }
 
+    pub async fn leave_channel_buffers(
+        &self,
+        connection: ConnectionId,
+    ) -> Result<Vec<(ChannelId, Vec<ConnectionId>)>> {
+        self.transaction(|tx| async move {
+            #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
+            enum QueryChannelIds {
+                ChannelId,
+            }
+
+            let channel_ids: Vec<ChannelId> = channel_buffer_collaborator::Entity::find()
+                .select_only()
+                .column(channel_buffer_collaborator::Column::ChannelId)
+                .filter(Condition::all().add(
+                    channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32),
+                ))
+                .into_values::<_, QueryChannelIds>()
+                .all(&*tx)
+                .await?;
+
+            let mut result = Vec::new();
+            for channel_id in channel_ids {
+                let collaborators = self
+                    .leave_channel_buffer_internal(channel_id, connection, &*tx)
+                    .await?;
+                result.push((channel_id, collaborators));
+            }
+
+            Ok(result)
+        })
+        .await
+    }
+
     pub async fn leave_channel_buffer_internal(
         &self,
         channel_id: ChannelId,
@@ -143,45 +322,12 @@ impl Database {
         drop(rows);
 
         if connections.is_empty() {
-            self.snapshot_buffer(channel_id, &tx).await?;
+            self.snapshot_channel_buffer(channel_id, &tx).await?;
         }
 
         Ok(connections)
     }
 
-    pub async fn leave_channel_buffers(
-        &self,
-        connection: ConnectionId,
-    ) -> Result<Vec<(ChannelId, Vec<ConnectionId>)>> {
-        self.transaction(|tx| async move {
-            #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
-            enum QueryChannelIds {
-                ChannelId,
-            }
-
-            let channel_ids: Vec<ChannelId> = channel_buffer_collaborator::Entity::find()
-                .select_only()
-                .column(channel_buffer_collaborator::Column::ChannelId)
-                .filter(Condition::all().add(
-                    channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32),
-                ))
-                .into_values::<_, QueryChannelIds>()
-                .all(&*tx)
-                .await?;
-
-            let mut result = Vec::new();
-            for channel_id in channel_ids {
-                let collaborators = self
-                    .leave_channel_buffer_internal(channel_id, connection, &*tx)
-                    .await?;
-                result.push((channel_id, collaborators));
-            }
-
-            Ok(result)
-        })
-        .await
-    }
-
     pub async fn get_channel_buffer_collaborators(
         &self,
         channel_id: ChannelId,
@@ -224,20 +370,9 @@ impl Database {
             .await?
             .ok_or_else(|| anyhow!("no such buffer"))?;
 
-        #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
-        enum QueryVersion {
-            OperationSerializationVersion,
-        }
-
-        let serialization_version: i32 = buffer
-            .find_related(buffer_snapshot::Entity)
-            .select_only()
-            .column(buffer_snapshot::Column::OperationSerializationVersion)
-            .filter(buffer_snapshot::Column::Epoch.eq(buffer.epoch))
-            .into_values::<_, QueryVersion>()
-            .one(&*tx)
-            .await?
-            .ok_or_else(|| anyhow!("missing buffer snapshot"))?;
+        let serialization_version = self
+            .get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &*tx)
+            .await?;
 
         let operations = operations
             .iter()
@@ -245,6 +380,16 @@ impl Database {
            .collect::<Vec<_>>();
         if !operations.is_empty() {
             buffer_operation::Entity::insert_many(operations)
+                .on_conflict(
+                    OnConflict::columns([
+                        buffer_operation::Column::BufferId,
+                        buffer_operation::Column::Epoch,
+                        buffer_operation::Column::LamportTimestamp,
+                        buffer_operation::Column::ReplicaId,
+                    ])
+                    .do_nothing()
+                    .to_owned(),
+                )
                 .exec(&*tx)
                 .await?;
         }
@@ -270,6 +415,38 @@ impl Database {
         .await
     }
 
+    async fn get_buffer_operation_serialization_version(
+        &self,
+        buffer_id: BufferId,
+        epoch: i32,
+        tx: &DatabaseTransaction,
+    ) -> Result<i32> {
+        Ok(buffer_snapshot::Entity::find()
+            .filter(buffer_snapshot::Column::BufferId.eq(buffer_id))
+            .filter(buffer_snapshot::Column::Epoch.eq(epoch))
+            .select_only()
+            .column(buffer_snapshot::Column::OperationSerializationVersion)
+            .into_values::<_, QueryOperationSerializationVersion>()
+            .one(&*tx)
+            .await?
+            .ok_or_else(|| anyhow!("missing buffer snapshot"))?)
+    }
+
+    async fn get_channel_buffer(
+        &self,
+        channel_id: ChannelId,
+        tx: &DatabaseTransaction,
+    ) -> Result<buffer::Model> {
+        Ok(channel::Model {
+            id: channel_id,
+            ..Default::default()
+        }
+        .find_related(buffer::Entity)
+        .one(&*tx)
+        .await?
+        .ok_or_else(|| anyhow!("no such buffer"))?)
+    }
+
     async fn get_buffer_state(
         &self,
         buffer: &buffer::Model,
@@ -303,27 +480,20 @@ impl Database {
            .await?;
         let mut operations = Vec::new();
         while let Some(row) = rows.next().await {
-            let row = row?;
-
-            let operation = operation_from_storage(row, version)?;
             operations.push(proto::Operation {
-                variant: Some(operation),
+                variant: Some(operation_from_storage(row?, version)?),
             })
         }
 
         Ok((base_text, operations))
     }
 
-    async fn snapshot_buffer(&self, channel_id: ChannelId, tx: &DatabaseTransaction) -> Result<()> {
-        let buffer = channel::Model {
-            id: channel_id,
-            ..Default::default()
-        }
-        .find_related(buffer::Entity)
-        .one(&*tx)
-        .await?
-        .ok_or_else(|| anyhow!("no such buffer"))?;
-
+    async fn snapshot_channel_buffer(
+        &self,
+        channel_id: ChannelId,
+        tx: &DatabaseTransaction,
+    ) -> Result<()> {
+        let buffer = self.get_channel_buffer(channel_id, tx).await?;
         let (base_text, operations) = self.get_buffer_state(&buffer, tx).await?;
         if operations.is_empty() {
             return Ok(());
@@ -527,6 +697,22 @@ fn version_from_wire(message: &[proto::VectorClockEntry]) -> clock::Global {
     version
 }
 
+fn version_to_wire(version: &clock::Global) -> Vec<proto::VectorClockEntry> {
+    let mut message = Vec::new();
+    for entry in version.iter() {
+        message.push(proto::VectorClockEntry {
+            replica_id: entry.replica_id as u32,
+            timestamp: entry.value,
+        });
+    }
+    message
+}
+
+#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
+enum QueryOperationSerializationVersion {
+    OperationSerializationVersion,
+}
+
 mod storage {
     #![allow(non_snake_case)]
     use prost::Message;
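`rejoin_channel_buffers` above rebuilds the server's version vector from the stored operation rows and returns only the operations the client's vector has not observed. A self-contained sketch of that bookkeeping, with simplified stand-ins for `clock::Lamport` and `clock::Global` (the real clock crate tracks more than a per-replica maximum, so treat this as illustrative only):

```rust
use std::collections::HashMap;

// Simplified stand-ins for clock::Lamport and clock::Global from the diff.
#[derive(Clone, Copy, Debug)]
struct Lamport {
    replica_id: u16,
    value: u32,
}

#[derive(Default, Debug)]
struct Global(HashMap<u16, u32>);

impl Global {
    // Record that an operation with this timestamp exists.
    fn observe(&mut self, t: Lamport) {
        let max = self.0.entry(t.replica_id).or_insert(0);
        *max = (*max).max(t.value);
    }

    // Has this version vector already seen the timestamp?
    fn observed(&self, t: Lamport) -> bool {
        self.0.get(&t.replica_id).copied().unwrap_or(0) >= t.value
    }
}

fn main() {
    // The server replays stored rows, building its own version vector and
    // collecting only the operations the client's vector has not observed.
    let mut client_version = Global::default();
    client_version.observe(Lamport { replica_id: 1, value: 3 });

    let stored_ops = [
        Lamport { replica_id: 1, value: 2 }, // already seen by the client
        Lamport { replica_id: 1, value: 4 }, // new
        Lamport { replica_id: 2, value: 1 }, // new replica, new op
    ];

    let mut server_version = Global::default();
    let mut to_send = Vec::new();
    for op in stored_ops {
        server_version.observe(op);
        if !client_version.observed(op) {
            to_send.push(op);
        }
    }
    println!("server version: {server_version:?}, ops to send: {to_send:?}");
}
```

Sending only unobserved operations keeps the rejoin payload proportional to what the client actually missed, rather than replaying the whole buffer history.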
@@ -1,6 +1,20 @@
 use super::*;
 
 impl Database {
+    #[cfg(test)]
+    pub async fn all_channels(&self) -> Result<Vec<(ChannelId, String)>> {
+        self.transaction(move |tx| async move {
+            let mut channels = Vec::new();
+            let mut rows = channel::Entity::find().stream(&*tx).await?;
+            while let Some(row) = rows.next().await {
+                let row = row?;
+                channels.push((row.id, row.name));
+            }
+            Ok(channels)
+        })
+        .await
+    }
+
     pub async fn create_root_channel(
         &self,
         name: &str,
@@ -1,7 +1,7 @@
 use super::*;
 
 impl Database {
-    pub async fn refresh_room(
+    pub async fn clear_stale_room_participants(
         &self,
         room_id: RoomId,
         new_server_id: ServerId,
@@ -14,31 +14,49 @@ impl Database {
         .await
     }
 
-    pub async fn stale_room_ids(
+    pub async fn stale_server_resource_ids(
         &self,
         environment: &str,
         new_server_id: ServerId,
-    ) -> Result<Vec<RoomId>> {
+    ) -> Result<(Vec<RoomId>, Vec<ChannelId>)> {
         self.transaction(|tx| async move {
             #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
-            enum QueryAs {
+            enum QueryRoomIds {
                 RoomId,
             }
 
+            #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
+            enum QueryChannelIds {
+                ChannelId,
+            }
+
             let stale_server_epochs = self
                 .stale_server_ids(environment, new_server_id, &tx)
                 .await?;
-            Ok(room_participant::Entity::find()
+            let room_ids = room_participant::Entity::find()
                 .select_only()
                 .column(room_participant::Column::RoomId)
                 .distinct()
                 .filter(
                     room_participant::Column::AnsweringConnectionServerId
-                        .is_in(stale_server_epochs),
+                        .is_in(stale_server_epochs.iter().copied()),
                 )
-                .into_values::<_, QueryAs>()
+                .into_values::<_, QueryRoomIds>()
                 .all(&*tx)
-                .await?)
+                .await?;
+            let channel_ids = channel_buffer_collaborator::Entity::find()
+                .select_only()
+                .column(channel_buffer_collaborator::Column::ChannelId)
+                .distinct()
+                .filter(
+                    channel_buffer_collaborator::Column::ConnectionServerId
+                        .is_in(stale_server_epochs.iter().copied()),
+                )
+                .into_values::<_, QueryChannelIds>()
+                .all(&*tx)
+                .await?;
+
+            Ok((room_ids, channel_ids))
         })
         .await
     }
@@ -251,6 +251,7 @@ impl Server {
             .add_request_handler(join_channel_buffer)
             .add_request_handler(leave_channel_buffer)
             .add_message_handler(update_channel_buffer)
+            .add_request_handler(rejoin_channel_buffers)
             .add_request_handler(get_channel_members)
             .add_request_handler(respond_to_channel_invite)
             .add_request_handler(join_channel)
@@ -277,13 +278,33 @@ impl Server {
             tracing::info!("waiting for cleanup timeout");
             timeout.await;
             tracing::info!("cleanup timeout expired, retrieving stale rooms");
-            if let Some(room_ids) = app_state
+            if let Some((room_ids, channel_ids)) = app_state
                 .db
-                .stale_room_ids(&app_state.config.zed_environment, server_id)
+                .stale_server_resource_ids(&app_state.config.zed_environment, server_id)
                 .await
                 .trace_err()
             {
                 tracing::info!(stale_room_count = room_ids.len(), "retrieved stale rooms");
+                tracing::info!(
+                    stale_channel_buffer_count = channel_ids.len(),
+                    "retrieved stale channel buffers"
+                );
+
+                for channel_id in channel_ids {
+                    if let Some(refreshed_channel_buffer) = app_state
+                        .db
+                        .clear_stale_channel_buffer_collaborators(channel_id, server_id)
+                        .await
+                        .trace_err()
+                    {
+                        for connection_id in refreshed_channel_buffer.connection_ids {
+                            for message in &refreshed_channel_buffer.removed_collaborators {
+                                peer.send(connection_id, message.clone()).trace_err();
+                            }
+                        }
+                    }
+                }
+
                 for room_id in room_ids {
                     let mut contacts_to_update = HashSet::default();
                     let mut canceled_calls_to_user_ids = Vec::new();
@@ -292,7 +313,7 @@ impl Server {
 
                     if let Some(mut refreshed_room) = app_state
                         .db
-                        .refresh_room(room_id, server_id)
+                        .clear_stale_room_participants(room_id, server_id)
                         .await
                         .trace_err()
                     {
@@ -854,13 +875,13 @@ async fn connection_lost(
         .await
         .trace_err();
 
-    leave_channel_buffers_for_session(&session)
-        .await
-        .trace_err();
-
     futures::select_biased! {
         _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
             log::info!("connection lost, removing all resources for user:{}, connection:{:?}", session.user_id, session.connection_id);
             leave_room_for_session(&session).await.trace_err();
+            leave_channel_buffers_for_session(&session)
+                .await
+                .trace_err();
 
             if !session
                 .connection_pool()
@@ -2547,6 +2568,41 @@ async fn update_channel_buffer(
     Ok(())
 }
 
+async fn rejoin_channel_buffers(
+    request: proto::RejoinChannelBuffers,
+    response: Response<proto::RejoinChannelBuffers>,
+    session: Session,
+) -> Result<()> {
+    let db = session.db().await;
+    let buffers = db
+        .rejoin_channel_buffers(&request.buffers, session.user_id, session.connection_id)
+        .await?;
+
+    for buffer in &buffers {
+        let collaborators_to_notify = buffer
+            .buffer
+            .collaborators
+            .iter()
+            .filter_map(|c| Some(c.peer_id?.into()));
+        channel_buffer_updated(
+            session.connection_id,
+            collaborators_to_notify,
+            &proto::UpdateChannelBufferCollaborator {
+                channel_id: buffer.buffer.channel_id,
+                old_peer_id: Some(buffer.old_connection_id.into()),
+                new_peer_id: Some(session.connection_id.into()),
+            },
+            &session.peer,
+        );
+    }
+
+    response.send(proto::RejoinChannelBuffersResponse {
+        buffers: buffers.into_iter().map(|b| b.buffer).collect(),
+    })?;
+
+    Ok(())
+}
+
 async fn leave_channel_buffer(
     request: proto::LeaveChannelBuffer,
     response: Response<proto::LeaveChannelBuffer>,
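The `rejoin_channel_buffers` handler above fans an `UpdateChannelBufferCollaborator` message out to every collaborator that still has a peer id, telling them which old connection was replaced. A minimal sketch of that `filter_map` fan-out, with hypothetical stand-in types:

```rust
// Hypothetical, simplified stand-ins for the proto types used above.
#[derive(Clone, Copy, PartialEq, Debug)]
struct PeerId(u64);

struct Collaborator {
    peer_id: Option<PeerId>,
}

// Mirror of the fan-out above: yield only collaborators that currently
// have a peer id, skipping the rest.
fn collaborators_to_notify(
    collaborators: &[Collaborator],
) -> impl Iterator<Item = PeerId> + '_ {
    collaborators.iter().filter_map(|c| c.peer_id)
}

fn main() {
    let collaborators = [
        Collaborator { peer_id: Some(PeerId(1)) },
        Collaborator { peer_id: None }, // e.g. not currently connected
        Collaborator { peer_id: Some(PeerId(9)) },
    ];
    for peer in collaborators_to_notify(&collaborators) {
        println!("send UpdateChannelBufferCollaborator to {peer:?}");
    }
}
```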
@@ -1,555 +1,18 @@
-use crate::{
-    db::{tests::TestDb, NewUserParams, UserId},
-    executor::Executor,
-    rpc::{Server, CLEANUP_TIMEOUT},
-    AppState,
-};
-use anyhow::anyhow;
-use call::{ActiveCall, Room};
-use channel::ChannelStore;
-use client::{
-    self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore,
-};
-use collections::{HashMap, HashSet};
-use fs::FakeFs;
-use futures::{channel::oneshot, StreamExt as _};
-use gpui::{executor::Deterministic, ModelHandle, Task, TestAppContext, WindowHandle};
-use language::LanguageRegistry;
-use parking_lot::Mutex;
-use project::{Project, WorktreeId};
-use settings::SettingsStore;
-use std::{
-    cell::{Ref, RefCell, RefMut},
-    env,
-    ops::{Deref, DerefMut},
-    path::Path,
-    sync::{
-        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
-        Arc,
-    },
-};
-use util::http::FakeHttpClient;
-use workspace::Workspace;
+use call::Room;
+use gpui::{ModelHandle, TestAppContext};
 
 mod channel_buffer_tests;
 mod channel_tests;
 mod integration_tests;
-mod randomized_integration_tests;
+mod random_channel_buffer_tests;
+mod random_project_collaboration_tests;
+mod randomized_test_helpers;
+mod test_server;
 
-struct TestServer {
-    app_state: Arc<AppState>,
-    server: Arc<Server>,
-    connection_killers: Arc<Mutex<HashMap<PeerId, Arc<AtomicBool>>>>,
-    forbid_connections: Arc<AtomicBool>,
-    _test_db: TestDb,
-    test_live_kit_server: Arc<live_kit_client::TestServer>,
-}
-
-impl TestServer {
-    async fn start(deterministic: &Arc<Deterministic>) -> Self {
-        static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);
-
-        let use_postgres = env::var("USE_POSTGRES").ok();
-        let use_postgres = use_postgres.as_deref();
-        let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
-            TestDb::postgres(deterministic.build_background())
-        } else {
-            TestDb::sqlite(deterministic.build_background())
-        };
-        let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
-        let live_kit_server = live_kit_client::TestServer::create(
-            format!("http://livekit.{}.test", live_kit_server_id),
-            format!("devkey-{}", live_kit_server_id),
-            format!("secret-{}", live_kit_server_id),
-            deterministic.build_background(),
-        )
-        .unwrap();
-        let app_state = Self::build_app_state(&test_db, &live_kit_server).await;
-        let epoch = app_state
-            .db
-            .create_server(&app_state.config.zed_environment)
-            .await
-            .unwrap();
-        let server = Server::new(
-            epoch,
-            app_state.clone(),
-            Executor::Deterministic(deterministic.build_background()),
-        );
-        server.start().await.unwrap();
-        // Advance clock to ensure the server's cleanup task is finished.
-        deterministic.advance_clock(CLEANUP_TIMEOUT);
-        Self {
-            app_state,
-            server,
-            connection_killers: Default::default(),
-            forbid_connections: Default::default(),
-            _test_db: test_db,
-            test_live_kit_server: live_kit_server,
-        }
-    }
-
-    async fn reset(&self) {
-        self.app_state.db.reset();
-        let epoch = self
-            .app_state
-            .db
-            .create_server(&self.app_state.config.zed_environment)
-            .await
-            .unwrap();
-        self.server.reset(epoch);
-    }
-
-    async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
-        cx.update(|cx| {
-            if cx.has_global::<SettingsStore>() {
-                panic!("Same cx used to create two test clients")
-            }
-            cx.set_global(SettingsStore::test(cx));
-        });
-
-        let http = FakeHttpClient::with_404_response();
-        let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await
-        {
-            user.id
-        } else {
-            self.app_state
-                .db
-                .create_user(
-                    &format!("{name}@example.com"),
-                    false,
-                    NewUserParams {
-                        github_login: name.into(),
-                        github_user_id: 0,
-                        invite_count: 0,
-                    },
-                )
-                .await
-                .expect("creating user failed")
-                .user_id
-        };
-        let client_name = name.to_string();
-        let mut client = cx.read(|cx| Client::new(http.clone(), cx));
-        let server = self.server.clone();
-        let db = self.app_state.db.clone();
-        let connection_killers = self.connection_killers.clone();
-        let forbid_connections = self.forbid_connections.clone();
-
-        Arc::get_mut(&mut client)
-            .unwrap()
-            .set_id(user_id.0 as usize)
-            .override_authenticate(move |cx| {
-                cx.spawn(|_| async move {
-                    let access_token = "the-token".to_string();
-                    Ok(Credentials {
-                        user_id: user_id.0 as u64,
-                        access_token,
-                    })
-                })
-            })
-            .override_establish_connection(move |credentials, cx| {
-                assert_eq!(credentials.user_id, user_id.0 as u64);
-                assert_eq!(credentials.access_token, "the-token");
-
-                let server = server.clone();
-                let db = db.clone();
-                let connection_killers = connection_killers.clone();
-                let forbid_connections = forbid_connections.clone();
-                let client_name = client_name.clone();
-                cx.spawn(move |cx| async move {
-                    if forbid_connections.load(SeqCst) {
-                        Err(EstablishConnectionError::other(anyhow!(
-                            "server is forbidding connections"
-                        )))
-                    } else {
-                        let (client_conn, server_conn, killed) =
-                            Connection::in_memory(cx.background());
-                        let (connection_id_tx, connection_id_rx) = oneshot::channel();
-                        let user = db
-                            .get_user_by_id(user_id)
-                            .await
-                            .expect("retrieving user failed")
-                            .unwrap();
-                        cx.background()
-                            .spawn(server.handle_connection(
-                                server_conn,
-                                client_name,
-                                user,
-                                Some(connection_id_tx),
-                                Executor::Deterministic(cx.background()),
-                            ))
-                            .detach();
-                        let connection_id = connection_id_rx.await.unwrap();
-                        connection_killers
-                            .lock()
-                            .insert(connection_id.into(), killed);
-                        Ok(client_conn)
-                    }
-                })
-            });
-
-        let fs = FakeFs::new(cx.background());
-        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
-        let channel_store =
-            cx.add_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
-        let app_state = Arc::new(workspace::AppState {
-            client: client.clone(),
-            user_store: user_store.clone(),
-            channel_store: channel_store.clone(),
-            languages: Arc::new(LanguageRegistry::test()),
-            fs: fs.clone(),
-            build_window_options: |_, _, _| Default::default(),
-            initialize_workspace: |_, _, _, _| Task::ready(Ok(())),
-            background_actions: || &[],
-        });
-
-        cx.update(|cx| {
-            theme::init((), cx);
-            Project::init(&client, cx);
-            client::init(&client, cx);
-            language::init(cx);
-            editor::init_settings(cx);
-            workspace::init(app_state.clone(), cx);
-            audio::init((), cx);
-            call::init(client.clone(), user_store.clone(), cx);
-            channel::init(&client);
-        });
-
-        client
-            .authenticate_and_connect(false, &cx.to_async())
-            .await
-            .unwrap();
-
-        let client = TestClient {
-            app_state,
-            username: name.to_string(),
-            state: Default::default(),
-        };
-        client.wait_for_current_user(cx).await;
-        client
-    }
-
-    fn disconnect_client(&self, peer_id: PeerId) {
-        self.connection_killers
-            .lock()
-            .remove(&peer_id)
-            .unwrap()
-            .store(true, SeqCst);
-    }
-
-    fn forbid_connections(&self) {
-        self.forbid_connections.store(true, SeqCst);
-    }
-
-    fn allow_connections(&self) {
-        self.forbid_connections.store(false, SeqCst);
-    }
-
-    async fn make_contacts(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
-        for ix in 1..clients.len() {
-            let (left, right) = clients.split_at_mut(ix);
-            let (client_a, cx_a) = left.last_mut().unwrap();
-            for (client_b, cx_b) in right {
-                client_a
-                    .app_state
-                    .user_store
-                    .update(*cx_a, |store, cx| {
-                        store.request_contact(client_b.user_id().unwrap(), cx)
-                    })
-                    .await
-                    .unwrap();
-                cx_a.foreground().run_until_parked();
-                client_b
-                    .app_state
-                    .user_store
-                    .update(*cx_b, |store, cx| {
-                        store.respond_to_contact_request(client_a.user_id().unwrap(), true, cx)
-                    })
-                    .await
-                    .unwrap();
-            }
-        }
-    }
-
-    async fn make_channel(
-        &self,
-        channel: &str,
-        admin: (&TestClient, &mut TestAppContext),
-        members: &mut [(&TestClient, &mut TestAppContext)],
-    ) -> u64 {
-        let (admin_client, admin_cx) = admin;
-        let channel_id = admin_client
-            .app_state
-            .channel_store
-            .update(admin_cx, |channel_store, cx| {
-                channel_store.create_channel(channel, None, cx)
-            })
-            .await
-            .unwrap();
-
-        for (member_client, member_cx) in members {
-            admin_client
-                .app_state
-                .channel_store
-                .update(admin_cx, |channel_store, cx| {
-                    channel_store.invite_member(
-                        channel_id,
-                        member_client.user_id().unwrap(),
-                        false,
-                        cx,
-                    )
-                })
-                .await
-                .unwrap();
-
-            admin_cx.foreground().run_until_parked();
-
-            member_client
-                .app_state
-                .channel_store
-                .update(*member_cx, |channels, _| {
-                    channels.respond_to_channel_invite(channel_id, true)
-                })
-                .await
-                .unwrap();
-        }
-
-        channel_id
-    }
-
-    async fn create_room(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
-        self.make_contacts(clients).await;
-
-        let (left, right) = clients.split_at_mut(1);
-        let (_client_a, cx_a) = &mut left[0];
-        let active_call_a = cx_a.read(ActiveCall::global);
-
-        for (client_b, cx_b) in right {
-            let user_id_b = client_b.current_user_id(*cx_b).to_proto();
-            active_call_a
-                .update(*cx_a, |call, cx| call.invite(user_id_b, None, cx))
-                .await
-                .unwrap();
-
-            cx_b.foreground().run_until_parked();
-            let active_call_b = cx_b.read(ActiveCall::global);
-            active_call_b
-                .update(*cx_b, |call, cx| call.accept_incoming(cx))
-                .await
-                .unwrap();
-        }
-    }
-
-    async fn build_app_state(
-        test_db: &TestDb,
-        fake_server: &live_kit_client::TestServer,
-    ) -> Arc<AppState> {
-        Arc::new(AppState {
-            db: test_db.db().clone(),
-            live_kit_client: Some(Arc::new(fake_server.create_api_client())),
-            config: Default::default(),
-        })
-    }
-}
-
-impl Deref for TestServer {
-    type Target = Server;
-
-    fn deref(&self) -> &Self::Target {
-        &self.server
-    }
-}
-
-impl Drop for TestServer {
-    fn drop(&mut self) {
-        self.server.teardown();
-        self.test_live_kit_server.teardown().unwrap();
-    }
-}
-
-struct TestClient {
-    username: String,
-    state: RefCell<TestClientState>,
-    app_state: Arc<workspace::AppState>,
-}
-
-#[derive(Default)]
-struct TestClientState {
-    local_projects: Vec<ModelHandle<Project>>,
-    remote_projects: Vec<ModelHandle<Project>>,
-    buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
-}
-
-impl Deref for TestClient {
-    type Target = Arc<Client>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.app_state.client
-    }
-}
-
-struct ContactsSummary {
-    pub current: Vec<String>,
-    pub outgoing_requests: Vec<String>,
-    pub incoming_requests: Vec<String>,
-}
-
-impl TestClient {
-    pub fn fs(&self) -> &FakeFs {
-        self.app_state.fs.as_fake()
-    }
-
-    pub fn channel_store(&self) -> &ModelHandle<ChannelStore> {
-        &self.app_state.channel_store
-    }
-
-    pub fn user_store(&self) -> &ModelHandle<UserStore> {
-        &self.app_state.user_store
-    }
-
-    pub fn language_registry(&self) -> &Arc<LanguageRegistry> {
-        &self.app_state.languages
-    }
-
-    pub fn client(&self) -> &Arc<Client> {
-        &self.app_state.client
-    }
-
-    pub fn current_user_id(&self, cx: &TestAppContext) -> UserId {
-        UserId::from_proto(
-            self.app_state
-                .user_store
-                .read_with(cx, |user_store, _| user_store.current_user().unwrap().id),
-        )
-    }
-
-    async fn wait_for_current_user(&self, cx: &TestAppContext) {
-        let mut authed_user = self
-            .app_state
-            .user_store
-            .read_with(cx, |user_store, _| user_store.watch_current_user());
-        while authed_user.next().await.unwrap().is_none() {}
-    }
-
-    async fn clear_contacts(&self, cx: &mut TestAppContext) {
-        self.app_state
-            .user_store
-            .update(cx, |store, _| store.clear_contacts())
-            .await;
-    }
-
-    fn local_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
-        Ref::map(self.state.borrow(), |state| &state.local_projects)
-    }
-
-    fn remote_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
-        Ref::map(self.state.borrow(), |state| &state.remote_projects)
-    }
-
-    fn local_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
-        RefMut::map(self.state.borrow_mut(), |state| &mut state.local_projects)
-    }
-
-    fn remote_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
-        RefMut::map(self.state.borrow_mut(), |state| &mut state.remote_projects)
-    }
-
-    fn buffers_for_project<'a>(
-        &'a self,
-        project: &ModelHandle<Project>,
-    ) -> impl DerefMut<Target = HashSet<ModelHandle<language::Buffer>>> + 'a {
-        RefMut::map(self.state.borrow_mut(), |state| {
-            state.buffers.entry(project.clone()).or_default()
-        })
-    }
-
-    fn buffers<'a>(
-        &'a self,
-    ) -> impl DerefMut<Target = HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>> + 'a
-    {
-        RefMut::map(self.state.borrow_mut(), |state| &mut state.buffers)
-    }
-
-    fn summarize_contacts(&self, cx: &TestAppContext) -> ContactsSummary {
-        self.app_state
-            .user_store
-            .read_with(cx, |store, _| ContactsSummary {
-                current: store
-                    .contacts()
-                    .iter()
-                    .map(|contact| contact.user.github_login.clone())
-                    .collect(),
-                outgoing_requests: store
-                    .outgoing_contact_requests()
-                    .iter()
-                    .map(|user| user.github_login.clone())
-                    .collect(),
-                incoming_requests: store
-                    .incoming_contact_requests()
-                    .iter()
-                    .map(|user| user.github_login.clone())
-                    .collect(),
-            })
-    }
-
-    async fn build_local_project(
-        &self,
-        root_path: impl AsRef<Path>,
-        cx: &mut TestAppContext,
-    ) -> (ModelHandle<Project>, WorktreeId) {
-        let project = cx.update(|cx| {
-            Project::local(
-                self.client().clone(),
-                self.app_state.user_store.clone(),
-                self.app_state.languages.clone(),
-                self.app_state.fs.clone(),
-                cx,
-            )
-        });
-        let (worktree, _) = project
-            .update(cx, |p, cx| {
-                p.find_or_create_local_worktree(root_path, true, cx)
-            })
-            .await
-            .unwrap();
-        worktree
-            .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
-            .await;
-        (project, worktree.read_with(cx, |tree, _| tree.id()))
-    }
-
-    async fn build_remote_project(
-        &self,
-        host_project_id: u64,
-        guest_cx: &mut TestAppContext,
-    ) -> ModelHandle<Project> {
-        let active_call = guest_cx.read(ActiveCall::global);
-        let room = active_call.read_with(guest_cx, |call, _| call.room().unwrap().clone());
-        room.update(guest_cx, |room, cx| {
-            room.join_project(
-                host_project_id,
-                self.app_state.languages.clone(),
-                self.app_state.fs.clone(),
-                cx,
-            )
-        })
-        .await
-        .unwrap()
-    }
-
-    fn build_workspace(
-        &self,
-        project: &ModelHandle<Project>,
-        cx: &mut TestAppContext,
-    ) -> WindowHandle<Workspace> {
-        cx.add_window(|cx| Workspace::new(0, project.clone(), self.app_state.clone(), cx))
-    }
-}
-
-impl Drop for TestClient {
-    fn drop(&mut self) {
-        self.app_state.client.teardown();
-    }
-}
+pub use randomized_test_helpers::{
+    run_randomized_test, save_randomized_test_plan, RandomizedTest, TestError, UserTestPlan,
+};
+pub use test_server::{TestClient, TestServer};
 
 #[derive(Debug, Eq, PartialEq)]
 struct RoomParticipants {
@@ -1,4 +1,7 @@
-use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
+use crate::{
+    rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
+    tests::TestServer,
+};
 use call::ActiveCall;
 use channel::Channel;
 use client::UserId;
@@ -21,20 +24,19 @@ async fn test_core_channel_buffers(
     let client_a = server.create_client(cx_a, "user_a").await;
     let client_b = server.create_client(cx_b, "user_b").await;
 
-    let zed_id = server
+    let channel_id = server
         .make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
         .await;
 
     // Client A joins the channel buffer
     let channel_buffer_a = client_a
         .channel_store()
-        .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
         .await
         .unwrap();
 
     // Client A edits the buffer
     let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer());
 
     buffer_a.update(cx_a, |buffer, cx| {
         buffer.edit([(0..0, "hello world")], None, cx)
     });
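The `edit` calls in these tests take `(range, new_text)` pairs addressed in buffer offsets. As a rough illustration of the semantics only (a hypothetical helper on a plain String, not Zed's actual Buffer API):

// Hypothetical stand-in for Buffer::edit: apply (byte_range, replacement)
// pairs to a String, back to front so earlier ranges stay valid.
fn apply_edits(text: &mut String, mut edits: Vec<(std::ops::Range<usize>, &str)>) {
    edits.sort_by_key(|(range, _)| std::cmp::Reverse(range.start));
    for (range, replacement) in edits {
        text.replace_range(range, replacement);
    }
}

Starting from an empty buffer, the edit (0..0, "hello world") inserts "hello world", and a later (0..5, "goodbye") replaces the first five bytes to give "goodbye world".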
@@ -45,17 +47,15 @@ async fn test_core_channel_buffers(
         buffer.edit([(0..5, "goodbye")], None, cx)
     });
     buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx));
-    deterministic.run_until_parked();
-
     assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world");
     deterministic.run_until_parked();
 
     // Client B joins the channel buffer
     let channel_buffer_b = client_b
         .channel_store()
-        .update(cx_b, |channel, cx| channel.open_channel_buffer(zed_id, cx))
+        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
         .await
         .unwrap();
 
     channel_buffer_b.read_with(cx_b, |buffer, _| {
         assert_collaborators(
             buffer.collaborators(),
@@ -91,9 +91,7 @@ async fn test_core_channel_buffers(
     // Client A rejoins the channel buffer
     let _channel_buffer_a = client_a
         .channel_store()
-        .update(cx_a, |channels, cx| {
-            channels.open_channel_buffer(zed_id, cx)
-        })
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
         .await
         .unwrap();
     deterministic.run_until_parked();
@@ -136,7 +134,7 @@ async fn test_channel_buffer_replica_ids(
 
     let channel_id = server
         .make_channel(
-            "zed",
+            "the-channel",
             (&client_a, cx_a),
             &mut [(&client_b, cx_b), (&client_c, cx_c)],
         )
@@ -160,23 +158,17 @@ async fn test_channel_buffer_replica_ids(
     // C first so that the replica IDs in the project and the channel buffer are different
     let channel_buffer_c = client_c
         .channel_store()
-        .update(cx_c, |channel, cx| {
-            channel.open_channel_buffer(channel_id, cx)
-        })
+        .update(cx_c, |store, cx| store.open_channel_buffer(channel_id, cx))
         .await
         .unwrap();
     let channel_buffer_b = client_b
         .channel_store()
-        .update(cx_b, |channel, cx| {
-            channel.open_channel_buffer(channel_id, cx)
-        })
+        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
         .await
         .unwrap();
     let channel_buffer_a = client_a
         .channel_store()
-        .update(cx_a, |channel, cx| {
-            channel.open_channel_buffer(channel_id, cx)
-        })
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
         .await
         .unwrap();
 
@@ -286,28 +278,30 @@ async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mu
     let mut server = TestServer::start(&deterministic).await;
     let client_a = server.create_client(cx_a, "user_a").await;
 
-    let zed_id = server.make_channel("zed", (&client_a, cx_a), &mut []).await;
+    let channel_id = server
+        .make_channel("the-channel", (&client_a, cx_a), &mut [])
+        .await;
 
     let channel_buffer_1 = client_a
         .channel_store()
-        .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx));
     let channel_buffer_2 = client_a
         .channel_store()
-        .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx));
     let channel_buffer_3 = client_a
         .channel_store()
-        .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx));
 
     // All concurrent tasks for opening a channel buffer return the same model handle.
-    let (channel_buffer_1, channel_buffer_2, channel_buffer_3) =
+    let (channel_buffer, channel_buffer_2, channel_buffer_3) =
         future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3)
             .await
             .unwrap();
-    let model_id = channel_buffer_1.id();
-    assert_eq!(channel_buffer_1, channel_buffer_2);
-    assert_eq!(channel_buffer_1, channel_buffer_3);
+    let channel_buffer_model_id = channel_buffer.id();
+    assert_eq!(channel_buffer, channel_buffer_2);
+    assert_eq!(channel_buffer, channel_buffer_3);
 
-    channel_buffer_1.update(cx_a, |buffer, cx| {
+    channel_buffer.update(cx_a, |buffer, cx| {
         buffer.buffer().update(cx, |buffer, cx| {
             buffer.edit([(0..0, "hello")], None, cx);
         })
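The "same model handle" behavior asserted above is the classic request-coalescing pattern: concurrent opens for the same key share a single in-flight future. A minimal sketch of that pattern, under stated assumptions (the map, types, and names here are invented for illustration; this is not the ChannelStore implementation):

use futures::future::{FutureExt, Shared};
use std::{collections::HashMap, future::Future, pin::Pin, sync::{Arc, Mutex}};

type OpenFuture = Shared<Pin<Box<dyn Future<Output = Arc<str>> + Send>>>;

#[derive(Default)]
struct OpenCoalescer {
    pending: Mutex<HashMap<u64, OpenFuture>>,
}

impl OpenCoalescer {
    // Every caller asking for the same id gets a clone of one shared future,
    // so all of them resolve to the same Arc.
    fn open(&self, id: u64) -> OpenFuture {
        self.pending
            .lock()
            .unwrap()
            .entry(id)
            .or_insert_with(|| {
                async move { Arc::from(format!("buffer-{id}")) }.boxed().shared()
            })
            .clone()
    }
}

Dropping all clones of the shared handle releases the underlying value, which is why the test below expects a fresh model id after reopening.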
@@ -315,7 +309,7 @@ async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mu
     deterministic.run_until_parked();
 
     cx_a.update(|_| {
-        drop(channel_buffer_1);
+        drop(channel_buffer);
         drop(channel_buffer_2);
         drop(channel_buffer_3);
     });
@@ -324,10 +318,10 @@ async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mu
     // The channel buffer can be reopened after dropping it.
     let channel_buffer = client_a
         .channel_store()
-        .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
         .await
         .unwrap();
-    assert_ne!(channel_buffer.id(), model_id);
+    assert_ne!(channel_buffer.id(), channel_buffer_model_id);
     channel_buffer.update(cx_a, |buffer, cx| {
         buffer.buffer().update(cx, |buffer, _| {
             assert_eq!(buffer.text(), "hello");
@@ -347,22 +341,17 @@ async fn test_channel_buffer_disconnect(
     let client_b = server.create_client(cx_b, "user_b").await;
 
     let channel_id = server
-        .make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
+        .make_channel("the-channel", (&client_a, cx_a), &mut [(&client_b, cx_b)])
         .await;
 
     let channel_buffer_a = client_a
         .channel_store()
-        .update(cx_a, |channel, cx| {
-            channel.open_channel_buffer(channel_id, cx)
-        })
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
         .await
         .unwrap();
 
     let channel_buffer_b = client_b
         .channel_store()
-        .update(cx_b, |channel, cx| {
-            channel.open_channel_buffer(channel_id, cx)
-        })
+        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
         .await
         .unwrap();
 
@@ -375,7 +364,7 @@ async fn test_channel_buffer_disconnect(
             buffer.channel().as_ref(),
             &Channel {
                 id: channel_id,
-                name: "zed".to_string()
+                name: "the-channel".to_string()
             }
         );
         assert!(!buffer.is_connected());
@@ -403,13 +392,180 @@ async fn test_channel_buffer_disconnect(
             buffer.channel().as_ref(),
             &Channel {
                 id: channel_id,
-                name: "zed".to_string()
+                name: "the-channel".to_string()
             }
         );
         assert!(!buffer.is_connected());
     });
 }
 
+#[gpui::test]
+async fn test_rejoin_channel_buffer(
+    deterministic: Arc<Deterministic>,
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+) {
+    deterministic.forbid_parking();
+    let mut server = TestServer::start(&deterministic).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+
+    let channel_id = server
+        .make_channel("the-channel", (&client_a, cx_a), &mut [(&client_b, cx_b)])
+        .await;
+
+    let channel_buffer_a = client_a
+        .channel_store()
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
+        .await
+        .unwrap();
+    let channel_buffer_b = client_b
+        .channel_store()
+        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
+        .await
+        .unwrap();
+
+    channel_buffer_a.update(cx_a, |buffer, cx| {
+        buffer.buffer().update(cx, |buffer, cx| {
+            buffer.edit([(0..0, "1")], None, cx);
+        })
+    });
+    deterministic.run_until_parked();
+
+    // Client A disconnects.
+    server.forbid_connections();
+    server.disconnect_client(client_a.peer_id().unwrap());
+
+    // Both clients make an edit.
+    channel_buffer_a.update(cx_a, |buffer, cx| {
+        buffer.buffer().update(cx, |buffer, cx| {
+            buffer.edit([(1..1, "2")], None, cx);
+        })
+    });
+    channel_buffer_b.update(cx_b, |buffer, cx| {
+        buffer.buffer().update(cx, |buffer, cx| {
+            buffer.edit([(0..0, "0")], None, cx);
+        })
+    });
+
+    // Both clients see their own edit.
+    deterministic.run_until_parked();
+    channel_buffer_a.read_with(cx_a, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "12");
+    });
+    channel_buffer_b.read_with(cx_b, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "01");
+    });
+
+    // Client A reconnects. Both clients see each other's edits, and see
+    // the same collaborators.
+    server.allow_connections();
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
+    channel_buffer_a.read_with(cx_a, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "012");
+    });
+    channel_buffer_b.read_with(cx_b, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "012");
+    });
+
+    channel_buffer_a.read_with(cx_a, |buffer_a, _| {
+        channel_buffer_b.read_with(cx_b, |buffer_b, _| {
+            assert_eq!(buffer_a.collaborators(), buffer_b.collaborators());
+        });
+    });
+}
+
+#[gpui::test]
+async fn test_channel_buffers_and_server_restarts(
+    deterministic: Arc<Deterministic>,
+    cx_a: &mut TestAppContext,
+    cx_b: &mut TestAppContext,
+    cx_c: &mut TestAppContext,
+) {
+    deterministic.forbid_parking();
+    let mut server = TestServer::start(&deterministic).await;
+    let client_a = server.create_client(cx_a, "user_a").await;
+    let client_b = server.create_client(cx_b, "user_b").await;
+    let client_c = server.create_client(cx_c, "user_c").await;
+
+    let channel_id = server
+        .make_channel(
+            "the-channel",
+            (&client_a, cx_a),
+            &mut [(&client_b, cx_b), (&client_c, cx_c)],
+        )
+        .await;
+
+    let channel_buffer_a = client_a
+        .channel_store()
+        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
+        .await
+        .unwrap();
+    let channel_buffer_b = client_b
+        .channel_store()
+        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
+        .await
+        .unwrap();
+    let _channel_buffer_c = client_c
+        .channel_store()
+        .update(cx_c, |store, cx| store.open_channel_buffer(channel_id, cx))
+        .await
+        .unwrap();
+
+    channel_buffer_a.update(cx_a, |buffer, cx| {
+        buffer.buffer().update(cx, |buffer, cx| {
+            buffer.edit([(0..0, "1")], None, cx);
+        })
+    });
+    deterministic.run_until_parked();
+
+    // Client C can't reconnect.
+    client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
+
+    // Server stops.
+    server.reset().await;
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
+
+    // While the server is down, both clients make an edit.
+    channel_buffer_a.update(cx_a, |buffer, cx| {
+        buffer.buffer().update(cx, |buffer, cx| {
+            buffer.edit([(1..1, "2")], None, cx);
+        })
+    });
+    channel_buffer_b.update(cx_b, |buffer, cx| {
+        buffer.buffer().update(cx, |buffer, cx| {
+            buffer.edit([(0..0, "0")], None, cx);
+        })
+    });
+
+    // Server restarts.
+    server.start().await.unwrap();
+    deterministic.advance_clock(CLEANUP_TIMEOUT);
+
+    // Clients reconnect. Clients A and B see each other's edits, and see
+    // that client C has disconnected.
+    channel_buffer_a.read_with(cx_a, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "012");
+    });
+    channel_buffer_b.read_with(cx_b, |buffer, cx| {
+        assert_eq!(buffer.buffer().read(cx).text(), "012");
+    });
+
+    channel_buffer_a.read_with(cx_a, |buffer_a, _| {
+        channel_buffer_b.read_with(cx_b, |buffer_b, _| {
+            assert_eq!(
+                buffer_a
+                    .collaborators()
+                    .iter()
+                    .map(|c| c.user_id)
+                    .collect::<Vec<_>>(),
+                vec![client_a.user_id().unwrap(), client_b.user_id().unwrap()]
+            );
+            assert_eq!(buffer_a.collaborators(), buffer_b.collaborators());
+        });
+    });
+}
+
 #[track_caller]
 fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option<UserId>]) {
     assert_eq!(
288
crates/collab/src/tests/random_channel_buffer_tests.rs
Normal file
@@ -0,0 +1,288 @@
use super::{run_randomized_test, RandomizedTest, TestClient, TestError, TestServer, UserTestPlan};
use anyhow::Result;
use async_trait::async_trait;
use gpui::{executor::Deterministic, TestAppContext};
use rand::prelude::*;
use serde_derive::{Deserialize, Serialize};
use std::{ops::Range, rc::Rc, sync::Arc};
use text::Bias;

#[gpui::test(
    iterations = 100,
    on_failure = "crate::tests::save_randomized_test_plan"
)]
async fn test_random_channel_buffers(
    cx: &mut TestAppContext,
    deterministic: Arc<Deterministic>,
    rng: StdRng,
) {
    run_randomized_test::<RandomChannelBufferTest>(cx, deterministic, rng).await;
}

struct RandomChannelBufferTest;

#[derive(Clone, Serialize, Deserialize)]
enum ChannelBufferOperation {
    JoinChannelNotes {
        channel_name: String,
    },
    LeaveChannelNotes {
        channel_name: String,
    },
    EditChannelNotes {
        channel_name: String,
        edits: Vec<(Range<usize>, Arc<str>)>,
    },
    Noop,
}

const CHANNEL_COUNT: usize = 3;

#[async_trait(?Send)]
impl RandomizedTest for RandomChannelBufferTest {
    type Operation = ChannelBufferOperation;

    async fn initialize(server: &mut TestServer, users: &[UserTestPlan]) {
        let db = &server.app_state.db;
        for ix in 0..CHANNEL_COUNT {
            let id = db
                .create_channel(
                    &format!("channel-{ix}"),
                    None,
                    &format!("livekit-room-{ix}"),
                    users[0].user_id,
                )
                .await
                .unwrap();
            for user in &users[1..] {
                db.invite_channel_member(id, user.user_id, users[0].user_id, false)
                    .await
                    .unwrap();
                db.respond_to_channel_invite(id, user.user_id, true)
                    .await
                    .unwrap();
            }
        }
    }

    fn generate_operation(
        client: &TestClient,
        rng: &mut StdRng,
        _: &mut UserTestPlan,
        cx: &TestAppContext,
    ) -> ChannelBufferOperation {
        let channel_store = client.channel_store().clone();
        let channel_buffers = client.channel_buffers();

        // When signed out, we can't do anything unless a channel buffer is
        // already open.
        if channel_buffers.is_empty()
            && channel_store.read_with(cx, |store, _| store.channel_count() == 0)
        {
            return ChannelBufferOperation::Noop;
        }

        loop {
            match rng.gen_range(0..100_u32) {
                0..=29 => {
                    let channel_name = client.channel_store().read_with(cx, |store, cx| {
                        store.channels().find_map(|(_, channel)| {
                            if store.has_open_channel_buffer(channel.id, cx) {
                                None
                            } else {
                                Some(channel.name.clone())
                            }
                        })
                    });
                    if let Some(channel_name) = channel_name {
                        break ChannelBufferOperation::JoinChannelNotes { channel_name };
                    }
                }

                30..=40 => {
                    if let Some(buffer) = channel_buffers.iter().choose(rng) {
                        let channel_name = buffer.read_with(cx, |b, _| b.channel().name.clone());
                        break ChannelBufferOperation::LeaveChannelNotes { channel_name };
                    }
                }

                _ => {
                    if let Some(buffer) = channel_buffers.iter().choose(rng) {
                        break buffer.read_with(cx, |b, _| {
                            let channel_name = b.channel().name.clone();
                            let edits = b
                                .buffer()
                                .read_with(cx, |buffer, _| buffer.get_random_edits(rng, 3));
                            ChannelBufferOperation::EditChannelNotes {
                                channel_name,
                                edits,
                            }
                        });
                    }
                }
            }
        }
    }
    async fn apply_operation(
        client: &TestClient,
        operation: ChannelBufferOperation,
        cx: &mut TestAppContext,
    ) -> Result<(), TestError> {
        match operation {
            ChannelBufferOperation::JoinChannelNotes { channel_name } => {
                let buffer = client.channel_store().update(cx, |store, cx| {
                    let channel_id = store
                        .channels()
                        .find(|(_, c)| c.name == channel_name)
                        .unwrap()
                        .1
                        .id;
                    if store.has_open_channel_buffer(channel_id, cx) {
                        Err(TestError::Inapplicable)
                    } else {
                        Ok(store.open_channel_buffer(channel_id, cx))
                    }
                })?;

                log::info!(
                    "{}: opening notes for channel {channel_name}",
                    client.username
                );
                client.channel_buffers().insert(buffer.await?);
            }

            ChannelBufferOperation::LeaveChannelNotes { channel_name } => {
                let buffer = cx.update(|cx| {
                    let mut left_buffer = Err(TestError::Inapplicable);
                    client.channel_buffers().retain(|buffer| {
                        if buffer.read(cx).channel().name == channel_name {
                            left_buffer = Ok(buffer.clone());
                            false
                        } else {
                            true
                        }
                    });
                    left_buffer
                })?;

                log::info!(
                    "{}: closing notes for channel {channel_name}",
                    client.username
                );
                cx.update(|_| drop(buffer));
            }

            ChannelBufferOperation::EditChannelNotes {
                channel_name,
                edits,
            } => {
                let channel_buffer = cx
                    .read(|cx| {
                        client
                            .channel_buffers()
                            .iter()
                            .find(|buffer| buffer.read(cx).channel().name == channel_name)
                            .cloned()
                    })
                    .ok_or_else(|| TestError::Inapplicable)?;

                log::info!(
                    "{}: editing notes for channel {channel_name} with {:?}",
                    client.username,
                    edits
                );

                channel_buffer.update(cx, |buffer, cx| {
                    let buffer = buffer.buffer();
                    buffer.update(cx, |buffer, cx| {
                        let snapshot = buffer.snapshot();
                        buffer.edit(
                            edits.into_iter().map(|(range, text)| {
                                let start = snapshot.clip_offset(range.start, Bias::Left);
                                let end = snapshot.clip_offset(range.end, Bias::Right);
                                (start..end, text)
                            }),
                            None,
                            cx,
                        );
                    });
                });
            }

            ChannelBufferOperation::Noop => Err(TestError::Inapplicable)?,
        }
        Ok(())
    }
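The clip_offset calls above snap randomly generated edit ranges to valid positions before applying them. A rough sketch of what such clipping does, as an illustration only (hypothetical standalone function; Zed's snapshot applies its own bias rules):

// Illustrative: clamp a byte offset into the text and snap it to a UTF-8
// character boundary, moving left or right depending on the requested bias.
enum Bias { Left, Right }

fn clip_offset(text: &str, offset: usize, bias: Bias) -> usize {
    let mut offset = offset.min(text.len());
    while !text.is_char_boundary(offset) {
        match bias {
            Bias::Left => offset -= 1,
            Bias::Right => offset += 1,
        }
    }
    offset
}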
    async fn on_client_added(client: &Rc<TestClient>, cx: &mut TestAppContext) {
        let channel_store = client.channel_store();
        while channel_store.read_with(cx, |store, _| store.channel_count() == 0) {
            channel_store.next_notification(cx).await;
        }
    }

    async fn on_quiesce(server: &mut TestServer, clients: &mut [(Rc<TestClient>, TestAppContext)]) {
        let channels = server.app_state.db.all_channels().await.unwrap();

        for (client, client_cx) in clients.iter_mut() {
            client_cx.update(|cx| {
                client
                    .channel_buffers()
                    .retain(|b| b.read(cx).is_connected());
            });
        }

        for (channel_id, channel_name) in channels {
            let mut prev_text: Option<(u64, String)> = None;

            let mut collaborator_user_ids = server
                .app_state
                .db
                .get_channel_buffer_collaborators(channel_id)
                .await
                .unwrap()
                .into_iter()
                .map(|id| id.to_proto())
                .collect::<Vec<_>>();
            collaborator_user_ids.sort();

            for (client, client_cx) in clients.iter() {
                let user_id = client.user_id().unwrap();
                client_cx.read(|cx| {
                    if let Some(channel_buffer) = client
                        .channel_buffers()
                        .iter()
                        .find(|b| b.read(cx).channel().id == channel_id.to_proto())
                    {
                        let channel_buffer = channel_buffer.read(cx);

                        // Assert that channel buffer's text matches other clients' copies.
                        let text = channel_buffer.buffer().read(cx).text();
                        if let Some((prev_user_id, prev_text)) = &prev_text {
                            assert_eq!(
                                &text,
                                prev_text,
                                "client {user_id} has different text than client {prev_user_id} for channel {channel_name}",
                            );
                        } else {
                            prev_text = Some((user_id, text.clone()));
                        }

                        // Assert that all clients and the server agree about who is present in the
                        // channel buffer.
                        let collaborators = channel_buffer.collaborators();
                        let mut user_ids =
                            collaborators.iter().map(|c| c.user_id).collect::<Vec<_>>();
                        user_ids.sort();
                        assert_eq!(
                            user_ids,
                            collaborator_user_ids,
                            "client {user_id} has different user ids for channel {channel_name} than the server",
                        );
                    }
                });
            }
        }
    }
}
1585
crates/collab/src/tests/random_project_collaboration_tests.rs
Normal file
File diff suppressed because it is too large
689
crates/collab/src/tests/randomized_test_helpers.rs
Normal file
@@ -0,0 +1,689 @@
use crate::{
    db::{self, NewUserParams, UserId},
    rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
    tests::{TestClient, TestServer},
};
use async_trait::async_trait;
use futures::StreamExt;
use gpui::{executor::Deterministic, Task, TestAppContext};
use parking_lot::Mutex;
use rand::prelude::*;
use rpc::RECEIVE_TIMEOUT;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use settings::SettingsStore;
use std::{
    env,
    path::PathBuf,
    rc::Rc,
    sync::{
        atomic::{AtomicBool, Ordering::SeqCst},
        Arc,
    },
};

lazy_static::lazy_static! {
    static ref PLAN_LOAD_PATH: Option<PathBuf> = path_env_var("LOAD_PLAN");
    static ref PLAN_SAVE_PATH: Option<PathBuf> = path_env_var("SAVE_PLAN");
    static ref MAX_PEERS: usize = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(3);
    static ref MAX_OPERATIONS: usize = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);
}

static LOADED_PLAN_JSON: Mutex<Option<Vec<u8>>> = Mutex::new(None);
static LAST_PLAN: Mutex<Option<Box<dyn Send + FnOnce() -> Vec<u8>>>> = Mutex::new(None);

struct TestPlan<T: RandomizedTest> {
    rng: StdRng,
    replay: bool,
    stored_operations: Vec<(StoredOperation<T::Operation>, Arc<AtomicBool>)>,
    max_operations: usize,
    operation_ix: usize,
    users: Vec<UserTestPlan>,
    next_batch_id: usize,
    allow_server_restarts: bool,
    allow_client_reconnection: bool,
    allow_client_disconnection: bool,
}

pub struct UserTestPlan {
    pub user_id: UserId,
    pub username: String,
    pub allow_client_reconnection: bool,
    pub allow_client_disconnection: bool,
    next_root_id: usize,
    operation_ix: usize,
    online: bool,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum StoredOperation<T> {
    Server(ServerOperation),
    Client {
        user_id: UserId,
        batch_id: usize,
        operation: T,
    },
}

#[derive(Clone, Debug, Serialize, Deserialize)]
enum ServerOperation {
    AddConnection {
        user_id: UserId,
    },
    RemoveConnection {
        user_id: UserId,
    },
    BounceConnection {
        user_id: UserId,
    },
    RestartServer,
    MutateClients {
        batch_id: usize,
        #[serde(skip_serializing)]
        #[serde(skip_deserializing)]
        user_ids: Vec<UserId>,
        quiesce: bool,
    },
}

pub enum TestError {
    Inapplicable,
    Other(anyhow::Error),
}

#[async_trait(?Send)]
pub trait RandomizedTest: 'static + Sized {
    type Operation: Send + Clone + Serialize + DeserializeOwned;

    fn generate_operation(
        client: &TestClient,
        rng: &mut StdRng,
        plan: &mut UserTestPlan,
        cx: &TestAppContext,
    ) -> Self::Operation;

    async fn apply_operation(
        client: &TestClient,
        operation: Self::Operation,
        cx: &mut TestAppContext,
    ) -> Result<(), TestError>;

    async fn initialize(server: &mut TestServer, users: &[UserTestPlan]);

    async fn on_client_added(client: &Rc<TestClient>, cx: &mut TestAppContext);

    async fn on_quiesce(server: &mut TestServer, client: &mut [(Rc<TestClient>, TestAppContext)]);
}

pub async fn run_randomized_test<T: RandomizedTest>(
    cx: &mut TestAppContext,
    deterministic: Arc<Deterministic>,
    rng: StdRng,
) {
    deterministic.forbid_parking();
    let mut server = TestServer::start(&deterministic).await;
    let plan = TestPlan::<T>::new(&mut server, rng).await;

    LAST_PLAN.lock().replace({
        let plan = plan.clone();
        Box::new(move || plan.lock().serialize())
    });

    let mut clients = Vec::new();
    let mut client_tasks = Vec::new();
    let mut operation_channels = Vec::new();
    loop {
        let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else {
            break;
        };
        applied.store(true, SeqCst);
        let did_apply = TestPlan::apply_server_operation(
            plan.clone(),
            deterministic.clone(),
            &mut server,
            &mut clients,
            &mut client_tasks,
            &mut operation_channels,
            next_operation,
            cx,
        )
        .await;
        if !did_apply {
            applied.store(false, SeqCst);
        }
    }

    drop(operation_channels);
    deterministic.start_waiting();
    futures::future::join_all(client_tasks).await;
    deterministic.finish_waiting();

    deterministic.run_until_parked();
    T::on_quiesce(&mut server, &mut clients).await;

    for (client, mut cx) in clients {
        cx.update(|cx| {
            let store = cx.remove_global::<SettingsStore>();
            cx.clear_globals();
            cx.set_global(store);
            drop(client);
        });
    }
    deterministic.run_until_parked();

    if let Some(path) = &*PLAN_SAVE_PATH {
        eprintln!("saved test plan to path {:?}", path);
        std::fs::write(path, plan.lock().serialize()).unwrap();
    }
}
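As the lazy_static block above shows, the harness is driven entirely by environment variables: OPERATIONS and MAX_PEERS bound the size of the generated plan, SAVE_PLAN writes the executed plan to a JSON file, and LOAD_PLAN replays a previously saved plan; relative paths are resolved against the repository root via path_env_var below. An invocation would look something like `OPERATIONS=200 SAVE_PLAN=plan.json cargo test -p collab test_random_channel_buffers` (illustrative command line, not taken from the source).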
pub fn save_randomized_test_plan() {
    if let Some(serialize_plan) = LAST_PLAN.lock().take() {
        if let Some(path) = &*PLAN_SAVE_PATH {
            eprintln!("saved test plan to path {:?}", path);
            std::fs::write(path, serialize_plan()).unwrap();
        }
    }
}

impl<T: RandomizedTest> TestPlan<T> {
    pub async fn new(server: &mut TestServer, mut rng: StdRng) -> Arc<Mutex<Self>> {
        let allow_server_restarts = rng.gen_bool(0.7);
        let allow_client_reconnection = rng.gen_bool(0.7);
        let allow_client_disconnection = rng.gen_bool(0.1);

        let mut users = Vec::new();
        for ix in 0..*MAX_PEERS {
            let username = format!("user-{}", ix + 1);
            let user_id = server
                .app_state
                .db
                .create_user(
                    &format!("{username}@example.com"),
                    false,
                    NewUserParams {
                        github_login: username.clone(),
                        github_user_id: (ix + 1) as i32,
                        invite_count: 0,
                    },
                )
                .await
                .unwrap()
                .user_id;
            users.push(UserTestPlan {
                user_id,
                username,
                online: false,
                next_root_id: 0,
                operation_ix: 0,
                allow_client_disconnection,
                allow_client_reconnection,
            });
        }

        T::initialize(server, &users).await;

        let plan = Arc::new(Mutex::new(Self {
            replay: false,
            allow_server_restarts,
            allow_client_reconnection,
            allow_client_disconnection,
            stored_operations: Vec::new(),
            operation_ix: 0,
            next_batch_id: 0,
            max_operations: *MAX_OPERATIONS,
            users,
            rng,
        }));

        if let Some(path) = &*PLAN_LOAD_PATH {
            let json = LOADED_PLAN_JSON
                .lock()
                .get_or_insert_with(|| {
                    eprintln!("loaded test plan from path {:?}", path);
                    std::fs::read(path).unwrap()
                })
                .clone();
            plan.lock().deserialize(json);
        }

        plan
    }

    fn deserialize(&mut self, json: Vec<u8>) {
        let stored_operations: Vec<StoredOperation<T::Operation>> =
            serde_json::from_slice(&json).unwrap();
        self.replay = true;
        self.stored_operations = stored_operations
            .iter()
            .cloned()
            .enumerate()
            .map(|(i, mut operation)| {
                let did_apply = Arc::new(AtomicBool::new(false));
                if let StoredOperation::Server(ServerOperation::MutateClients {
                    batch_id: current_batch_id,
                    user_ids,
                    ..
                }) = &mut operation
                {
                    assert!(user_ids.is_empty());
                    user_ids.extend(stored_operations[i + 1..].iter().filter_map(|operation| {
                        if let StoredOperation::Client {
                            user_id, batch_id, ..
                        } = operation
                        {
                            if batch_id == current_batch_id {
                                return Some(user_id);
                            }
                        }
                        None
                    }));
                    user_ids.sort_unstable();
                }
                (operation, did_apply)
            })
            .collect()
    }

    fn serialize(&mut self) -> Vec<u8> {
        // Format each operation as one line
        let mut json = Vec::new();
        json.push(b'[');
        for (operation, applied) in &self.stored_operations {
            if !applied.load(SeqCst) {
                continue;
            }
            if json.len() > 1 {
                json.push(b',');
            }
            json.extend_from_slice(b"\n  ");
            serde_json::to_writer(&mut json, operation).unwrap();
        }
        json.extend_from_slice(b"\n]\n");
        json
    }
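serialize writes the plan as a JSON array with one operation per line, which keeps saved plans diffable and easy to hand-edit before replaying. Given the derives above, a saved plan plausibly looks like the following (illustrative output only: StoredOperation is #[serde(untagged)], so client entries appear as bare objects, while ServerOperation uses serde's default externally tagged form):

[
  {"AddConnection":{"user_id":1}},
  {"user_id":1,"batch_id":0,"operation":{"JoinChannelNotes":{"channel_name":"channel-0"}}}
]

Note that MutateClients::user_ids is skipped during serialization; deserialize reconstructs it by scanning the subsequent client operations for matching batch_id values.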
    fn next_server_operation(
        &mut self,
        clients: &[(Rc<TestClient>, TestAppContext)],
    ) -> Option<(ServerOperation, Arc<AtomicBool>)> {
        if self.replay {
            while let Some(stored_operation) = self.stored_operations.get(self.operation_ix) {
                self.operation_ix += 1;
                if let (StoredOperation::Server(operation), applied) = stored_operation {
                    return Some((operation.clone(), applied.clone()));
                }
            }
            None
        } else {
            let operation = self.generate_server_operation(clients)?;
            let applied = Arc::new(AtomicBool::new(false));
            self.stored_operations
                .push((StoredOperation::Server(operation.clone()), applied.clone()));
            Some((operation, applied))
        }
    }

    fn next_client_operation(
        &mut self,
        client: &TestClient,
        current_batch_id: usize,
        cx: &TestAppContext,
    ) -> Option<(T::Operation, Arc<AtomicBool>)> {
        let current_user_id = client.current_user_id(cx);
        let user_ix = self
            .users
            .iter()
            .position(|user| user.user_id == current_user_id)
            .unwrap();
        let user_plan = &mut self.users[user_ix];

        if self.replay {
            while let Some(stored_operation) = self.stored_operations.get(user_plan.operation_ix) {
                user_plan.operation_ix += 1;
                if let (
                    StoredOperation::Client {
                        user_id, operation, ..
                    },
                    applied,
                ) = stored_operation
                {
                    if user_id == &current_user_id {
                        return Some((operation.clone(), applied.clone()));
                    }
                }
            }
            None
        } else {
            if self.operation_ix == self.max_operations {
                return None;
            }
            self.operation_ix += 1;
            let operation = T::generate_operation(
                client,
                &mut self.rng,
                self.users
                    .iter_mut()
                    .find(|user| user.user_id == current_user_id)
                    .unwrap(),
                cx,
            );
            let applied = Arc::new(AtomicBool::new(false));
            self.stored_operations.push((
                StoredOperation::Client {
                    user_id: current_user_id,
                    batch_id: current_batch_id,
                    operation: operation.clone(),
                },
                applied.clone(),
            ));
            Some((operation, applied))
        }
    }

    fn generate_server_operation(
        &mut self,
        clients: &[(Rc<TestClient>, TestAppContext)],
    ) -> Option<ServerOperation> {
        if self.operation_ix == self.max_operations {
            return None;
        }

        Some(loop {
            break match self.rng.gen_range(0..100) {
                0..=29 if clients.len() < self.users.len() => {
                    let user = self
                        .users
                        .iter()
                        .filter(|u| !u.online)
                        .choose(&mut self.rng)
                        .unwrap();
                    self.operation_ix += 1;
                    ServerOperation::AddConnection {
                        user_id: user.user_id,
                    }
                }
                30..=34 if clients.len() > 1 && self.allow_client_disconnection => {
                    let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
                    let user_id = client.current_user_id(cx);
                    self.operation_ix += 1;
                    ServerOperation::RemoveConnection { user_id }
                }
                35..=39 if clients.len() > 1 && self.allow_client_reconnection => {
                    let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
                    let user_id = client.current_user_id(cx);
                    self.operation_ix += 1;
                    ServerOperation::BounceConnection { user_id }
                }
                40..=44 if self.allow_server_restarts && clients.len() > 1 => {
                    self.operation_ix += 1;
                    ServerOperation::RestartServer
                }
                _ if !clients.is_empty() => {
                    let count = self
                        .rng
                        .gen_range(1..10)
                        .min(self.max_operations - self.operation_ix);
                    let batch_id = util::post_inc(&mut self.next_batch_id);
                    let mut user_ids = (0..count)
                        .map(|_| {
                            let ix = self.rng.gen_range(0..clients.len());
                            let (client, cx) = &clients[ix];
                            client.current_user_id(cx)
                        })
                        .collect::<Vec<_>>();
                    user_ids.sort_unstable();
                    ServerOperation::MutateClients {
                        user_ids,
                        batch_id,
                        quiesce: self.rng.gen_bool(0.7),
                    }
                }
                _ => continue,
            };
        })
    }

    async fn apply_server_operation(
        plan: Arc<Mutex<Self>>,
        deterministic: Arc<Deterministic>,
        server: &mut TestServer,
        clients: &mut Vec<(Rc<TestClient>, TestAppContext)>,
        client_tasks: &mut Vec<Task<()>>,
        operation_channels: &mut Vec<futures::channel::mpsc::UnboundedSender<usize>>,
        operation: ServerOperation,
        cx: &mut TestAppContext,
    ) -> bool {
        match operation {
            ServerOperation::AddConnection { user_id } => {
                let username;
                {
                    let mut plan = plan.lock();
                    let user = plan.user(user_id);
                    if user.online {
                        return false;
                    }
                    user.online = true;
                    username = user.username.clone();
                };
                log::info!("adding new connection for {}", username);
                let next_entity_id = (user_id.0 * 10_000) as usize;
                let mut client_cx = TestAppContext::new(
                    cx.foreground_platform(),
                    cx.platform(),
                    deterministic.build_foreground(user_id.0 as usize),
                    deterministic.build_background(),
                    cx.font_cache(),
                    cx.leak_detector(),
                    next_entity_id,
                    cx.function_name.clone(),
                );

                let (operation_tx, operation_rx) = futures::channel::mpsc::unbounded();
                let client = Rc::new(server.create_client(&mut client_cx, &username).await);
                operation_channels.push(operation_tx);
                clients.push((client.clone(), client_cx.clone()));
                client_tasks.push(client_cx.foreground().spawn(Self::simulate_client(
                    plan.clone(),
                    client,
                    operation_rx,
                    client_cx,
                )));

                log::info!("added connection for {}", username);
            }

            ServerOperation::RemoveConnection {
                user_id: removed_user_id,
            } => {
                log::info!("simulating full disconnection of user {}", removed_user_id);
                let client_ix = clients
                    .iter()
                    .position(|(client, cx)| client.current_user_id(cx) == removed_user_id);
                let Some(client_ix) = client_ix else {
                    return false;
                };
                let user_connection_ids = server
                    .connection_pool
                    .lock()
                    .user_connection_ids(removed_user_id)
                    .collect::<Vec<_>>();
                assert_eq!(user_connection_ids.len(), 1);
                let removed_peer_id = user_connection_ids[0].into();
                let (client, mut client_cx) = clients.remove(client_ix);
                let client_task = client_tasks.remove(client_ix);
                operation_channels.remove(client_ix);
                server.forbid_connections();
                server.disconnect_client(removed_peer_id);
                deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
                deterministic.start_waiting();
                log::info!("waiting for user {} to exit...", removed_user_id);
                client_task.await;
                deterministic.finish_waiting();
                server.allow_connections();

                for project in client.remote_projects().iter() {
                    project.read_with(&client_cx, |project, _| {
                        assert!(
                            project.is_read_only(),
                            "project {:?} should be read only",
                            project.remote_id()
                        )
                    });
                }

                for (client, cx) in clients {
                    let contacts = server
                        .app_state
                        .db
                        .get_contacts(client.current_user_id(cx))
                        .await
                        .unwrap();
                    let pool = server.connection_pool.lock();
                    for contact in contacts {
                        if let db::Contact::Accepted { user_id, busy, .. } = contact {
                            if user_id == removed_user_id {
                                assert!(!pool.is_user_online(user_id));
                                assert!(!busy);
                            }
                        }
                    }
                }

                log::info!("{} removed", client.username);
                plan.lock().user(removed_user_id).online = false;
                client_cx.update(|cx| {
                    cx.clear_globals();
                    drop(client);
                });
            }

            ServerOperation::BounceConnection { user_id } => {
                log::info!("simulating temporary disconnection of user {}", user_id);
                let user_connection_ids = server
                    .connection_pool
                    .lock()
                    .user_connection_ids(user_id)
                    .collect::<Vec<_>>();
                if user_connection_ids.is_empty() {
                    return false;
                }
                assert_eq!(user_connection_ids.len(), 1);
                let peer_id = user_connection_ids[0].into();
                server.disconnect_client(peer_id);
                deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
            }

            ServerOperation::RestartServer => {
                log::info!("simulating server restart");
                server.reset().await;
                deterministic.advance_clock(RECEIVE_TIMEOUT);
                server.start().await.unwrap();
                deterministic.advance_clock(CLEANUP_TIMEOUT);
                let environment = &server.app_state.config.zed_environment;
                let (stale_room_ids, _) = server
                    .app_state
                    .db
                    .stale_server_resource_ids(environment, server.id())
                    .await
                    .unwrap();
                assert_eq!(stale_room_ids, vec![]);
            }

            ServerOperation::MutateClients {
                user_ids,
                batch_id,
                quiesce,
            } => {
                let mut applied = false;
                for user_id in user_ids {
                    let client_ix = clients
                        .iter()
                        .position(|(client, cx)| client.current_user_id(cx) == user_id);
                    let Some(client_ix) = client_ix else { continue };
                    applied = true;
                    if let Err(err) = operation_channels[client_ix].unbounded_send(batch_id) {
                        log::error!("error signaling user {user_id}: {err}");
                    }
                }

                if quiesce && applied {
                    deterministic.run_until_parked();
                    T::on_quiesce(server, clients).await;
                }

                return applied;
            }
        }
        true
    }

    async fn simulate_client(
        plan: Arc<Mutex<Self>>,
        client: Rc<TestClient>,
        mut operation_rx: futures::channel::mpsc::UnboundedReceiver<usize>,
        mut cx: TestAppContext,
    ) {
        T::on_client_added(&client, &mut cx).await;

        while let Some(batch_id) = operation_rx.next().await {
            let Some((operation, applied)) =
                plan.lock().next_client_operation(&client, batch_id, &cx)
            else {
                break;
            };
            applied.store(true, SeqCst);
            match T::apply_operation(&client, operation, &mut cx).await {
                Ok(()) => {}
                Err(TestError::Inapplicable) => {
                    applied.store(false, SeqCst);
                    log::info!("skipped operation");
                }
                Err(TestError::Other(error)) => {
                    log::error!("{} error: {}", client.username, error);
                }
            }
            cx.background().simulate_random_delay().await;
        }
        log::info!("{}: done", client.username);
    }

    fn user(&mut self, user_id: UserId) -> &mut UserTestPlan {
        self.users
            .iter_mut()
            .find(|user| user.user_id == user_id)
            .unwrap()
    }
}

impl UserTestPlan {
    pub fn next_root_dir_name(&mut self) -> String {
        let user_id = self.user_id;
        let root_id = util::post_inc(&mut self.next_root_id);
        format!("dir-{user_id}-{root_id}")
    }
}

impl From<anyhow::Error> for TestError {
    fn from(value: anyhow::Error) -> Self {
        Self::Other(value)
    }
}

fn path_env_var(name: &str) -> Option<PathBuf> {
    let value = env::var(name).ok()?;
    let mut path = PathBuf::from(value);
    if path.is_relative() {
        let mut abs_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        abs_path.pop();
        abs_path.pop();
        abs_path.push(path);
        path = abs_path
    }
    Some(path)
}
558
crates/collab/src/tests/test_server.rs
Normal file
@@ -0,0 +1,558 @@
use crate::{
    db::{tests::TestDb, NewUserParams, UserId},
    executor::Executor,
    rpc::{Server, CLEANUP_TIMEOUT},
    AppState,
};
use anyhow::anyhow;
use call::ActiveCall;
use channel::{channel_buffer::ChannelBuffer, ChannelStore};
use client::{
    self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore,
};
use collections::{HashMap, HashSet};
use fs::FakeFs;
use futures::{channel::oneshot, StreamExt as _};
use gpui::{executor::Deterministic, ModelHandle, Task, TestAppContext, WindowHandle};
use language::LanguageRegistry;
use parking_lot::Mutex;
use project::{Project, WorktreeId};
use settings::SettingsStore;
use std::{
    cell::{Ref, RefCell, RefMut},
    env,
    ops::{Deref, DerefMut},
    path::Path,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
};
use util::http::FakeHttpClient;
use workspace::Workspace;

pub struct TestServer {
    pub app_state: Arc<AppState>,
    pub test_live_kit_server: Arc<live_kit_client::TestServer>,
    server: Arc<Server>,
    connection_killers: Arc<Mutex<HashMap<PeerId, Arc<AtomicBool>>>>,
    forbid_connections: Arc<AtomicBool>,
    _test_db: TestDb,
}

pub struct TestClient {
    pub username: String,
    pub app_state: Arc<workspace::AppState>,
    state: RefCell<TestClientState>,
}

#[derive(Default)]
struct TestClientState {
    local_projects: Vec<ModelHandle<Project>>,
    remote_projects: Vec<ModelHandle<Project>>,
    buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
    channel_buffers: HashSet<ModelHandle<ChannelBuffer>>,
}

pub struct ContactsSummary {
    pub current: Vec<String>,
    pub outgoing_requests: Vec<String>,
    pub incoming_requests: Vec<String>,
}

impl TestServer {
    pub async fn start(deterministic: &Arc<Deterministic>) -> Self {
        static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);

        let use_postgres = env::var("USE_POSTGRES").ok();
        let use_postgres = use_postgres.as_deref();
        let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
            TestDb::postgres(deterministic.build_background())
        } else {
            TestDb::sqlite(deterministic.build_background())
        };
        let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
        let live_kit_server = live_kit_client::TestServer::create(
            format!("http://livekit.{}.test", live_kit_server_id),
            format!("devkey-{}", live_kit_server_id),
            format!("secret-{}", live_kit_server_id),
            deterministic.build_background(),
        )
        .unwrap();
        let app_state = Self::build_app_state(&test_db, &live_kit_server).await;
        let epoch = app_state
            .db
            .create_server(&app_state.config.zed_environment)
            .await
            .unwrap();
        let server = Server::new(
            epoch,
            app_state.clone(),
            Executor::Deterministic(deterministic.build_background()),
        );
        server.start().await.unwrap();
        // Advance clock to ensure the server's cleanup task is finished.
        deterministic.advance_clock(CLEANUP_TIMEOUT);
        Self {
            app_state,
            server,
            connection_killers: Default::default(),
            forbid_connections: Default::default(),
            _test_db: test_db,
            test_live_kit_server: live_kit_server,
        }
    }

    pub async fn reset(&self) {
        self.app_state.db.reset();
        let epoch = self
            .app_state
            .db
            .create_server(&self.app_state.config.zed_environment)
            .await
            .unwrap();
        self.server.reset(epoch);
    }

    pub async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
        cx.update(|cx| {
            if cx.has_global::<SettingsStore>() {
                panic!("Same cx used to create two test clients")
            }
            cx.set_global(SettingsStore::test(cx));
        });

        let http = FakeHttpClient::with_404_response();
        let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await
        {
            user.id
        } else {
            self.app_state
                .db
                .create_user(
                    &format!("{name}@example.com"),
                    false,
                    NewUserParams {
                        github_login: name.into(),
                        github_user_id: 0,
                        invite_count: 0,
                    },
                )
                .await
                .expect("creating user failed")
                .user_id
        };
        let client_name = name.to_string();
        let mut client = cx.read(|cx| Client::new(http.clone(), cx));
        let server = self.server.clone();
        let db = self.app_state.db.clone();
        let connection_killers = self.connection_killers.clone();
        let forbid_connections = self.forbid_connections.clone();

        Arc::get_mut(&mut client)
            .unwrap()
            .set_id(user_id.0 as usize)
            .override_authenticate(move |cx| {
                cx.spawn(|_| async move {
                    let access_token = "the-token".to_string();
                    Ok(Credentials {
                        user_id: user_id.0 as u64,
                        access_token,
                    })
                })
            })
            .override_establish_connection(move |credentials, cx| {
                assert_eq!(credentials.user_id, user_id.0 as u64);
                assert_eq!(credentials.access_token, "the-token");

                let server = server.clone();
                let db = db.clone();
                let connection_killers = connection_killers.clone();
                let forbid_connections = forbid_connections.clone();
                let client_name = client_name.clone();
                cx.spawn(move |cx| async move {
                    if forbid_connections.load(SeqCst) {
                        Err(EstablishConnectionError::other(anyhow!(
                            "server is forbidding connections"
                        )))
                    } else {
                        let (client_conn, server_conn, killed) =
                            Connection::in_memory(cx.background());
                        let (connection_id_tx, connection_id_rx) = oneshot::channel();
                        let user = db
                            .get_user_by_id(user_id)
                            .await
                            .expect("retrieving user failed")
                            .unwrap();
                        cx.background()
                            .spawn(server.handle_connection(
                                server_conn,
                                client_name,
                                user,
                                Some(connection_id_tx),
                                Executor::Deterministic(cx.background()),
                            ))
                            .detach();
                        let connection_id = connection_id_rx.await.unwrap();
                        connection_killers
                            .lock()
                            .insert(connection_id.into(), killed);
                        Ok(client_conn)
                    }
                })
            });

        let fs = FakeFs::new(cx.background());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
        let channel_store =
            cx.add_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
        let app_state = Arc::new(workspace::AppState {
            client: client.clone(),
            user_store: user_store.clone(),
            channel_store: channel_store.clone(),
            languages: Arc::new(LanguageRegistry::test()),
            fs: fs.clone(),
            build_window_options: |_, _, _| Default::default(),
            initialize_workspace: |_, _, _, _| Task::ready(Ok(())),
            background_actions: || &[],
        });

        cx.update(|cx| {
            theme::init((), cx);
            Project::init(&client, cx);
            client::init(&client, cx);
            language::init(cx);
            editor::init_settings(cx);
            workspace::init(app_state.clone(), cx);
            audio::init((), cx);
            call::init(client.clone(), user_store.clone(), cx);
            channel::init(&client);
        });

        client
            .authenticate_and_connect(false, &cx.to_async())
            .await
            .unwrap();

        let client = TestClient {
            app_state,
            username: name.to_string(),
            state: Default::default(),
        };
        client.wait_for_current_user(cx).await;
        client
    }
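create_client wires each client to the server over an in-memory connection paired with a kill switch: Connection::in_memory returns a killed flag that gets stored in connection_killers, so disconnect_client below can flip it for a specific peer. The pattern in miniature (illustrative types only, not the real Connection):

use std::sync::{
    atomic::{AtomicBool, Ordering::SeqCst},
    Arc,
};

struct InMemoryConn {
    // Shared with the test harness; set to true to simulate the socket dying.
    killed: Arc<AtomicBool>,
}

impl InMemoryConn {
    fn send(&self, _bytes: &[u8]) -> Result<(), &'static str> {
        if self.killed.load(SeqCst) {
            Err("connection killed")
        } else {
            Ok(())
        }
    }
}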
    pub fn disconnect_client(&self, peer_id: PeerId) {
        self.connection_killers
            .lock()
            .remove(&peer_id)
            .unwrap()
            .store(true, SeqCst);
    }

    pub fn forbid_connections(&self) {
        self.forbid_connections.store(true, SeqCst);
    }

    pub fn allow_connections(&self) {
        self.forbid_connections.store(false, SeqCst);
    }

    pub async fn make_contacts(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
        for ix in 1..clients.len() {
            let (left, right) = clients.split_at_mut(ix);
            let (client_a, cx_a) = left.last_mut().unwrap();
            for (client_b, cx_b) in right {
                client_a
                    .app_state
                    .user_store
                    .update(*cx_a, |store, cx| {
                        store.request_contact(client_b.user_id().unwrap(), cx)
                    })
                    .await
                    .unwrap();
                cx_a.foreground().run_until_parked();
                client_b
                    .app_state
                    .user_store
                    .update(*cx_b, |store, cx| {
                        store.respond_to_contact_request(client_a.user_id().unwrap(), true, cx)
                    })
                    .await
                    .unwrap();
            }
        }
    }
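make_contacts pairs every client with every later client; split_at_mut is what lets it hold a mutable borrow of one client's context while iterating over the rest, since the two halves of the slice are provably disjoint. The same trick in isolation:

// split_at_mut yields two non-overlapping mutable slices, so the borrow
// checker allows mutating an element of `left` and elements of `right`
// at the same time.
fn pair_all<T>(items: &mut [T], mut visit: impl FnMut(&mut T, &mut T)) {
    for ix in 1..items.len() {
        let (left, right) = items.split_at_mut(ix);
        let pivot = left.last_mut().unwrap();
        for other in right {
            visit(pivot, other);
        }
    }
}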
|
||||
pub async fn make_channel(
|
||||
&self,
|
||||
channel: &str,
|
||||
admin: (&TestClient, &mut TestAppContext),
|
||||
members: &mut [(&TestClient, &mut TestAppContext)],
|
||||
) -> u64 {
|
||||
let (admin_client, admin_cx) = admin;
|
||||
let channel_id = admin_client
|
||||
.app_state
|
||||
.channel_store
|
||||
.update(admin_cx, |channel_store, cx| {
|
||||
channel_store.create_channel(channel, None, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
for (member_client, member_cx) in members {
|
||||
admin_client
|
||||
.app_state
|
||||
.channel_store
|
||||
.update(admin_cx, |channel_store, cx| {
|
||||
channel_store.invite_member(
|
||||
channel_id,
|
||||
member_client.user_id().unwrap(),
|
||||
false,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
admin_cx.foreground().run_until_parked();
|
||||
|
||||
member_client
|
||||
.app_state
|
||||
.channel_store
|
||||
.update(*member_cx, |channels, _| {
|
||||
channels.respond_to_channel_invite(channel_id, true)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
channel_id
|
||||
}
|
||||
|
||||
pub async fn create_room(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
|
||||
self.make_contacts(clients).await;
|
||||
|
||||
let (left, right) = clients.split_at_mut(1);
|
||||
let (_client_a, cx_a) = &mut left[0];
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
|
||||
for (client_b, cx_b) in right {
|
||||
let user_id_b = client_b.current_user_id(*cx_b).to_proto();
|
||||
active_call_a
|
||||
.update(*cx_a, |call, cx| call.invite(user_id_b, None, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx_b.foreground().run_until_parked();
|
||||
let active_call_b = cx_b.read(ActiveCall::global);
|
||||
active_call_b
|
||||
.update(*cx_b, |call, cx| call.accept_incoming(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn build_app_state(
|
||||
test_db: &TestDb,
|
||||
fake_server: &live_kit_client::TestServer,
|
||||
) -> Arc<AppState> {
|
||||
Arc::new(AppState {
|
||||
db: test_db.db().clone(),
|
||||
live_kit_client: Some(Arc::new(fake_server.create_api_client())),
|
||||
config: Default::default(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for TestServer {
type Target = Server;

fn deref(&self) -> &Self::Target {
&self.server
}
}

impl Drop for TestServer {
fn drop(&mut self) {
self.server.teardown();
self.test_live_kit_server.teardown().unwrap();
}
}

impl Deref for TestClient {
type Target = Arc<Client>;

fn deref(&self) -> &Self::Target {
&self.app_state.client
}
}

impl TestClient {
pub fn fs(&self) -> &FakeFs {
self.app_state.fs.as_fake()
}

pub fn channel_store(&self) -> &ModelHandle<ChannelStore> {
&self.app_state.channel_store
}

pub fn user_store(&self) -> &ModelHandle<UserStore> {
&self.app_state.user_store
}

pub fn language_registry(&self) -> &Arc<LanguageRegistry> {
&self.app_state.languages
}

pub fn client(&self) -> &Arc<Client> {
&self.app_state.client
}

pub fn current_user_id(&self, cx: &TestAppContext) -> UserId {
UserId::from_proto(
self.app_state
.user_store
.read_with(cx, |user_store, _| user_store.current_user().unwrap().id),
)
}

pub async fn wait_for_current_user(&self, cx: &TestAppContext) {
let mut authed_user = self
.app_state
.user_store
.read_with(cx, |user_store, _| user_store.watch_current_user());
while authed_user.next().await.unwrap().is_none() {}
}

pub async fn clear_contacts(&self, cx: &mut TestAppContext) {
self.app_state
.user_store
.update(cx, |store, _| store.clear_contacts())
.await;
}

pub fn local_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
Ref::map(self.state.borrow(), |state| &state.local_projects)
}

pub fn remote_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
Ref::map(self.state.borrow(), |state| &state.remote_projects)
}

pub fn local_projects_mut<'a>(
&'a self,
) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
RefMut::map(self.state.borrow_mut(), |state| &mut state.local_projects)
}

pub fn remote_projects_mut<'a>(
&'a self,
) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
RefMut::map(self.state.borrow_mut(), |state| &mut state.remote_projects)
}

pub fn buffers_for_project<'a>(
&'a self,
project: &ModelHandle<Project>,
) -> impl DerefMut<Target = HashSet<ModelHandle<language::Buffer>>> + 'a {
RefMut::map(self.state.borrow_mut(), |state| {
state.buffers.entry(project.clone()).or_default()
})
}

pub fn buffers<'a>(
&'a self,
) -> impl DerefMut<Target = HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>> + 'a
{
RefMut::map(self.state.borrow_mut(), |state| &mut state.buffers)
}

pub fn channel_buffers<'a>(
&'a self,
) -> impl DerefMut<Target = HashSet<ModelHandle<ChannelBuffer>>> + 'a {
RefMut::map(self.state.borrow_mut(), |state| &mut state.channel_buffers)
}

pub fn summarize_contacts(&self, cx: &TestAppContext) -> ContactsSummary {
self.app_state
.user_store
.read_with(cx, |store, _| ContactsSummary {
current: store
.contacts()
.iter()
.map(|contact| contact.user.github_login.clone())
.collect(),
outgoing_requests: store
.outgoing_contact_requests()
.iter()
.map(|user| user.github_login.clone())
.collect(),
incoming_requests: store
.incoming_contact_requests()
.iter()
.map(|user| user.github_login.clone())
.collect(),
})
}

pub async fn build_local_project(
&self,
root_path: impl AsRef<Path>,
cx: &mut TestAppContext,
) -> (ModelHandle<Project>, WorktreeId) {
let project = cx.update(|cx| {
Project::local(
self.client().clone(),
self.app_state.user_store.clone(),
self.app_state.languages.clone(),
self.app_state.fs.clone(),
cx,
)
});
let (worktree, _) = project
.update(cx, |p, cx| {
p.find_or_create_local_worktree(root_path, true, cx)
})
.await
.unwrap();
worktree
.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
.await;
(project, worktree.read_with(cx, |tree, _| tree.id()))
}

pub async fn build_remote_project(
&self,
host_project_id: u64,
guest_cx: &mut TestAppContext,
) -> ModelHandle<Project> {
let active_call = guest_cx.read(ActiveCall::global);
let room = active_call.read_with(guest_cx, |call, _| call.room().unwrap().clone());
room.update(guest_cx, |room, cx| {
room.join_project(
host_project_id,
self.app_state.languages.clone(),
self.app_state.fs.clone(),
cx,
)
})
.await
.unwrap()
}

pub fn build_workspace(
&self,
project: &ModelHandle<Project>,
cx: &mut TestAppContext,
) -> WindowHandle<Workspace> {
cx.add_window(|cx| Workspace::new(0, project.clone(), self.app_state.clone(), cx))
}
}

impl Drop for TestClient {
fn drop(&mut self) {
self.app_state.client.teardown();
}
}

@ -2240,7 +2240,8 @@ impl CollabPanel {
fn open_channel_buffer(&mut self, action: &OpenChannelBuffer, cx: &mut ViewContext<Self>) {
if let Some(workspace) = self.workspace.upgrade(cx) {
let pane = workspace.read(cx).active_pane().clone();
let channel_view = ChannelView::open(action.channel_id, pane.clone(), workspace, cx);
let channel_id = action.channel_id;
let channel_view = ChannelView::open(channel_id, pane.clone(), workspace, cx);
cx.spawn(|_, mut cx| async move {
let channel_view = channel_view.await?;
pane.update(&mut cx, |pane, cx| {
@ -2249,6 +2250,18 @@ impl CollabPanel {
anyhow::Ok(())
})
.detach();
let room_id = ActiveCall::global(cx)
.read(cx)
.room()
.map(|room| room.read(cx).id());

ActiveCall::report_call_event_for_room(
"open channel notes",
room_id,
Some(channel_id),
&self.client,
cx,
);
}
}

@ -49,7 +49,7 @@ pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
if room.is_screen_sharing() {
ActiveCall::report_call_event_for_room(
"disable screen share",
room.id(),
Some(room.id()),
room.channel_id(),
&client,
cx,
@ -58,7 +58,7 @@ pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
} else {
ActiveCall::report_call_event_for_room(
"enable screen share",
room.id(),
Some(room.id()),
room.channel_id(),
&client,
cx,
@ -78,7 +78,7 @@ pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
if room.is_muted(cx) {
ActiveCall::report_call_event_for_room(
"enable microphone",
room.id(),
Some(room.id()),
room.channel_id(),
&client,
cx,
@ -86,7 +86,7 @@ pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
} else {
ActiveCall::report_call_event_for_room(
"disable microphone",
room.id(),
Some(room.id()),
room.channel_id(),
&client,
cx,

@ -41,7 +41,7 @@ actions!(
[Suggest, NextSuggestion, PreviousSuggestion, Reinstall]
);

pub fn init(http: Arc<dyn HttpClient>, node_runtime: Arc<NodeRuntime>, cx: &mut AppContext) {
pub fn init(http: Arc<dyn HttpClient>, node_runtime: Arc<dyn NodeRuntime>, cx: &mut AppContext) {
let copilot = cx.add_model({
let node_runtime = node_runtime.clone();
move |cx| Copilot::start(http, node_runtime, cx)
@ -265,7 +265,7 @@ pub struct Completion {

pub struct Copilot {
http: Arc<dyn HttpClient>,
node_runtime: Arc<NodeRuntime>,
node_runtime: Arc<dyn NodeRuntime>,
server: CopilotServer,
buffers: HashSet<WeakModelHandle<Buffer>>,
}
@ -299,7 +299,7 @@ impl Copilot {

fn start(
http: Arc<dyn HttpClient>,
node_runtime: Arc<NodeRuntime>,
node_runtime: Arc<dyn NodeRuntime>,
cx: &mut ModelContext<Self>,
) -> Self {
let mut this = Self {
@ -335,12 +335,15 @@ impl Copilot {

#[cfg(any(test, feature = "test-support"))]
pub fn fake(cx: &mut gpui::TestAppContext) -> (ModelHandle<Self>, lsp::FakeLanguageServer) {
use node_runtime::FakeNodeRuntime;

let (server, fake_server) =
LanguageServer::fake("copilot".into(), Default::default(), cx.to_async());
let http = util::http::FakeHttpClient::create(|_| async { unreachable!() });
let node_runtime = FakeNodeRuntime::new();
let this = cx.add_model(|_| Self {
http: http.clone(),
node_runtime: NodeRuntime::instance(http),
node_runtime,
server: CopilotServer::Running(RunningCopilotServer {
lsp: Arc::new(server),
sign_in_status: SignInStatus::Authorized,
@ -353,7 +356,7 @@ impl Copilot {

fn start_language_server(
http: Arc<dyn HttpClient>,
node_runtime: Arc<NodeRuntime>,
node_runtime: Arc<dyn NodeRuntime>,
this: ModelHandle<Self>,
mut cx: AsyncAppContext,
) -> impl Future<Output = ()> {

@ -312,6 +312,10 @@ actions!(
CopyPath,
CopyRelativePath,
CopyHighlightJson,
ContextMenuFirst,
ContextMenuPrev,
ContextMenuNext,
ContextMenuLast,
]
);

@ -468,6 +472,10 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(Editor::next_copilot_suggestion);
cx.add_action(Editor::previous_copilot_suggestion);
cx.add_action(Editor::copilot_suggest);
cx.add_action(Editor::context_menu_first);
cx.add_action(Editor::context_menu_prev);
cx.add_action(Editor::context_menu_next);
cx.add_action(Editor::context_menu_last);

hover_popover::init(cx);
scroll::actions::init(cx);
@ -1654,7 +1662,7 @@ impl Editor {
.excerpt_containing(self.selections.newest_anchor().head(), cx)
}

fn style(&self, cx: &AppContext) -> EditorStyle {
pub fn style(&self, cx: &AppContext) -> EditorStyle {
build_style(
settings::get::<ThemeSettings>(cx),
self.get_field_editor_theme.as_deref(),
@ -5166,12 +5174,6 @@ impl Editor {
return;
}

if let Some(context_menu) = self.context_menu.as_mut() {
if context_menu.select_prev(cx) {
return;
}
}

if matches!(self.mode, EditorMode::SingleLine) {
cx.propagate_action();
return;
@ -5194,15 +5196,6 @@ impl Editor {
return;
}

if self
.context_menu
.as_mut()
.map(|menu| menu.select_first(cx))
.unwrap_or(false)
{
return;
}

if matches!(self.mode, EditorMode::SingleLine) {
cx.propagate_action();
return;
@ -5242,12 +5235,6 @@ impl Editor {
pub fn move_down(&mut self, _: &MoveDown, cx: &mut ViewContext<Self>) {
self.take_rename(true, cx);

if let Some(context_menu) = self.context_menu.as_mut() {
if context_menu.select_next(cx) {
return;
}
}

if self.mode == EditorMode::SingleLine {
cx.propagate_action();
return;
@ -5315,6 +5302,30 @@ impl Editor {
});
}

pub fn context_menu_first(&mut self, _: &ContextMenuFirst, cx: &mut ViewContext<Self>) {
if let Some(context_menu) = self.context_menu.as_mut() {
context_menu.select_first(cx);
}
}

pub fn context_menu_prev(&mut self, _: &ContextMenuPrev, cx: &mut ViewContext<Self>) {
if let Some(context_menu) = self.context_menu.as_mut() {
context_menu.select_prev(cx);
}
}

pub fn context_menu_next(&mut self, _: &ContextMenuNext, cx: &mut ViewContext<Self>) {
if let Some(context_menu) = self.context_menu.as_mut() {
context_menu.select_next(cx);
}
}

pub fn context_menu_last(&mut self, _: &ContextMenuLast, cx: &mut ViewContext<Self>) {
if let Some(context_menu) = self.context_menu.as_mut() {
context_menu.select_last(cx);
}
}

pub fn move_to_previous_word_start(
&mut self,
_: &MoveToPreviousWordStart,
@ -8666,17 +8677,20 @@ impl View for Editor {
if self.pending_rename.is_some() {
keymap.add_identifier("renaming");
}
match self.context_menu.as_ref() {
Some(ContextMenu::Completions(_)) => {
keymap.add_identifier("menu");
keymap.add_identifier("showing_completions")
if self.context_menu_visible() {
match self.context_menu.as_ref() {
Some(ContextMenu::Completions(_)) => {
keymap.add_identifier("menu");
keymap.add_identifier("showing_completions")
}
Some(ContextMenu::CodeActions(_)) => {
keymap.add_identifier("menu");
keymap.add_identifier("showing_code_actions")
}
None => {}
}
Some(ContextMenu::CodeActions(_)) => {
keymap.add_identifier("menu");
keymap.add_identifier("showing_code_actions")
}
None => {}
}

for layer in self.keymap_context_layers.values() {
keymap.extend(layer);
}

@ -5340,7 +5340,7 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
cx.condition(|editor, _| editor.context_menu_visible())
.await;
let apply_additional_edits = cx.update_editor(|editor, cx| {
editor.move_down(&MoveDown, cx);
editor.context_menu_next(&Default::default(), cx);
editor
.confirm_completion(&ConfirmCompletion::default(), cx)
.unwrap()

@ -1,8 +1,14 @@
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
use crate::{char_kind, CharKind, ToPoint};
use crate::{char_kind, CharKind, ToOffset, ToPoint};
use language::Point;
use std::ops::Range;

#[derive(Debug, PartialEq)]
pub enum FindRange {
SingleLine,
MultiLine,
}

pub fn left(map: &DisplaySnapshot, mut point: DisplayPoint) -> DisplayPoint {
if point.column() > 0 {
*point.column_mut() -= 1;
@ -179,7 +185,7 @@ pub fn previous_word_start(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint
let raw_point = point.to_point(map);
let scope = map.buffer_snapshot.language_scope_at(raw_point);

find_preceding_boundary(map, point, |left, right| {
find_preceding_boundary(map, point, FindRange::MultiLine, |left, right| {
(char_kind(&scope, left) != char_kind(&scope, right) && !right.is_whitespace())
|| left == '\n'
})
@ -188,7 +194,8 @@ pub fn previous_word_start(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint
pub fn previous_subword_start(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
let raw_point = point.to_point(map);
let scope = map.buffer_snapshot.language_scope_at(raw_point);
find_preceding_boundary(map, point, |left, right| {

find_preceding_boundary(map, point, FindRange::MultiLine, |left, right| {
let is_word_start =
char_kind(&scope, left) != char_kind(&scope, right) && !right.is_whitespace();
let is_subword_start =
@ -200,7 +207,8 @@ pub fn previous_subword_start(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint
pub fn next_word_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
let raw_point = point.to_point(map);
let scope = map.buffer_snapshot.language_scope_at(raw_point);
find_boundary(map, point, |left, right| {

find_boundary(map, point, FindRange::MultiLine, |left, right| {
(char_kind(&scope, left) != char_kind(&scope, right) && !left.is_whitespace())
|| right == '\n'
})
@ -209,7 +217,8 @@ pub fn next_word_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint
pub fn next_subword_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
let raw_point = point.to_point(map);
let scope = map.buffer_snapshot.language_scope_at(raw_point);
find_boundary(map, point, |left, right| {

find_boundary(map, point, FindRange::MultiLine, |left, right| {
let is_word_end =
(char_kind(&scope, left) != char_kind(&scope, right)) && !left.is_whitespace();
let is_subword_end =
@ -272,79 +281,34 @@ pub fn end_of_paragraph(
map.max_point()
}

/// Scans for a boundary preceding the given start point `from` until a boundary is found, indicated by the
/// given predicate returning true. The predicate is called with the character to the left and right
/// of the candidate boundary location, and will be called with `\n` characters indicating the start
/// or end of a line.
/// Scans for a boundary preceding the given start point `from` until a boundary is found,
/// indicated by the given predicate returning true.
/// The predicate is called with the character to the left and right of the candidate boundary location.
/// If FindRange::SingleLine is specified and no boundary is found before the start of the current line, the start of the current line will be returned.
pub fn find_preceding_boundary(
map: &DisplaySnapshot,
from: DisplayPoint,
find_range: FindRange,
mut is_boundary: impl FnMut(char, char) -> bool,
) -> DisplayPoint {
let mut start_column = 0;
let mut soft_wrap_row = from.row() + 1;
let mut prev_ch = None;
let mut offset = from.to_point(map).to_offset(&map.buffer_snapshot);

let mut prev = None;
for (ch, point) in map.reverse_chars_at(from) {
// Recompute soft_wrap_indent if the row has changed
if point.row() != soft_wrap_row {
soft_wrap_row = point.row();

if point.row() == 0 {
start_column = 0;
} else if let Some(indent) = map.soft_wrap_indent(point.row() - 1) {
start_column = indent;
}
}

// If the current point is in the soft_wrap, skip comparing it
if point.column() < start_column {
continue;
}

if let Some((prev_ch, prev_point)) = prev {
if is_boundary(ch, prev_ch) {
return map.clip_point(prev_point, Bias::Left);
}
}

prev = Some((ch, point));
}
map.clip_point(DisplayPoint::zero(), Bias::Left)
}

/// Scans for a boundary preceding the given start point `from` until a boundary is found, indicated by the
/// given predicate returning true. The predicate is called with the character to the left and right
/// of the candidate boundary location, and will be called with `\n` characters indicating the start
/// or end of a line. If no boundary is found, the start of the line is returned.
pub fn find_preceding_boundary_in_line(
map: &DisplaySnapshot,
from: DisplayPoint,
mut is_boundary: impl FnMut(char, char) -> bool,
) -> DisplayPoint {
let mut start_column = 0;
if from.row() > 0 {
if let Some(indent) = map.soft_wrap_indent(from.row() - 1) {
start_column = indent;
}
}

let mut prev = None;
for (ch, point) in map.reverse_chars_at(from) {
if let Some((prev_ch, prev_point)) = prev {
if is_boundary(ch, prev_ch) {
return map.clip_point(prev_point, Bias::Left);
}
}

if ch == '\n' || point.column() < start_column {
for ch in map.buffer_snapshot.reversed_chars_at(offset) {
if find_range == FindRange::SingleLine && ch == '\n' {
break;
}
if let Some(prev_ch) = prev_ch {
if is_boundary(ch, prev_ch) {
break;
}
}

prev = Some((ch, point));
offset -= ch.len_utf8();
prev_ch = Some(ch);
}

map.clip_point(prev.map(|(_, point)| point).unwrap_or(from), Bias::Left)
map.clip_point(offset.to_display_point(map), Bias::Left)
}

/// Scans for a boundary following the given start point until a boundary is found, indicated by the
@ -354,47 +318,26 @@ pub fn find_preceding_boundary_in_line(
pub fn find_boundary(
map: &DisplaySnapshot,
from: DisplayPoint,
find_range: FindRange,
mut is_boundary: impl FnMut(char, char) -> bool,
) -> DisplayPoint {
let mut offset = from.to_offset(&map, Bias::Right);
let mut prev_ch = None;
for (ch, point) in map.chars_at(from) {
if let Some(prev_ch) = prev_ch {
if is_boundary(prev_ch, ch) {
return map.clip_point(point, Bias::Right);
}
}

prev_ch = Some(ch);
}
map.clip_point(map.max_point(), Bias::Right)
}

/// Scans for a boundary following the given start point until a boundary is found, indicated by the
/// given predicate returning true. The predicate is called with the character to the left and right
/// of the candidate boundary location, and will be called with `\n` characters indicating the start
/// or end of a line. If no boundary is found, the end of the line is returned
pub fn find_boundary_in_line(
map: &DisplaySnapshot,
from: DisplayPoint,
mut is_boundary: impl FnMut(char, char) -> bool,
) -> DisplayPoint {
let mut prev = None;
for (ch, point) in map.chars_at(from) {
if let Some((prev_ch, _)) = prev {
if is_boundary(prev_ch, ch) {
return map.clip_point(point, Bias::Right);
}
}

prev = Some((ch, point));

if ch == '\n' {
for ch in map.buffer_snapshot.chars_at(offset) {
if find_range == FindRange::SingleLine && ch == '\n' {
break;
}
}
if let Some(prev_ch) = prev_ch {
if is_boundary(prev_ch, ch) {
break;
}
}

// Return the last position checked so that we give a point right before the newline or eof.
map.clip_point(prev.map(|(_, point)| point).unwrap_or(from), Bias::Right)
offset += ch.len_utf8();
prev_ch = Some(ch);
}
map.clip_point(offset.to_display_point(map), Bias::Right)
}

|
||||
@ -533,7 +476,12 @@ mod tests {
|
||||
) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
find_preceding_boundary(&snapshot, display_points[1], is_boundary),
|
||||
find_preceding_boundary(
|
||||
&snapshot,
|
||||
display_points[1],
|
||||
FindRange::MultiLine,
|
||||
is_boundary
|
||||
),
|
||||
display_points[0]
|
||||
);
|
||||
}
|
||||
@ -612,21 +560,15 @@ mod tests {
|
||||
find_preceding_boundary(
|
||||
&snapshot,
|
||||
buffer_snapshot.len().to_display_point(&snapshot),
|
||||
|left, _| left == 'a',
|
||||
FindRange::MultiLine,
|
||||
|left, _| left == 'e',
|
||||
),
|
||||
0.to_display_point(&snapshot),
|
||||
snapshot
|
||||
.buffer_snapshot
|
||||
.offset_to_point(5)
|
||||
.to_display_point(&snapshot),
|
||||
"Should not stop at inlays when looking for boundaries"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
find_preceding_boundary_in_line(
|
||||
&snapshot,
|
||||
buffer_snapshot.len().to_display_point(&snapshot),
|
||||
|left, _| left == 'a',
|
||||
),
|
||||
0.to_display_point(&snapshot),
|
||||
"Should not stop at inlays when looking for boundaries in line"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@ -699,7 +641,12 @@ mod tests {
|
||||
) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
find_boundary(&snapshot, display_points[0], is_boundary),
|
||||
find_boundary(
|
||||
&snapshot,
|
||||
display_points[0],
|
||||
FindRange::MultiLine,
|
||||
is_boundary
|
||||
),
|
||||
display_points[1]
|
||||
);
|
||||
}
|
||||
|
@ -378,10 +378,6 @@ impl Editor {
return;
}

if amount.move_context_menu_selection(self, cx) {
return;
}

let cur_position = self.scroll_position(cx);
let new_pos = cur_position + vec2f(0., amount.lines(self));
self.set_scroll_position(new_pos, cx);

@ -1,8 +1,5 @@
use gpui::ViewContext;
use serde::Deserialize;
use util::iife;

use crate::Editor;
use serde::Deserialize;

#[derive(Clone, PartialEq, Deserialize)]
pub enum ScrollAmount {
@ -13,25 +10,6 @@ pub enum ScrollAmount {
}

impl ScrollAmount {
pub fn move_context_menu_selection(
&self,
editor: &mut Editor,
cx: &mut ViewContext<Editor>,
) -> bool {
iife!({
let context_menu = editor.context_menu.as_mut()?;

match self {
Self::Line(c) if *c > 0. => context_menu.select_next(cx),
Self::Line(_) => context_menu.select_prev(cx),
Self::Page(c) if *c > 0. => context_menu.select_last(cx),
Self::Page(_) => context_menu.select_first(cx),
}
.then_some(())
})
.is_some()
}

pub fn lines(&self, editor: &mut Editor) -> f32 {
match self {
Self::Line(count) => *count,
@ -39,7 +17,7 @@ impl ScrollAmount {
.visible_line_count()
// subtract one to leave an anchor line
// round towards zero (so page-up and page-down are symmetric)
.map(|l| ((l - 1.) * count).trunc())
.map(|l| (l * count).trunc() - count.signum())
.unwrap_or(0.),
}
}

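
A worked example of the new rounding rule, assuming a hypothetical viewport of 10 visible lines:

// Old: ((l - 1.) * count).trunc()   New: (l * count).trunc() - count.signum()
let l: f32 = 10.0; // assumed visible_line_count
let lines = |count: f32| (l * count).trunc() - count.signum();
assert_eq!(lines(1.0), 9.0); // page down: 9 lines, keeping one anchor line
assert_eq!(lines(-1.0), -9.0); // page up: same magnitude, so the two motions are symmetric
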
@ -106,6 +106,7 @@ pub struct Deterministic {
parker: parking_lot::Mutex<parking::Parker>,
}

#[must_use]
pub enum Timer {
Production(smol::Timer),
#[cfg(any(test, feature = "test-support"))]

@ -37,8 +37,14 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
Some("seed") => starting_seed = parse_int(&meta.lit)?,
Some("on_failure") => {
if let Lit::Str(name) = meta.lit {
let ident = Ident::new(&name.value(), name.span());
on_failure_fn_name = quote!(Some(#ident));
let mut path = syn::Path {
leading_colon: None,
segments: Default::default(),
};
for part in name.value().split("::") {
path.segments.push(Ident::new(part, name.span()).into());
}
on_failure_fn_name = quote!(Some(#path));
} else {
return Err(TokenStream::from(
syn::Error::new(

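
The switch from a single `Ident` to a `syn::Path` is what lets the attribute value name a handler behind a module path. A sketch of the intended call site (the handler path is illustrative):

// Before this change, on_failure had to be a bare identifier in scope.
// Now each `::`-separated segment becomes one path segment, so this works:
#[gpui::test(on_failure = "crate::tests::save_failure_state")]
async fn test_random_collaboration(cx: &mut TestAppContext) {
    // ...
}
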
@ -127,6 +127,31 @@ pub fn serialize_undo_map_entry(
}
}

pub fn split_operations(
mut operations: Vec<proto::Operation>,
) -> impl Iterator<Item = Vec<proto::Operation>> {
#[cfg(any(test, feature = "test-support"))]
const CHUNK_SIZE: usize = 5;

#[cfg(not(any(test, feature = "test-support")))]
const CHUNK_SIZE: usize = 100;

let mut done = false;
std::iter::from_fn(move || {
if done {
return None;
}

let operations = operations
.drain(..std::cmp::min(CHUNK_SIZE, operations.len()))
.collect::<Vec<_>>();
if operations.is_empty() {
done = true;
}
Some(operations)
})
}

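
A quick sketch of what the chunking yields. Note the iterator always produces a final empty chunk, because `done` is only set after an empty drain (`make_ops` is a hypothetical helper):

// With the test-mode CHUNK_SIZE of 5, twelve operations split as 5, 5, 2,
// followed by one empty chunk that terminates the iterator.
let ops: Vec<proto::Operation> = make_ops(12);
let sizes: Vec<usize> = split_operations(ops).map(|chunk| chunk.len()).collect();
assert_eq!(sizes, [5, 5, 2, 0]);
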
pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto::Selection> {
selections.iter().map(serialize_selection).collect()
}

@ -570,10 +570,12 @@ impl View for LspLogToolbarItemView {
let Some(log_view) = self.log_view.as_ref() else {
return Empty::new().into_any();
};
let log_view = log_view.read(cx);
let menu_rows = log_view.menu_items(cx).unwrap_or_default();
let (menu_rows, current_server_id) = log_view.update(cx, |log_view, cx| {
let menu_rows = log_view.menu_items(cx).unwrap_or_default();
let current_server_id = log_view.current_server_id;
(menu_rows, current_server_id)
});

let current_server_id = log_view.current_server_id;
let current_server = current_server_id.and_then(|current_server_id| {
if let Ok(ix) = menu_rows.binary_search_by_key(&current_server_id, |e| e.server_id) {
Some(menu_rows[ix].clone())
@ -581,10 +583,10 @@ impl View for LspLogToolbarItemView {
None
}
});
let server_selected = current_server.is_some();

enum Menu {}

Stack::new()
let lsp_menu = Stack::new()
.with_child(Self::render_language_server_menu_header(
current_server,
&theme,
@ -631,8 +633,47 @@ impl View for LspLogToolbarItemView {
})
.aligned()
.left()
.clipped()
.into_any()
.clipped();

enum LspCleanupButton {}
let log_cleanup_button =
MouseEventHandler::new::<LspCleanupButton, _>(1, cx, |state, cx| {
let theme = theme::current(cx).clone();
let style = theme
.workspace
.toolbar
.toggleable_text_tool
.in_state(server_selected)
.style_for(state);
Label::new("Clear", style.text.clone())
.aligned()
.contained()
.with_style(style.container)
.constrained()
.with_height(theme.toolbar_dropdown_menu.row_height / 6.0 * 5.0)
})
.on_click(MouseButton::Left, move |_, this, cx| {
if let Some(log_view) = this.log_view.as_ref() {
log_view.update(cx, |log_view, cx| {
log_view.editor.update(cx, |editor, cx| {
editor.set_read_only(false);
editor.clear(cx);
editor.set_read_only(true);
});
})
}
})
.with_cursor_style(CursorStyle::PointingHand)
.aligned()
.right();

Flex::row()
.with_child(lsp_menu)
.with_child(log_cleanup_button)
.contained()
.aligned()
.left()
.into_any_named("lsp log controls")
}
}

@ -42,8 +42,8 @@
"repositoryURL": "https://github.com/apple/swift-protobuf.git",
"state": {
"branch": null,
"revision": "ce20dc083ee485524b802669890291c0d8090170",
"version": "1.22.1"
"revision": "0af9125c4eae12a4973fb66574c53a54962a9e1e",
"version": "1.21.0"
}
}
]

@ -14,6 +14,7 @@ util = { path = "../util" }
async-compression = { version = "0.3", features = ["gzip", "futures-bufread"] }
async-tar = "0.4.2"
futures.workspace = true
async-trait.workspace = true
anyhow.workspace = true
parking_lot.workspace = true
serde.workspace = true

@ -7,14 +7,12 @@ use std::process::{Output, Stdio};
use std::{
env::consts,
path::{Path, PathBuf},
sync::{Arc, OnceLock},
sync::Arc,
};
use util::http::HttpClient;

const VERSION: &str = "v18.15.0";

static RUNTIME_INSTANCE: OnceLock<Arc<NodeRuntime>> = OnceLock::new();

#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct NpmInfo {
@ -28,23 +26,88 @@ pub struct NpmInfoDistTags {
latest: Option<String>,
}

pub struct NodeRuntime {
#[async_trait::async_trait]
pub trait NodeRuntime: Send + Sync {
async fn binary_path(&self) -> Result<PathBuf>;

async fn run_npm_subcommand(
&self,
directory: Option<&Path>,
subcommand: &str,
args: &[&str],
) -> Result<Output>;

async fn npm_package_latest_version(&self, name: &str) -> Result<String>;

async fn npm_install_packages(&self, directory: &Path, packages: &[(&str, &str)])
-> Result<()>;
}

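
This turns the former `OnceLock` singleton into an object-safe trait, so consumers hold an `Arc<dyn NodeRuntime>` and tests can inject a fake. A minimal sketch of a consumer written against the trait (the function itself is illustrative):

// The same code path serves production and tests, via
// RealNodeRuntime::new(http) or FakeNodeRuntime::new() below.
async fn latest_version_of(node: Arc<dyn NodeRuntime>, package: &str) -> Result<String> {
    node.npm_package_latest_version(package).await
}
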
pub struct RealNodeRuntime {
http: Arc<dyn HttpClient>,
}

impl NodeRuntime {
pub fn instance(http: Arc<dyn HttpClient>) -> Arc<NodeRuntime> {
RUNTIME_INSTANCE
.get_or_init(|| Arc::new(NodeRuntime { http }))
.clone()
impl RealNodeRuntime {
pub fn new(http: Arc<dyn HttpClient>) -> Arc<dyn NodeRuntime> {
Arc::new(RealNodeRuntime { http })
}

pub async fn binary_path(&self) -> Result<PathBuf> {
async fn install_if_needed(&self) -> Result<PathBuf> {
log::info!("Node runtime install_if_needed");

let arch = match consts::ARCH {
"x86_64" => "x64",
"aarch64" => "arm64",
other => bail!("Running on unsupported platform: {other}"),
};

let folder_name = format!("node-{VERSION}-darwin-{arch}");
let node_containing_dir = util::paths::SUPPORT_DIR.join("node");
let node_dir = node_containing_dir.join(folder_name);
let node_binary = node_dir.join("bin/node");
let npm_file = node_dir.join("bin/npm");

let result = Command::new(&node_binary)
.arg(npm_file)
.arg("--version")
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null())
.status()
.await;
let valid = matches!(result, Ok(status) if status.success());

if !valid {
_ = fs::remove_dir_all(&node_containing_dir).await;
fs::create_dir(&node_containing_dir)
.await
.context("error creating node containing dir")?;

let file_name = format!("node-{VERSION}-darwin-{arch}.tar.gz");
let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}");
let mut response = self
.http
.get(&url, Default::default(), true)
.await
.context("error downloading Node binary tarball")?;

let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
let archive = Archive::new(decompressed_bytes);
archive.unpack(&node_containing_dir).await?;
}

anyhow::Ok(node_dir)
}
}

#[async_trait::async_trait]
impl NodeRuntime for RealNodeRuntime {
async fn binary_path(&self) -> Result<PathBuf> {
let installation_path = self.install_if_needed().await?;
Ok(installation_path.join("bin/node"))
}

pub async fn run_npm_subcommand(
async fn run_npm_subcommand(
&self,
directory: Option<&Path>,
subcommand: &str,
@ -106,7 +169,7 @@ impl NodeRuntime {
output.map_err(|e| anyhow!("{e}"))
}

pub async fn npm_package_latest_version(&self, name: &str) -> Result<String> {
async fn npm_package_latest_version(&self, name: &str) -> Result<String> {
let output = self
.run_npm_subcommand(
None,
@ -131,10 +194,10 @@ impl NodeRuntime {
.ok_or_else(|| anyhow!("no version found for npm package {}", name))
}

pub async fn npm_install_packages(
async fn npm_install_packages(
&self,
directory: &Path,
packages: impl IntoIterator<Item = (&str, &str)>,
packages: &[(&str, &str)],
) -> Result<()> {
let packages: Vec<_> = packages
.into_iter()
@ -155,51 +218,31 @@ impl NodeRuntime {
.await?;
Ok(())
}
}

async fn install_if_needed(&self) -> Result<PathBuf> {
log::info!("Node runtime install_if_needed");
pub struct FakeNodeRuntime;

let arch = match consts::ARCH {
"x86_64" => "x64",
"aarch64" => "arm64",
other => bail!("Running on unsupported platform: {other}"),
};

let folder_name = format!("node-{VERSION}-darwin-{arch}");
let node_containing_dir = util::paths::SUPPORT_DIR.join("node");
let node_dir = node_containing_dir.join(folder_name);
let node_binary = node_dir.join("bin/node");
let npm_file = node_dir.join("bin/npm");

let result = Command::new(&node_binary)
.arg(npm_file)
.arg("--version")
.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null())
.status()
.await;
let valid = matches!(result, Ok(status) if status.success());

if !valid {
_ = fs::remove_dir_all(&node_containing_dir).await;
fs::create_dir(&node_containing_dir)
.await
.context("error creating node containing dir")?;

let file_name = format!("node-{VERSION}-darwin-{arch}.tar.gz");
let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}");
let mut response = self
.http
.get(&url, Default::default(), true)
.await
.context("error downloading Node binary tarball")?;

let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
let archive = Archive::new(decompressed_bytes);
archive.unpack(&node_containing_dir).await?;
}

anyhow::Ok(node_dir)
impl FakeNodeRuntime {
pub fn new() -> Arc<dyn NodeRuntime> {
Arc::new(FakeNodeRuntime)
}
}

#[async_trait::async_trait]
impl NodeRuntime for FakeNodeRuntime {
async fn binary_path(&self) -> Result<PathBuf> {
unreachable!()
}

async fn run_npm_subcommand(&self, _: Option<&Path>, _: &str, _: &[&str]) -> Result<Output> {
unreachable!()
}

async fn npm_package_latest_version(&self, _: &str) -> Result<String> {
unreachable!()
}

async fn npm_install_packages(&self, _: &Path, _: &[(&str, &str)]) -> Result<()> {
unreachable!()
}
}

@ -35,7 +35,7 @@ use language::{
point_to_lsp,
proto::{
deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
serialize_anchor, serialize_version,
serialize_anchor, serialize_version, split_operations,
},
range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
@ -8200,31 +8200,6 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate {
}
}

fn split_operations(
mut operations: Vec<proto::Operation>,
) -> impl Iterator<Item = Vec<proto::Operation>> {
#[cfg(any(test, feature = "test-support"))]
const CHUNK_SIZE: usize = 5;

#[cfg(not(any(test, feature = "test-support")))]
const CHUNK_SIZE: usize = 100;

let mut done = false;
std::iter::from_fn(move || {
if done {
return None;
}

let operations = operations
.drain(..cmp::min(CHUNK_SIZE, operations.len()))
.collect::<Vec<_>>();
if operations.is_empty() {
done = true;
}
Some(operations)
})
}

fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
proto::Symbol {
language_server_name: symbol.language_server_name.0.to_string(),

@ -1,6 +1,8 @@
syntax = "proto3";
package zed.messages;

// Looking for a number? Search "// Current max"

message PeerId {
uint32 owner_id = 1;
uint32 id = 2;
@ -151,6 +153,9 @@ message Envelope {
LeaveChannelBuffer leave_channel_buffer = 134;
AddChannelBufferCollaborator add_channel_buffer_collaborator = 135;
RemoveChannelBufferCollaborator remove_channel_buffer_collaborator = 136;
UpdateChannelBufferCollaborator update_channel_buffer_collaborator = 139;
RejoinChannelBuffers rejoin_channel_buffers = 140;
RejoinChannelBuffersResponse rejoin_channel_buffers_response = 141; // Current max
}
}

@ -430,6 +435,12 @@ message RemoveChannelBufferCollaborator {
PeerId peer_id = 2;
}

message UpdateChannelBufferCollaborator {
uint64 channel_id = 1;
PeerId old_peer_id = 2;
PeerId new_peer_id = 3;
}

message GetDefinition {
uint64 project_id = 1;
uint64 buffer_id = 2;
@ -616,6 +627,12 @@ message BufferVersion {
repeated VectorClockEntry version = 2;
}

message ChannelBufferVersion {
uint64 channel_id = 1;
repeated VectorClockEntry version = 2;
uint64 epoch = 3;
}

enum FormatTrigger {
Save = 0;
Manual = 1;
@ -1008,12 +1025,28 @@ message JoinChannelBuffer {
uint64 channel_id = 1;
}

message RejoinChannelBuffers {
repeated ChannelBufferVersion buffers = 1;
}

message RejoinChannelBuffersResponse {
repeated RejoinedChannelBuffer buffers = 1;
}

message JoinChannelBufferResponse {
uint64 buffer_id = 1;
uint32 replica_id = 2;
string base_text = 3;
repeated Operation operations = 4;
repeated Collaborator collaborators = 5;
uint64 epoch = 6;
}

message RejoinedChannelBuffer {
uint64 channel_id = 1;
repeated VectorClockEntry version = 2;
repeated Operation operations = 3;
repeated Collaborator collaborators = 4;
}

message LeaveChannelBuffer {

@ -229,6 +229,8 @@ messages!(
(StartLanguageServer, Foreground),
(SynchronizeBuffers, Foreground),
(SynchronizeBuffersResponse, Foreground),
(RejoinChannelBuffers, Foreground),
(RejoinChannelBuffersResponse, Foreground),
(Test, Foreground),
(Unfollow, Foreground),
(UnshareProject, Foreground),
@ -257,6 +259,7 @@ messages!(
(UpdateChannelBuffer, Foreground),
(RemoveChannelBufferCollaborator, Foreground),
(AddChannelBufferCollaborator, Foreground),
(UpdateChannelBufferCollaborator, Foreground),
);

request_messages!(
@ -319,6 +322,7 @@ request_messages!(
(SearchProject, SearchProjectResponse),
(ShareProject, ShareProjectResponse),
(SynchronizeBuffers, SynchronizeBuffersResponse),
(RejoinChannelBuffers, RejoinChannelBuffersResponse),
(Test, Test),
(UpdateBuffer, Ack),
(UpdateParticipantLocation, Ack),
@ -386,7 +390,8 @@ entity_messages!(
channel_id,
UpdateChannelBuffer,
RemoveChannelBufferCollaborator,
AddChannelBufferCollaborator
AddChannelBufferCollaborator,
UpdateChannelBufferCollaborator
);

const KIB: usize = 1024;

@ -12,15 +12,13 @@ use editor::{
SelectAll, MAX_TAB_TITLE_LEN,
};
use futures::StreamExt;

use gpui::platform::PromptLevel;

use gpui::{
actions, elements::*, platform::MouseButton, Action, AnyElement, AnyViewHandle, AppContext,
Entity, ModelContext, ModelHandle, Subscription, Task, View, ViewContext, ViewHandle,
WeakModelHandle, WeakViewHandle,
actions,
elements::*,
platform::{MouseButton, PromptLevel},
Action, AnyElement, AnyViewHandle, AppContext, Entity, ModelContext, ModelHandle, Subscription,
Task, View, ViewContext, ViewHandle, WeakModelHandle, WeakViewHandle,
};

use menu::Confirm;
use postage::stream::Stream;
use project::{
@ -132,8 +130,7 @@ pub struct ProjectSearchView {
}

struct SemanticSearchState {
file_count: usize,
outstanding_file_count: usize,
pending_file_count: usize,
_progress_task: Task<()>,
}

@ -319,12 +316,8 @@ impl View for ProjectSearchView {
};

let semantic_status = if let Some(semantic) = &self.semantic_state {
if semantic.outstanding_file_count > 0 {
format!(
"Indexing: {} of {}...",
semantic.file_count - semantic.outstanding_file_count,
semantic.file_count
)
if semantic.pending_file_count > 0 {
format!("Remaining files to index: {}", semantic.pending_file_count)
} else {
"Indexing complete".to_string()
}
@ -641,26 +634,27 @@ impl ProjectSearchView {

let project = self.model.read(cx).project.clone();

let index_task = semantic_index.update(cx, |semantic_index, cx| {
semantic_index.index_project(project, cx)
let mut pending_file_count_rx = semantic_index.update(cx, |semantic_index, cx| {
semantic_index
.index_project(project.clone(), cx)
.detach_and_log_err(cx);
semantic_index.pending_file_count(&project).unwrap()
});

cx.spawn(|search_view, mut cx| async move {
let (files_to_index, mut files_remaining_rx) = index_task.await?;

search_view.update(&mut cx, |search_view, cx| {
cx.notify();
let pending_file_count = *pending_file_count_rx.borrow();
search_view.semantic_state = Some(SemanticSearchState {
file_count: files_to_index,
outstanding_file_count: files_to_index,
pending_file_count,
_progress_task: cx.spawn(|search_view, mut cx| async move {
while let Some(count) = files_remaining_rx.recv().await {
while let Some(count) = pending_file_count_rx.recv().await {
search_view
.update(&mut cx, |search_view, cx| {
if let Some(semantic_search_state) =
&mut search_view.semantic_state
{
semantic_search_state.outstanding_file_count = count;
semantic_search_state.pending_file_count = count;
cx.notify();
if count == 0 {
return;
@ -959,7 +953,7 @@ impl ProjectSearchView {
match mode {
SearchMode::Semantic => {
if let Some(semantic) = &mut self.semantic_state {
if semantic.outstanding_file_count > 0 {
if semantic.pending_file_count > 0 {
return;
}

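
The old API returned a one-shot pair (total count plus a receiver) from `index_project`; the new one exposes a watch channel owned by the index, so late subscribers still see the current count. A tiny sketch of the consumption pattern, using only the receiver operations shown above (the receiver name is illustrative):

// Read the latest pending count synchronously...
let pending = *pending_file_count_rx.borrow();
// ...then observe every subsequent change until indexing finishes.
while let Some(count) = pending_file_count_rx.recv().await {
    if count == 0 {
        break;
    }
}
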
@ -9,6 +9,7 @@ path = "src/semantic_index.rs"
doctest = false

[dependencies]
collections = { path = "../collections" }
gpui = { path = "../gpui" }
language = { path = "../language" }
project = { path = "../project" }
@ -39,8 +40,10 @@ rand.workspace = true
schemars.workspace = true
globset.workspace = true
sha1 = "0.10.5"
parse_duration = "2.1.1"

[dev-dependencies]
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }

@ -1,20 +1,26 @@
use crate::{parsing::Document, SEMANTIC_INDEX_VERSION};
use crate::{
embedding::Embedding,
parsing::{Span, SpanDigest},
SEMANTIC_INDEX_VERSION,
};
use anyhow::{anyhow, Context, Result};
use collections::HashMap;
use futures::channel::oneshot;
use gpui::executor;
use project::{search::PathMatcher, Fs};
use rpc::proto::Timestamp;
use rusqlite::{
params,
types::{FromSql, FromSqlResult, ValueRef},
};
use rusqlite::params;
use rusqlite::types::Value;
use std::{
cmp::Ordering,
collections::HashMap,
future::Future,
ops::Range,
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
time::SystemTime,
time::{Instant, SystemTime},
};
use util::TryFutureExt;

#[derive(Debug)]
pub struct FileRecord {
@ -23,286 +29,371 @@ pub struct FileRecord {
pub mtime: Timestamp,
}

#[derive(Debug)]
struct Embedding(pub Vec<f32>);

#[derive(Debug)]
struct Sha1(pub Vec<u8>);

impl FromSql for Embedding {
fn column_result(value: ValueRef) -> FromSqlResult<Self> {
let bytes = value.as_blob()?;
let embedding: Result<Vec<f32>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
if embedding.is_err() {
return Err(rusqlite::types::FromSqlError::Other(embedding.unwrap_err()));
}
return Ok(Embedding(embedding.unwrap()));
}
}

impl FromSql for Sha1 {
fn column_result(value: ValueRef) -> FromSqlResult<Self> {
let bytes = value.as_blob()?;
let sha1: Result<Vec<u8>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
if sha1.is_err() {
return Err(rusqlite::types::FromSqlError::Other(sha1.unwrap_err()));
}
return Ok(Sha1(sha1.unwrap()));
}
}

#[derive(Clone)]
pub struct VectorDatabase {
db: rusqlite::Connection,
path: Arc<Path>,
transactions:
smol::channel::Sender<Box<dyn 'static + Send + FnOnce(&mut rusqlite::Connection)>>,
}

impl VectorDatabase {
pub async fn new(fs: Arc<dyn Fs>, path: Arc<PathBuf>) -> Result<Self> {
pub async fn new(
fs: Arc<dyn Fs>,
path: Arc<Path>,
executor: Arc<executor::Background>,
) -> Result<Self> {
if let Some(db_directory) = path.parent() {
fs.create_dir(db_directory).await?;
}

let (transactions_tx, transactions_rx) = smol::channel::unbounded::<
Box<dyn 'static + Send + FnOnce(&mut rusqlite::Connection)>,
>();
executor
.spawn({
let path = path.clone();
async move {
let mut connection = rusqlite::Connection::open(&path)?;

connection.pragma_update(None, "journal_mode", "wal")?;
connection.pragma_update(None, "synchronous", "normal")?;
connection.pragma_update(None, "cache_size", 1000000)?;
connection.pragma_update(None, "temp_store", "MEMORY")?;

while let Ok(transaction) = transactions_rx.recv().await {
transaction(&mut connection);
}

anyhow::Ok(())
}
.log_err()
})
.detach();
let this = Self {
db: rusqlite::Connection::open(path.as_path())?,
transactions: transactions_tx,
path,
};
this.initialize_database()?;
this.initialize_database().await?;
Ok(this)
}

fn get_existing_version(&self) -> Result<i64> {
let mut version_query = self
.db
.prepare("SELECT version from semantic_index_config")?;
version_query
.query_row([], |row| Ok(row.get::<_, i64>(0)?))
.map_err(|err| anyhow!("version query failed: {err}"))
pub fn path(&self) -> &Arc<Path> {
&self.path
}

fn initialize_database(&self) -> Result<()> {
rusqlite::vtab::array::load_module(&self.db)?;

// Delete existing tables, if SEMANTIC_INDEX_VERSION is bumped
if self
.get_existing_version()
.map_or(false, |version| version == SEMANTIC_INDEX_VERSION as i64)
{
log::trace!("vector database schema up to date");
return Ok(());
fn transact<F, T>(&self, f: F) -> impl Future<Output = Result<T>>
where
F: 'static + Send + FnOnce(&rusqlite::Transaction) -> Result<T>,
T: 'static + Send,
{
let (tx, rx) = oneshot::channel();
let transactions = self.transactions.clone();
async move {
if transactions
.send(Box::new(|connection| {
let result = connection
.transaction()
.map_err(|err| anyhow!(err))
.and_then(|transaction| {
let result = f(&transaction)?;
transaction.commit()?;
Ok(result)
});
let _ = tx.send(result);
}))
.await
.is_err()
{
return Err(anyhow!("connection was dropped"))?;
}
rx.await?
}

log::trace!("vector database schema out of date. updating...");
self.db
.execute("DROP TABLE IF EXISTS documents", [])
.context("failed to drop 'documents' table")?;
self.db
.execute("DROP TABLE IF EXISTS files", [])
.context("failed to drop 'files' table")?;
self.db
.execute("DROP TABLE IF EXISTS worktrees", [])
.context("failed to drop 'worktrees' table")?;
self.db
.execute("DROP TABLE IF EXISTS semantic_index_config", [])
.context("failed to drop 'semantic_index_config' table")?;

// Initialize Vector Databasing Tables
self.db.execute(
"CREATE TABLE semantic_index_config (
version INTEGER NOT NULL
)",
[],
)?;

self.db.execute(
"INSERT INTO semantic_index_config (version) VALUES (?1)",
params![SEMANTIC_INDEX_VERSION],
)?;

self.db.execute(
"CREATE TABLE worktrees (
id INTEGER PRIMARY KEY AUTOINCREMENT,
absolute_path VARCHAR NOT NULL
);
CREATE UNIQUE INDEX worktrees_absolute_path ON worktrees (absolute_path);
",
[],
)?;

self.db.execute(
"CREATE TABLE files (
id INTEGER PRIMARY KEY AUTOINCREMENT,
worktree_id INTEGER NOT NULL,
relative_path VARCHAR NOT NULL,
mtime_seconds INTEGER NOT NULL,
mtime_nanos INTEGER NOT NULL,
FOREIGN KEY(worktree_id) REFERENCES worktrees(id) ON DELETE CASCADE
)",
[],
)?;

self.db.execute(
"CREATE TABLE documents (
id INTEGER PRIMARY KEY AUTOINCREMENT,
file_id INTEGER NOT NULL,
start_byte INTEGER NOT NULL,
end_byte INTEGER NOT NULL,
name VARCHAR NOT NULL,
embedding BLOB NOT NULL,
sha1 BLOB NOT NULL,
FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
)",
[],
)?;

log::trace!("vector database initialized with updated schema.");
Ok(())
}

pub fn delete_file(&self, worktree_id: i64, delete_path: PathBuf) -> Result<()> {
self.db.execute(
"DELETE FROM files WHERE worktree_id = ?1 AND relative_path = ?2",
params![worktree_id, delete_path.to_str()],
)?;
Ok(())
fn initialize_database(&self) -> impl Future<Output = Result<()>> {
|
||||
self.transact(|db| {
|
||||
rusqlite::vtab::array::load_module(&db)?;
|
||||
|
||||
// Delete existing tables, if SEMANTIC_INDEX_VERSION is bumped
|
||||
let version_query = db.prepare("SELECT version from semantic_index_config");
|
||||
let version = version_query
|
||||
.and_then(|mut query| query.query_row([], |row| Ok(row.get::<_, i64>(0)?)));
|
||||
if version.map_or(false, |version| version == SEMANTIC_INDEX_VERSION as i64) {
|
||||
log::trace!("vector database schema up to date");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
log::trace!("vector database schema out of date. updating...");
|
||||
// We renamed the `documents` table to `spans`, so we want to drop
|
||||
// `documents` without recreating it if it exists.
|
||||
db.execute("DROP TABLE IF EXISTS documents", [])
|
||||
.context("failed to drop 'documents' table")?;
|
||||
db.execute("DROP TABLE IF EXISTS spans", [])
|
||||
.context("failed to drop 'spans' table")?;
|
||||
db.execute("DROP TABLE IF EXISTS files", [])
|
||||
.context("failed to drop 'files' table")?;
|
||||
db.execute("DROP TABLE IF EXISTS worktrees", [])
|
||||
.context("failed to drop 'worktrees' table")?;
|
||||
db.execute("DROP TABLE IF EXISTS semantic_index_config", [])
|
||||
.context("failed to drop 'semantic_index_config' table")?;
|
||||
|
||||
// Initialize Vector Databasing Tables
|
||||
db.execute(
|
||||
"CREATE TABLE semantic_index_config (
|
||||
version INTEGER NOT NULL
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"INSERT INTO semantic_index_config (version) VALUES (?1)",
|
||||
params![SEMANTIC_INDEX_VERSION],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"CREATE TABLE worktrees (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
absolute_path VARCHAR NOT NULL
|
||||
);
|
||||
CREATE UNIQUE INDEX worktrees_absolute_path ON worktrees (absolute_path);
|
||||
",
|
||||
[],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"CREATE TABLE files (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
worktree_id INTEGER NOT NULL,
|
||||
relative_path VARCHAR NOT NULL,
|
||||
mtime_seconds INTEGER NOT NULL,
|
||||
mtime_nanos INTEGER NOT NULL,
|
||||
FOREIGN KEY(worktree_id) REFERENCES worktrees(id) ON DELETE CASCADE
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"CREATE UNIQUE INDEX files_worktree_id_and_relative_path ON files (worktree_id, relative_path)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
db.execute(
|
||||
"CREATE TABLE spans (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
file_id INTEGER NOT NULL,
|
||||
start_byte INTEGER NOT NULL,
|
||||
end_byte INTEGER NOT NULL,
|
||||
name VARCHAR NOT NULL,
|
||||
embedding BLOB NOT NULL,
|
||||
digest BLOB NOT NULL,
|
||||
FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
|
||||
)",
|
||||
[],
|
||||
)?;
|
||||
|
||||
log::trace!("vector database initialized with updated schema.");
|
||||
Ok(())
|
||||
})
|
||||
}
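
    // `transact` is used throughout this change but its definition lies outside
    // this hunk. A minimal sketch of what such a helper could look like -- an
    // assumption for illustration, not the actual implementation (the real one
    // also hands back an `impl Future`):
    //
    //     fn transact<T>(
    //         db: &mut rusqlite::Connection,
    //         f: impl FnOnce(&rusqlite::Connection) -> anyhow::Result<T>,
    //     ) -> anyhow::Result<T> {
    //         let tx = db.transaction()?; // BEGIN
    //         let result = f(&tx)?;       // run the caller's statements
    //         tx.commit()?;               // COMMIT only if the closure succeeded
    //         Ok(result)
    //     }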

    pub fn delete_file(
        &self,
        worktree_id: i64,
        delete_path: Arc<Path>,
    ) -> impl Future<Output = Result<()>> {
        self.transact(move |db| {
            db.execute(
                "DELETE FROM files WHERE worktree_id = ?1 AND relative_path = ?2",
                params![worktree_id, delete_path.to_str()],
            )?;
            Ok(())
        })
    }

    pub fn insert_file(
        &self,
        worktree_id: i64,
        path: PathBuf,
        path: Arc<Path>,
        mtime: SystemTime,
        documents: Vec<Document>,
    ) -> Result<()> {
        // Return the existing ID if both the file and mtime match
        let mtime = Timestamp::from(mtime);
        let mut existing_id_query = self.db.prepare("SELECT id FROM files WHERE worktree_id = ?1 AND relative_path = ?2 AND mtime_seconds = ?3 AND mtime_nanos = ?4")?;
        let existing_id = existing_id_query
            .query_row(
        spans: Vec<Span>,
    ) -> impl Future<Output = Result<()>> {
        self.transact(move |db| {
            // Return the existing ID if both the file and mtime match
            let mtime = Timestamp::from(mtime);

            db.execute(
                "
                REPLACE INTO files
                (worktree_id, relative_path, mtime_seconds, mtime_nanos)
                VALUES (?1, ?2, ?3, ?4)
                ",
                params![worktree_id, path.to_str(), mtime.seconds, mtime.nanos],
                |row| Ok(row.get::<_, i64>(0)?),
            )
            .map_err(|err| anyhow!(err));
        let file_id = if existing_id.is_ok() {
            // If already exists, just return the existing id
            existing_id.unwrap()
        } else {
            // Delete existing row
            self.db.execute(
                "DELETE FROM files WHERE worktree_id = ?1 AND relative_path = ?2;",
                params![worktree_id, path.to_str()],
            )?;
            self.db.execute("INSERT INTO files (worktree_id, relative_path, mtime_seconds, mtime_nanos) VALUES (?1, ?2, ?3, ?4);", params![worktree_id, path.to_str(), mtime.seconds, mtime.nanos])?;
            self.db.last_insert_rowid()
        };

        // Currently inserting at approximately 3400 documents a second
        // I imagine we can speed this up with a bulk insert of some kind.
        for document in documents {
            let embedding_blob = bincode::serialize(&document.embedding)?;
            let sha_blob = bincode::serialize(&document.sha1)?;
            let file_id = db.last_insert_rowid();

            self.db.execute(
                "INSERT INTO documents (file_id, start_byte, end_byte, name, embedding, sha1) VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
                params![
            let t0 = Instant::now();
            let mut query = db.prepare(
                "
                INSERT INTO spans
                (file_id, start_byte, end_byte, name, embedding, digest)
                VALUES (?1, ?2, ?3, ?4, ?5, ?6)
                ",
            )?;
            log::trace!(
                "Preparing Query Took: {:?} milliseconds",
                t0.elapsed().as_millis()
            );

            for span in spans {
                query.execute(params![
                    file_id,
                    document.range.start.to_string(),
                    document.range.end.to_string(),
                    document.name,
                    embedding_blob,
                    sha_blob
                ],
            )?;
        }
                    span.range.start.to_string(),
                    span.range.end.to_string(),
                    span.name,
                    span.embedding,
                    span.digest
                ])?;
            }

        Ok(())
            Ok(())
        })
    }

    pub fn worktree_previously_indexed(&self, worktree_root_path: &Path) -> Result<bool> {
        let mut worktree_query = self
            .db
            .prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
        let worktree_id = worktree_query
            .query_row(params![worktree_root_path.to_string_lossy()], |row| {
                Ok(row.get::<_, i64>(0)?)
            })
            .map_err(|err| anyhow!(err));
    pub fn worktree_previously_indexed(
        &self,
        worktree_root_path: &Path,
    ) -> impl Future<Output = Result<bool>> {
        let worktree_root_path = worktree_root_path.to_string_lossy().into_owned();
        self.transact(move |db| {
            let mut worktree_query =
                db.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
            let worktree_id = worktree_query
                .query_row(params![worktree_root_path], |row| Ok(row.get::<_, i64>(0)?));

        if worktree_id.is_ok() {
            return Ok(true);
        } else {
            return Ok(false);
        }
            if worktree_id.is_ok() {
                return Ok(true);
            } else {
                return Ok(false);
            }
        })
    }

    pub fn find_or_create_worktree(&self, worktree_root_path: &Path) -> Result<i64> {
        // Check whether the absolute path already exists
        let mut worktree_query = self
            .db
            .prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;

        let worktree_id = worktree_query
            .query_row(params![worktree_root_path.to_string_lossy()], |row| {
                Ok(row.get::<_, i64>(0)?)
            })
            .map_err(|err| anyhow!(err));

        if worktree_id.is_ok() {
            return worktree_id;
        }

        // If worktree_id is Err, insert new worktree
        self.db.execute(
            "
            INSERT into worktrees (absolute_path) VALUES (?1)
    pub fn embeddings_for_files(
        &self,
        worktree_id_file_paths: HashMap<i64, Vec<Arc<Path>>>,
    ) -> impl Future<Output = Result<HashMap<SpanDigest, Embedding>>> {
        self.transact(move |db| {
            let mut query = db.prepare(
                "
                SELECT digest, embedding
                FROM spans
                LEFT JOIN files ON files.id = spans.file_id
                WHERE files.worktree_id = ? AND files.relative_path IN rarray(?)
                ",
            params![worktree_root_path.to_string_lossy()],
        )?;
        Ok(self.db.last_insert_rowid())
            )?;
            let mut embeddings_by_digest = HashMap::default();
            for (worktree_id, file_paths) in worktree_id_file_paths {
                let file_paths = Rc::new(
                    file_paths
                        .into_iter()
                        .map(|p| Value::Text(p.to_string_lossy().into_owned()))
                        .collect::<Vec<_>>(),
                );
                let rows = query.query_map(params![worktree_id, file_paths], |row| {
                    Ok((row.get::<_, SpanDigest>(0)?, row.get::<_, Embedding>(1)?))
                })?;

                for row in rows {
                    if let Ok(row) = row {
                        embeddings_by_digest.insert(row.0, row.1);
                    }
                }
            }

            Ok(embeddings_by_digest)
        })
    }
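
    // `rarray(?)` above is rusqlite's array virtual table (loaded in
    // `initialize_database` via `rusqlite::vtab::array::load_module`); it lets an
    // `Rc<Vec<Value>>` bind as a SQL set. A self-contained sketch, assuming
    // rusqlite is built with the `array` feature:
    //
    //     use std::rc::Rc;
    //     use rusqlite::{types::Value, vtab::array, Connection};
    //
    //     fn ids_in_set_demo() -> rusqlite::Result<Vec<i64>> {
    //         let db = Connection::open_in_memory()?;
    //         array::load_module(&db)?;
    //         let ids = Rc::new(vec![Value::Integer(1), Value::Integer(3)]);
    //         let mut stmt = db.prepare("SELECT value FROM rarray(?)")?;
    //         let rows = stmt.query_map([ids], |row| row.get::<_, i64>(0))?;
    //         rows.collect()
    //     }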

    pub fn get_file_mtimes(&self, worktree_id: i64) -> Result<HashMap<PathBuf, SystemTime>> {
        let mut statement = self.db.prepare(
            "
            SELECT relative_path, mtime_seconds, mtime_nanos
            FROM files
            WHERE worktree_id = ?1
            ORDER BY relative_path",
        )?;
        let mut result: HashMap<PathBuf, SystemTime> = HashMap::new();
        for row in statement.query_map(params![worktree_id], |row| {
            Ok((
                row.get::<_, String>(0)?.into(),
                Timestamp {
                    seconds: row.get(1)?,
                    nanos: row.get(2)?,
                }
                .into(),
            ))
        })? {
            let row = row?;
            result.insert(row.0, row.1);
        }
        Ok(result)
    pub fn find_or_create_worktree(
        &self,
        worktree_root_path: Arc<Path>,
    ) -> impl Future<Output = Result<i64>> {
        self.transact(move |db| {
            let mut worktree_query =
                db.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
            let worktree_id = worktree_query
                .query_row(params![worktree_root_path.to_string_lossy()], |row| {
                    Ok(row.get::<_, i64>(0)?)
                });

            if worktree_id.is_ok() {
                return Ok(worktree_id?);
            }

            // If worktree_id is Err, insert new worktree
            db.execute(
                "INSERT into worktrees (absolute_path) VALUES (?1)",
                params![worktree_root_path.to_string_lossy()],
            )?;
            Ok(db.last_insert_rowid())
        })
    }

    pub fn get_file_mtimes(
        &self,
        worktree_id: i64,
    ) -> impl Future<Output = Result<HashMap<PathBuf, SystemTime>>> {
        self.transact(move |db| {
            let mut statement = db.prepare(
                "
                SELECT relative_path, mtime_seconds, mtime_nanos
                FROM files
                WHERE worktree_id = ?1
                ORDER BY relative_path",
            )?;
            let mut result: HashMap<PathBuf, SystemTime> = HashMap::default();
            for row in statement.query_map(params![worktree_id], |row| {
                Ok((
                    row.get::<_, String>(0)?.into(),
                    Timestamp {
                        seconds: row.get(1)?,
                        nanos: row.get(2)?,
                    }
                    .into(),
                ))
            })? {
                let row = row?;
                result.insert(row.0, row.1);
            }
            Ok(result)
        })
    }

    pub fn top_k_search(
        &self,
        query_embedding: &Vec<f32>,
        query_embedding: &Embedding,
        limit: usize,
        file_ids: &[i64],
    ) -> Result<Vec<(i64, f32)>> {
        let mut results = Vec::<(i64, f32)>::with_capacity(limit + 1);
        self.for_each_document(file_ids, |id, embedding| {
            let similarity = dot(&embedding, &query_embedding);
            let ix = match results
                .binary_search_by(|(_, s)| similarity.partial_cmp(&s).unwrap_or(Ordering::Equal))
            {
                Ok(ix) => ix,
                Err(ix) => ix,
            };
            results.insert(ix, (id, similarity));
            results.truncate(limit);
        })?;
    ) -> impl Future<Output = Result<Vec<(i64, f32)>>> {
        let query_embedding = query_embedding.clone();
        let file_ids = file_ids.to_vec();
        self.transact(move |db| {
            let mut results = Vec::<(i64, f32)>::with_capacity(limit + 1);
            Self::for_each_span(db, &file_ids, |id, embedding| {
                let similarity = embedding.similarity(&query_embedding);
                let ix = match results.binary_search_by(|(_, s)| {
                    similarity.partial_cmp(&s).unwrap_or(Ordering::Equal)
                }) {
                    Ok(ix) => ix,
                    Err(ix) => ix,
                };
                results.insert(ix, (id, similarity));
                results.truncate(limit);
            })?;

        Ok(results)
            anyhow::Ok(results)
        })
    }
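
    // The search above keeps `results` sorted by descending similarity with a
    // bounded insertion sort: the flipped comparison (`similarity.partial_cmp(&s)`
    // rather than `s.partial_cmp(&similarity)`) yields the insertion point for a
    // descending order, and `truncate(limit)` bounds the memory. The same pattern
    // on plain scores, for illustration only:
    //
    //     use std::cmp::Ordering;
    //
    //     fn top_k(scores: impl IntoIterator<Item = (i64, f32)>, limit: usize) -> Vec<(i64, f32)> {
    //         let mut results: Vec<(i64, f32)> = Vec::with_capacity(limit + 1);
    //         for (id, score) in scores {
    //             let ix = results
    //                 .binary_search_by(|(_, s)| score.partial_cmp(s).unwrap_or(Ordering::Equal))
    //                 .unwrap_or_else(|ix| ix);
    //             results.insert(ix, (id, score));
    //             results.truncate(limit);
    //         }
    //         results
    //     }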

    pub fn retrieve_included_file_ids(
@ -310,42 +401,51 @@ impl VectorDatabase {
        worktree_ids: &[i64],
        includes: &[PathMatcher],
        excludes: &[PathMatcher],
    ) -> Result<Vec<i64>> {
        let mut file_query = self.db.prepare(
            "
            SELECT
                id, relative_path
            FROM
                files
            WHERE
                worktree_id IN rarray(?)
            ",
        )?;
    ) -> impl Future<Output = Result<Vec<i64>>> {
        let worktree_ids = worktree_ids.to_vec();
        let includes = includes.to_vec();
        let excludes = excludes.to_vec();
        self.transact(move |db| {
            let mut file_query = db.prepare(
                "
                SELECT
                    id, relative_path
                FROM
                    files
                WHERE
                    worktree_id IN rarray(?)
                ",
            )?;

        let mut file_ids = Vec::<i64>::new();
        let mut rows = file_query.query([ids_to_sql(worktree_ids)])?;
            let mut file_ids = Vec::<i64>::new();
            let mut rows = file_query.query([ids_to_sql(&worktree_ids)])?;

        while let Some(row) = rows.next()? {
            let file_id = row.get(0)?;
            let relative_path = row.get_ref(1)?.as_str()?;
            let included =
                includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
            let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
            if included && !excluded {
                file_ids.push(file_id);
            while let Some(row) = rows.next()? {
                let file_id = row.get(0)?;
                let relative_path = row.get_ref(1)?.as_str()?;
                let included =
                    includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
                let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
                if included && !excluded {
                    file_ids.push(file_id);
                }
            }
        }

        Ok(file_ids)
            anyhow::Ok(file_ids)
        })
    }

    fn for_each_document(&self, file_ids: &[i64], mut f: impl FnMut(i64, Vec<f32>)) -> Result<()> {
        let mut query_statement = self.db.prepare(
    fn for_each_span(
        db: &rusqlite::Connection,
        file_ids: &[i64],
        mut f: impl FnMut(i64, Embedding),
    ) -> Result<()> {
        let mut query_statement = db.prepare(
            "
            SELECT
                id, embedding
            FROM
                documents
                spans
            WHERE
                file_id IN rarray(?)
            ",
@ -356,51 +456,57 @@ impl VectorDatabase {
                Ok((row.get(0)?, row.get::<_, Embedding>(1)?))
            })?
            .filter_map(|row| row.ok())
            .for_each(|(id, embedding)| f(id, embedding.0));
            .for_each(|(id, embedding)| f(id, embedding));
        Ok(())
    }

    pub fn get_documents_by_ids(&self, ids: &[i64]) -> Result<Vec<(i64, PathBuf, Range<usize>)>> {
        let mut statement = self.db.prepare(
            "
            SELECT
                documents.id,
                files.worktree_id,
                files.relative_path,
                documents.start_byte,
                documents.end_byte
            FROM
                documents, files
            WHERE
                documents.file_id = files.id AND
                documents.id in rarray(?)
            ",
        )?;
    pub fn spans_for_ids(
        &self,
        ids: &[i64],
    ) -> impl Future<Output = Result<Vec<(i64, PathBuf, Range<usize>)>>> {
        let ids = ids.to_vec();
        self.transact(move |db| {
            let mut statement = db.prepare(
                "
                SELECT
                    spans.id,
                    files.worktree_id,
                    files.relative_path,
                    spans.start_byte,
                    spans.end_byte
                FROM
                    spans, files
                WHERE
                    spans.file_id = files.id AND
                    spans.id in rarray(?)
                ",
            )?;

        let result_iter = statement.query_map(params![ids_to_sql(ids)], |row| {
            Ok((
                row.get::<_, i64>(0)?,
                row.get::<_, i64>(1)?,
                row.get::<_, String>(2)?.into(),
                row.get(3)?..row.get(4)?,
            ))
        })?;
            let result_iter = statement.query_map(params![ids_to_sql(&ids)], |row| {
                Ok((
                    row.get::<_, i64>(0)?,
                    row.get::<_, i64>(1)?,
                    row.get::<_, String>(2)?.into(),
                    row.get(3)?..row.get(4)?,
                ))
            })?;

        let mut values_by_id = HashMap::<i64, (i64, PathBuf, Range<usize>)>::default();
        for row in result_iter {
            let (id, worktree_id, path, range) = row?;
            values_by_id.insert(id, (worktree_id, path, range));
        }
            let mut values_by_id = HashMap::<i64, (i64, PathBuf, Range<usize>)>::default();
            for row in result_iter {
                let (id, worktree_id, path, range) = row?;
                values_by_id.insert(id, (worktree_id, path, range));
            }

        let mut results = Vec::with_capacity(ids.len());
        for id in ids {
            let value = values_by_id
                .remove(id)
                .ok_or(anyhow!("missing document id {}", id))?;
            results.push(value);
        }
            let mut results = Vec::with_capacity(ids.len());
            for id in &ids {
                let value = values_by_id
                    .remove(id)
                    .ok_or(anyhow!("missing span id {}", id))?;
                results.push(value);
            }

        Ok(results)
            Ok(results)
        })
    }
}

@ -412,29 +518,3 @@ fn ids_to_sql(ids: &[i64]) -> Rc<Vec<rusqlite::types::Value>> {
            .collect::<Vec<_>>(),
    )
}

pub(crate) fn dot(vec_a: &[f32], vec_b: &[f32]) -> f32 {
    let len = vec_a.len();
    assert_eq!(len, vec_b.len());

    let mut result = 0.0;
    unsafe {
        matrixmultiply::sgemm(
            1,
            len,
            1,
            1.0,
            vec_a.as_ptr(),
            len as isize,
            1,
            vec_b.as_ptr(),
            1,
            len as isize,
            0.0,
            &mut result as *mut f32,
            1,
            1,
        );
    }
    result
}
@ -7,6 +7,9 @@ use isahc::http::StatusCode;
use isahc::prelude::Configurable;
use isahc::{AsyncBody, Response};
use lazy_static::lazy_static;
use parse_duration::parse;
use rusqlite::types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef};
use rusqlite::ToSql;
use serde::{Deserialize, Serialize};
use std::env;
use std::sync::Arc;
@ -19,6 +22,62 @@ lazy_static! {
    static ref OPENAI_BPE_TOKENIZER: CoreBPE = cl100k_base().unwrap();
}

#[derive(Debug, PartialEq, Clone)]
pub struct Embedding(Vec<f32>);

impl From<Vec<f32>> for Embedding {
    fn from(value: Vec<f32>) -> Self {
        Embedding(value)
    }
}

impl Embedding {
    pub fn similarity(&self, other: &Self) -> f32 {
        let len = self.0.len();
        assert_eq!(len, other.0.len());

        let mut result = 0.0;
        unsafe {
            matrixmultiply::sgemm(
                1,
                len,
                1,
                1.0,
                self.0.as_ptr(),
                len as isize,
                1,
                other.0.as_ptr(),
                1,
                len as isize,
                0.0,
                &mut result as *mut f32,
                1,
                1,
            );
        }
        result
    }
}
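
// The unsafe `sgemm` call computes a plain dot product by treating the two
// vectors as a 1×len and a len×1 matrix. A safe reference version (the same
// result up to floating-point rounding), mirroring `reference_dot` in the
// tests below:
//
//     fn dot_reference(a: &[f32], b: &[f32]) -> f32 {
//         assert_eq!(a.len(), b.len());
//         a.iter().zip(b).map(|(x, y)| x * y).sum()
//     }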

impl FromSql for Embedding {
    fn column_result(value: ValueRef) -> FromSqlResult<Self> {
        let bytes = value.as_blob()?;
        let embedding: Result<Vec<f32>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
        if embedding.is_err() {
            return Err(rusqlite::types::FromSqlError::Other(embedding.unwrap_err()));
        }
        Ok(Embedding(embedding.unwrap()))
    }
}

impl ToSql for Embedding {
    fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
        let bytes = bincode::serialize(&self.0)
            .map_err(|err| rusqlite::Error::ToSqlConversionFailure(Box::new(err)))?;
        Ok(ToSqlOutput::Owned(rusqlite::types::Value::Blob(bytes)))
    }
}
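
// An `Embedding` is persisted as the bincode serialization of its `Vec<f32>`, so
// the `FromSql`/`ToSql` pair above is a lossless round trip. A quick sanity
// sketch of that invariant:
//
//     #[test]
//     fn embedding_blob_round_trip() {
//         let original = vec![0.5f32, -1.0, 2.25];
//         let blob = bincode::serialize(&original).unwrap();
//         let decoded: Vec<f32> = bincode::deserialize(&blob).unwrap();
//         assert_eq!(original, decoded);
//     }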

#[derive(Clone)]
pub struct OpenAIEmbeddings {
    pub client: Arc<dyn HttpClient>,
@ -52,42 +111,53 @@ struct OpenAIEmbeddingUsage {

#[async_trait]
pub trait EmbeddingProvider: Sync + Send {
    async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>>;
    async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>>;
    fn max_tokens_per_batch(&self) -> usize;
    fn truncate(&self, span: &str) -> (String, usize);
}

pub struct DummyEmbeddings {}

#[async_trait]
impl EmbeddingProvider for DummyEmbeddings {
    async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>> {
    async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>> {
        // 1536 is the embedding size for OpenAI's ada models,
        // the model we will likely be starting with.
        let dummy_vec = vec![0.32 as f32; 1536];
        let dummy_vec = Embedding::from(vec![0.32 as f32; 1536]);
        return Ok(vec![dummy_vec; spans.len()]);
    }

    fn max_tokens_per_batch(&self) -> usize {
        OPENAI_INPUT_LIMIT
    }

    fn truncate(&self, span: &str) -> (String, usize) {
        let mut tokens = OPENAI_BPE_TOKENIZER.encode_with_special_tokens(span);
        let token_count = tokens.len();
        let output = if token_count > OPENAI_INPUT_LIMIT {
            tokens.truncate(OPENAI_INPUT_LIMIT);
            let new_input = OPENAI_BPE_TOKENIZER.decode(tokens.clone());
            new_input.ok().unwrap_or_else(|| span.to_string())
        } else {
            span.to_string()
        };

        (output, tokens.len())
    }
}

const OPENAI_INPUT_LIMIT: usize = 8190;

impl OpenAIEmbeddings {
    fn truncate(span: String) -> String {
        let mut tokens = OPENAI_BPE_TOKENIZER.encode_with_special_tokens(span.as_ref());
        if tokens.len() > OPENAI_INPUT_LIMIT {
            tokens.truncate(OPENAI_INPUT_LIMIT);
            let result = OPENAI_BPE_TOKENIZER.decode(tokens.clone());
            if result.is_ok() {
                let transformed = result.unwrap();
                return transformed;
            }
        }

        span
    }

    async fn send_request(&self, api_key: &str, spans: Vec<&str>) -> Result<Response<AsyncBody>> {
    async fn send_request(
        &self,
        api_key: &str,
        spans: Vec<&str>,
        request_timeout: u64,
    ) -> Result<Response<AsyncBody>> {
        let request = Request::post("https://api.openai.com/v1/embeddings")
            .redirect_policy(isahc::config::RedirectPolicy::Follow)
            .timeout(Duration::from_secs(4))
            .timeout(Duration::from_secs(request_timeout))
            .header("Content-Type", "application/json")
            .header("Authorization", format!("Bearer {}", api_key))
            .body(
@ -105,7 +175,26 @@ impl OpenAIEmbeddings {

#[async_trait]
impl EmbeddingProvider for OpenAIEmbeddings {
    async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>> {
    fn max_tokens_per_batch(&self) -> usize {
        50000
    }

    fn truncate(&self, span: &str) -> (String, usize) {
        let mut tokens = OPENAI_BPE_TOKENIZER.encode_with_special_tokens(span);
        let output = if tokens.len() > OPENAI_INPUT_LIMIT {
            tokens.truncate(OPENAI_INPUT_LIMIT);
            OPENAI_BPE_TOKENIZER
                .decode(tokens.clone())
                .ok()
                .unwrap_or_else(|| span.to_string())
        } else {
            span.to_string()
        };

        (output, tokens.len())
    }

    async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>> {
        const BACKOFF_SECONDS: [usize; 4] = [3, 5, 15, 45];
        const MAX_RETRIES: usize = 4;

@ -114,45 +203,21 @@ impl EmbeddingProvider for OpenAIEmbeddings {
            .ok_or_else(|| anyhow!("no api key"))?;

        let mut request_number = 0;
        let mut truncated = false;
        let mut request_timeout: u64 = 15;
        let mut response: Response<AsyncBody>;
        let mut spans: Vec<String> = spans.iter().map(|x| x.to_string()).collect();
        while request_number < MAX_RETRIES {
            response = self
                .send_request(api_key, spans.iter().map(|x| &**x).collect())
                .send_request(
                    api_key,
                    spans.iter().map(|x| &**x).collect(),
                    request_timeout,
                )
                .await?;
            request_number += 1;

            if request_number + 1 == MAX_RETRIES && response.status() != StatusCode::OK {
                return Err(anyhow!(
                    "openai max retries, error: {:?}",
                    &response.status()
                ));
            }

            match response.status() {
                StatusCode::TOO_MANY_REQUESTS => {
                    let delay = Duration::from_secs(BACKOFF_SECONDS[request_number - 1] as u64);
                    log::trace!(
                        "open ai rate limiting, delaying request by {:?} seconds",
                        delay.as_secs()
                    );
                    self.executor.timer(delay).await;
                }
                StatusCode::BAD_REQUEST => {
                    // Only truncate if it hasn't been truncated before
                    if !truncated {
                        for span in spans.iter_mut() {
                            *span = Self::truncate(span.clone());
                        }
                        truncated = true;
                    } else {
                        // If failing once already truncated, log the error and break the loop
                        let mut body = String::new();
                        response.body_mut().read_to_string(&mut body).await?;
                        log::trace!("open ai bad request: {:?} {:?}", &response.status(), body);
                        break;
                    }
                StatusCode::REQUEST_TIMEOUT => {
                    request_timeout += 5;
                }
                StatusCode::OK => {
                    let mut body = String::new();
@ -163,18 +228,96 @@ impl EmbeddingProvider for OpenAIEmbeddings {
                        "openai embedding completed. tokens: {:?}",
                        response.usage.total_tokens
                    );

                    return Ok(response
                        .data
                        .into_iter()
                        .map(|embedding| embedding.embedding)
                        .map(|embedding| Embedding::from(embedding.embedding))
                        .collect());
                }
                StatusCode::TOO_MANY_REQUESTS => {
                    let mut body = String::new();
                    response.body_mut().read_to_string(&mut body).await?;

                    let delay_duration = {
                        let delay = Duration::from_secs(BACKOFF_SECONDS[request_number - 1] as u64);
                        if let Some(time_to_reset) =
                            response.headers().get("x-ratelimit-reset-tokens")
                        {
                            if let Ok(time_str) = time_to_reset.to_str() {
                                parse(time_str).unwrap_or(delay)
                            } else {
                                delay
                            }
                        } else {
                            delay
                        }
                    };

                    log::trace!(
                        "openai rate limiting: waiting {:?} until lifted",
                        &delay_duration
                    );

                    self.executor.timer(delay_duration).await;
                }
                _ => {
                    return Err(anyhow!("openai embedding failed {}", response.status()));
                    let mut body = String::new();
                    response.body_mut().read_to_string(&mut body).await?;
                    return Err(anyhow!(
                        "open ai bad request: {:?} {:?}",
                        &response.status(),
                        body
                    ));
                }
            }
        }

        Err(anyhow!("openai embedding failed"))
        Err(anyhow!("openai max retries"))
    }
}
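
// `parse_duration::parse` accepts human-readable durations such as "85ms" or
// "1s" (the exact format of the `x-ratelimit-reset-tokens` header is an
// assumption here), and the code falls back to the exponential backoff schedule
// when the header is absent or unparseable. That fallback, condensed:
//
//     use std::time::Duration;
//
//     fn reset_delay(header: Option<&str>, backoff: Duration) -> Duration {
//         header
//             .and_then(|value| parse_duration::parse(value).ok())
//             .unwrap_or(backoff)
//     }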

#[cfg(test)]
mod tests {
    use super::*;
    use rand::prelude::*;

    #[gpui::test]
    fn test_similarity(mut rng: StdRng) {
        assert_eq!(
            Embedding::from(vec![1., 0., 0., 0., 0.])
                .similarity(&Embedding::from(vec![0., 1., 0., 0., 0.])),
            0.
        );
        assert_eq!(
            Embedding::from(vec![2., 0., 0., 0., 0.])
                .similarity(&Embedding::from(vec![3., 1., 0., 0., 0.])),
            6.
        );

        for _ in 0..100 {
            let size = 1536;
            let mut a = vec![0.; size];
            let mut b = vec![0.; size];
            for (a, b) in a.iter_mut().zip(b.iter_mut()) {
                *a = rng.gen();
                *b = rng.gen();
            }
            let a = Embedding::from(a);
            let b = Embedding::from(b);

            assert_eq!(
                round_to_decimals(a.similarity(&b), 1),
                round_to_decimals(reference_dot(&a.0, &b.0), 1)
            );
        }

        fn round_to_decimals(n: f32, decimal_places: i32) -> f32 {
            let factor = (10.0 as f32).powi(decimal_places);
            (n * factor).round() / factor
        }

        fn reference_dot(a: &[f32], b: &[f32]) -> f32 {
            a.iter().zip(b.iter()).map(|(a, b)| a * b).sum()
        }
    }
}
165  crates/semantic_index/src/embedding_queue.rs  Normal file
@ -0,0 +1,165 @
use crate::{embedding::EmbeddingProvider, parsing::Span, JobHandle};
use gpui::executor::Background;
use parking_lot::Mutex;
use smol::channel;
use std::{mem, ops::Range, path::Path, sync::Arc, time::SystemTime};

#[derive(Clone)]
pub struct FileToEmbed {
    pub worktree_id: i64,
    pub path: Arc<Path>,
    pub mtime: SystemTime,
    pub spans: Vec<Span>,
    pub job_handle: JobHandle,
}

impl std::fmt::Debug for FileToEmbed {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("FileToEmbed")
            .field("worktree_id", &self.worktree_id)
            .field("path", &self.path)
            .field("mtime", &self.mtime)
            .field("spans", &self.spans)
            .finish_non_exhaustive()
    }
}

impl PartialEq for FileToEmbed {
    fn eq(&self, other: &Self) -> bool {
        self.worktree_id == other.worktree_id
            && self.path == other.path
            && self.mtime == other.mtime
            && self.spans == other.spans
    }
}

pub struct EmbeddingQueue {
    embedding_provider: Arc<dyn EmbeddingProvider>,
    pending_batch: Vec<FileFragmentToEmbed>,
    executor: Arc<Background>,
    pending_batch_token_count: usize,
    finished_files_tx: channel::Sender<FileToEmbed>,
    finished_files_rx: channel::Receiver<FileToEmbed>,
}

#[derive(Clone)]
pub struct FileFragmentToEmbed {
    file: Arc<Mutex<FileToEmbed>>,
    span_range: Range<usize>,
}

impl EmbeddingQueue {
    pub fn new(embedding_provider: Arc<dyn EmbeddingProvider>, executor: Arc<Background>) -> Self {
        let (finished_files_tx, finished_files_rx) = channel::unbounded();
        Self {
            embedding_provider,
            executor,
            pending_batch: Vec::new(),
            pending_batch_token_count: 0,
            finished_files_tx,
            finished_files_rx,
        }
    }

    pub fn push(&mut self, file: FileToEmbed) {
        if file.spans.is_empty() {
            self.finished_files_tx.try_send(file).unwrap();
            return;
        }

        let file = Arc::new(Mutex::new(file));

        self.pending_batch.push(FileFragmentToEmbed {
            file: file.clone(),
            span_range: 0..0,
        });

        let mut fragment_range = &mut self.pending_batch.last_mut().unwrap().span_range;
        for (ix, span) in file.lock().spans.iter().enumerate() {
            let span_token_count = if span.embedding.is_none() {
                span.token_count
            } else {
                0
            };

            let next_token_count = self.pending_batch_token_count + span_token_count;
            if next_token_count > self.embedding_provider.max_tokens_per_batch() {
                let range_end = fragment_range.end;
                self.flush();
                self.pending_batch.push(FileFragmentToEmbed {
                    file: file.clone(),
                    span_range: range_end..range_end,
                });
                fragment_range = &mut self.pending_batch.last_mut().unwrap().span_range;
            }

            fragment_range.end = ix + 1;
            self.pending_batch_token_count += span_token_count;
        }
    }
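
    // Worked example of the batching rule above, with assumed numbers: if
    // `max_tokens_per_batch()` is 10 and a file's spans cost 4, 4, and 4 tokens,
    // the first fragment grows to cover spans 0..2 (8 tokens); the third span
    // would push the batch to 12, so the queue flushes and starts a second
    // fragment over spans 2..3 for the same file. Spans that already carry an
    // embedding count as 0 tokens and never force a flush.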

    pub fn flush(&mut self) {
        let batch = mem::take(&mut self.pending_batch);
        self.pending_batch_token_count = 0;
        if batch.is_empty() {
            return;
        }

        let finished_files_tx = self.finished_files_tx.clone();
        let embedding_provider = self.embedding_provider.clone();

        self.executor
            .spawn(async move {
                let mut spans = Vec::new();
                for fragment in &batch {
                    let file = fragment.file.lock();
                    spans.extend(
                        file.spans[fragment.span_range.clone()]
                            .iter()
                            .filter(|d| d.embedding.is_none())
                            .map(|d| d.content.clone()),
                    );
                }

                // If no spans need embedding, forward each file whose last
                // fragment this is to the finished-files channel.
                if spans.is_empty() {
                    for fragment in batch.clone() {
                        if let Some(file) = Arc::into_inner(fragment.file) {
                            finished_files_tx.try_send(file.into_inner()).unwrap();
                        }
                    }
                    return;
                };

                match embedding_provider.embed_batch(spans).await {
                    Ok(embeddings) => {
                        let mut embeddings = embeddings.into_iter();
                        for fragment in batch {
                            for span in &mut fragment.file.lock().spans[fragment.span_range.clone()]
                                .iter_mut()
                                .filter(|d| d.embedding.is_none())
                            {
                                if let Some(embedding) = embeddings.next() {
                                    span.embedding = Some(embedding);
                                } else {
                                    log::error!("number of embeddings != number of documents");
                                }
                            }

                            if let Some(file) = Arc::into_inner(fragment.file) {
                                finished_files_tx.try_send(file.into_inner()).unwrap();
                            }
                        }
                    }
                    Err(error) => {
                        log::error!("{:?}", error);
                    }
                }
            })
            .detach();
    }

    pub fn finished_files(&self) -> channel::Receiver<FileToEmbed> {
        self.finished_files_rx.clone()
    }
}
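
// Usage sketch for the queue above, mirroring the `test_embedding_batching` test
// added later in this change (a provider and a background executor are assumed
// to be in hand):
//
//     let mut queue = EmbeddingQueue::new(provider.clone(), executor.clone());
//     for file in files {
//         queue.push(file); // over-budget files are split into fragments
//     }
//     queue.flush(); // force out the pending partial batch
//     let finished = queue.finished_files(); // channel of fully embedded files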
@ -1,5 +1,10 @@
use anyhow::{anyhow, Ok, Result};
use crate::embedding::{Embedding, EmbeddingProvider};
use anyhow::{anyhow, Result};
use language::{Grammar, Language};
use rusqlite::{
    types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef},
    ToSql,
};
use sha1::{Digest, Sha1};
use std::{
    cmp::{self, Reverse},
@ -10,13 +15,44 @@ use std::{
};
use tree_sitter::{Parser, QueryCursor};

#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct SpanDigest([u8; 20]);

impl FromSql for SpanDigest {
    fn column_result(value: ValueRef) -> FromSqlResult<Self> {
        let blob = value.as_blob()?;
        let bytes =
            blob.try_into()
                .map_err(|_| rusqlite::types::FromSqlError::InvalidBlobSize {
                    expected_size: 20,
                    blob_size: blob.len(),
                })?;
        return Ok(SpanDigest(bytes));
    }
}

impl ToSql for SpanDigest {
    fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
        self.0.to_sql()
    }
}

impl From<&'_ str> for SpanDigest {
    fn from(value: &'_ str) -> Self {
        let mut sha1 = Sha1::new();
        sha1.update(value);
        Self(sha1.finalize().into())
    }
}
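
// A `SpanDigest` is just the 20-byte SHA-1 of a span's rendered content, so equal
// content yields an equal digest; this is what lets `embeddings_for_files` reuse
// cached embeddings across re-indexes. For example:
//
//     assert_eq!(SpanDigest::from("fn main() {}"), SpanDigest::from("fn main() {}"));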

#[derive(Debug, PartialEq, Clone)]
pub struct Document {
pub struct Span {
    pub name: String,
    pub range: Range<usize>,
    pub content: String,
    pub embedding: Vec<f32>,
    pub sha1: [u8; 20],
    pub embedding: Option<Embedding>,
    pub digest: SpanDigest,
    pub token_count: usize,
}

const CODE_CONTEXT_TEMPLATE: &str =
@ -30,6 +66,7 @@ pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] =
pub struct CodeContextRetriever {
    pub parser: Parser,
    pub cursor: QueryCursor,
    pub embedding_provider: Arc<dyn EmbeddingProvider>,
}

// Every match has an item; it represents the fundamental tree-sitter symbol and anchors the search
@ -47,10 +84,11 @@ pub struct CodeContextMatch {
}

impl CodeContextRetriever {
    pub fn new() -> Self {
    pub fn new(embedding_provider: Arc<dyn EmbeddingProvider>) -> Self {
        Self {
            parser: Parser::new(),
            cursor: QueryCursor::new(),
            embedding_provider,
        }
    }

@ -59,38 +97,36 @@ impl CodeContextRetriever {
        relative_path: &Path,
        language_name: Arc<str>,
        content: &str,
    ) -> Result<Vec<Document>> {
    ) -> Result<Vec<Span>> {
        let document_span = ENTIRE_FILE_TEMPLATE
            .replace("<path>", relative_path.to_string_lossy().as_ref())
            .replace("<language>", language_name.as_ref())
            .replace("<item>", &content);

        let mut sha1 = Sha1::new();
        sha1.update(&document_span);

        Ok(vec![Document {
        let digest = SpanDigest::from(document_span.as_str());
        let (document_span, token_count) = self.embedding_provider.truncate(&document_span);
        Ok(vec![Span {
            range: 0..content.len(),
            content: document_span,
            embedding: Vec::new(),
            embedding: Default::default(),
            name: language_name.to_string(),
            sha1: sha1.finalize().into(),
            digest,
            token_count,
        }])
    }

    fn parse_markdown_file(&self, relative_path: &Path, content: &str) -> Result<Vec<Document>> {
    fn parse_markdown_file(&self, relative_path: &Path, content: &str) -> Result<Vec<Span>> {
        let document_span = MARKDOWN_CONTEXT_TEMPLATE
            .replace("<path>", relative_path.to_string_lossy().as_ref())
            .replace("<item>", &content);

        let mut sha1 = Sha1::new();
        sha1.update(&document_span);

        Ok(vec![Document {
        let digest = SpanDigest::from(document_span.as_str());
        let (document_span, token_count) = self.embedding_provider.truncate(&document_span);
        Ok(vec![Span {
            range: 0..content.len(),
            content: document_span,
            embedding: Vec::new(),
            embedding: None,
            name: "Markdown".to_string(),
            sha1: sha1.finalize().into(),
            digest,
            token_count,
        }])
    }

@ -155,26 +191,32 @@ impl CodeContextRetriever {
        relative_path: &Path,
        content: &str,
        language: Arc<Language>,
    ) -> Result<Vec<Document>> {
    ) -> Result<Vec<Span>> {
        let language_name = language.name();

        if PARSEABLE_ENTIRE_FILE_TYPES.contains(&language_name.as_ref()) {
            return self.parse_entire_file(relative_path, language_name, &content);
        } else if &language_name.to_string() == &"Markdown".to_string() {
        } else if language_name.as_ref() == "Markdown" {
            return self.parse_markdown_file(relative_path, &content);
        }

        let mut documents = self.parse_file(content, language)?;
        for document in &mut documents {
            document.content = CODE_CONTEXT_TEMPLATE
        let mut spans = self.parse_file(content, language)?;
        for span in &mut spans {
            let document_content = CODE_CONTEXT_TEMPLATE
                .replace("<path>", relative_path.to_string_lossy().as_ref())
                .replace("<language>", language_name.as_ref())
                .replace("item", &document.content);
                .replace("item", &span.content);

            let (document_content, token_count) =
                self.embedding_provider.truncate(&document_content);

            span.content = document_content;
            span.token_count = token_count;
        }
        Ok(documents)
        Ok(spans)
    }

    pub fn parse_file(&mut self, content: &str, language: Arc<Language>) -> Result<Vec<Document>> {
    pub fn parse_file(&mut self, content: &str, language: Arc<Language>) -> Result<Vec<Span>> {
        let grammar = language
            .grammar()
            .ok_or_else(|| anyhow!("no grammar for language"))?;
@ -185,7 +227,7 @@ impl CodeContextRetriever {
        let language_scope = language.default_scope();
        let placeholder = language_scope.collapsed_placeholder();

        let mut documents = Vec::new();
        let mut spans = Vec::new();
        let mut collapsed_ranges_within = Vec::new();
        let mut parsed_name_ranges = HashSet::new();
        for (i, context_match) in matches.iter().enumerate() {
@ -225,22 +267,22 @@ impl CodeContextRetriever {

            collapsed_ranges_within.sort_by_key(|r| (r.start, Reverse(r.end)));

            let mut document_content = String::new();
            let mut span_content = String::new();
            for context_range in &context_match.context_ranges {
                add_content_from_range(
                    &mut document_content,
                    &mut span_content,
                    content,
                    context_range.clone(),
                    context_match.start_col,
                );
                document_content.push_str("\n");
                span_content.push_str("\n");
            }

            let mut offset = item_range.start;
            for collapsed_range in &collapsed_ranges_within {
                if collapsed_range.start > offset {
                    add_content_from_range(
                        &mut document_content,
                        &mut span_content,
                        content,
                        offset..collapsed_range.start,
                        context_match.start_col,
@ -249,33 +291,32 @@ impl CodeContextRetriever {
                    }
                }

                if collapsed_range.end > offset {
                    document_content.push_str(placeholder);
                    span_content.push_str(placeholder);
                    offset = collapsed_range.end;
                }
            }

            if offset < item_range.end {
                add_content_from_range(
                    &mut document_content,
                    &mut span_content,
                    content,
                    offset..item_range.end,
                    context_match.start_col,
                );
            }

            let mut sha1 = Sha1::new();
            sha1.update(&document_content);

            documents.push(Document {
            let sha1 = SpanDigest::from(span_content.as_str());
            spans.push(Span {
                name,
                content: document_content,
                content: span_content,
                range: item_range.clone(),
                embedding: vec![],
                sha1: sha1.finalize().into(),
                embedding: None,
                digest: sha1,
                token_count: 0,
            })
        }

        return Ok(documents);
        return Ok(spans);
    }
}

File diff suppressed because it is too large
@ -1,14 +1,15 @@
use crate::{
    db::dot,
    embedding::EmbeddingProvider,
    parsing::{subtract_ranges, CodeContextRetriever, Document},
    embedding::{DummyEmbeddings, Embedding, EmbeddingProvider},
    embedding_queue::EmbeddingQueue,
    parsing::{subtract_ranges, CodeContextRetriever, Span, SpanDigest},
    semantic_index_settings::SemanticIndexSettings,
    SearchResult, SemanticIndex,
    FileToEmbed, JobHandle, SearchResult, SemanticIndex, EMBEDDING_QUEUE_FLUSH_TIMEOUT,
};
use anyhow::Result;
use async_trait::async_trait;
use gpui::{Task, TestAppContext};
use gpui::{executor::Deterministic, Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry, ToOffset};
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use project::{project_settings::ProjectSettings, search::PathMatcher, FakeFs, Fs, Project};
use rand::{rngs::StdRng, Rng};
@ -20,8 +21,10 @@ use std::{
        atomic::{self, AtomicUsize},
        Arc,
    },
    time::SystemTime,
};
use unindent::Unindent;
use util::RandomCharIter;

#[ctor::ctor]
fn init_logger() {
@ -31,12 +34,8 @@ fn init_logger() {
}

#[gpui::test]
async fn test_semantic_index(cx: &mut TestAppContext) {
    cx.update(|cx| {
        cx.set_global(SettingsStore::test(cx));
        settings::register::<SemanticIndexSettings>(cx);
        settings::register::<ProjectSettings>(cx);
    });
async fn test_semantic_index(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
@ -56,6 +55,7 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
                fn bbb() {
                    println!(\"bbbbbbbbbbbbb!\");
                }
                struct pqpqpqp {}
            ".unindent(),
            "file3.toml": "
                ZZZZZZZZZZZZZZZZZZ = 5
@ -75,7 +75,7 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
    let db_path = db_dir.path().join("db.sqlite");

    let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
    let store = SemanticIndex::new(
    let semantic_index = SemanticIndex::new(
        fs.clone(),
        db_path,
        embedding_provider.clone(),
@ -87,34 +87,24 @@ async fn test_semantic_index(cx: &mut TestAppContext) {

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    let _ = store
        .update(cx, |store, cx| {
            store.initialize_project(project.clone(), cx)
        })
        .await;

    let (file_count, outstanding_file_count) = store
        .update(cx, |store, cx| store.index_project(project.clone(), cx))
        .await
        .unwrap();
    assert_eq!(file_count, 3);
    cx.foreground().run_until_parked();
    assert_eq!(*outstanding_file_count.borrow(), 0);

    let search_results = store
        .update(cx, |store, cx| {
            store.search_project(
                project.clone(),
                "aaaaaabbbbzz".to_string(),
                5,
                vec![],
                vec![],
                cx,
            )
        })
        .await
        .unwrap();
    let search_results = semantic_index.update(cx, |store, cx| {
        store.search_project(
            project.clone(),
            "aaaaaabbbbzz".to_string(),
            5,
            vec![],
            vec![],
            cx,
        )
    });
    let pending_file_count =
        semantic_index.read_with(cx, |index, _| index.pending_file_count(&project).unwrap());
    deterministic.run_until_parked();
    assert_eq!(*pending_file_count.borrow(), 3);
    deterministic.advance_clock(EMBEDDING_QUEUE_FLUSH_TIMEOUT);
    assert_eq!(*pending_file_count.borrow(), 0);

    let search_results = search_results.await.unwrap();
    assert_search_results(
        &search_results,
        &[
@ -122,6 +112,7 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
            (Path::new("src/file2.rs").into(), 0),
            (Path::new("src/file3.toml").into(), 0),
            (Path::new("src/file1.rs").into(), 45),
            (Path::new("src/file2.rs").into(), 45),
        ],
        cx,
    );
@ -129,7 +120,7 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
    // Test include-files functionality
    let include_files = vec![PathMatcher::new("*.rs").unwrap()];
    let exclude_files = vec![PathMatcher::new("*.rs").unwrap()];
    let rust_only_search_results = store
    let rust_only_search_results = semantic_index
        .update(cx, |store, cx| {
            store.search_project(
                project.clone(),
@ -149,11 +140,12 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
            (Path::new("src/file1.rs").into(), 0),
            (Path::new("src/file2.rs").into(), 0),
            (Path::new("src/file1.rs").into(), 45),
            (Path::new("src/file2.rs").into(), 45),
        ],
        cx,
    );

    let no_rust_search_results = store
    let no_rust_search_results = semantic_index
        .update(cx, |store, cx| {
            store.search_project(
                project.clone(),
@ -186,24 +178,85 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
        .await
        .unwrap();

    cx.foreground().run_until_parked();
    deterministic.advance_clock(EMBEDDING_QUEUE_FLUSH_TIMEOUT);

    let prev_embedding_count = embedding_provider.embedding_count();
    let (file_count, outstanding_file_count) = store
        .update(cx, |store, cx| store.index_project(project.clone(), cx))
        .await
        .unwrap();
    assert_eq!(file_count, 1);

    cx.foreground().run_until_parked();
    assert_eq!(*outstanding_file_count.borrow(), 0);
    let index = semantic_index.update(cx, |store, cx| store.index_project(project.clone(), cx));
    deterministic.run_until_parked();
    assert_eq!(*pending_file_count.borrow(), 1);
    deterministic.advance_clock(EMBEDDING_QUEUE_FLUSH_TIMEOUT);
    assert_eq!(*pending_file_count.borrow(), 0);
    index.await.unwrap();

    assert_eq!(
        embedding_provider.embedding_count() - prev_embedding_count,
        2
        1
    );
}

#[gpui::test(iterations = 10)]
async fn test_embedding_batching(cx: &mut TestAppContext, mut rng: StdRng) {
    let (outstanding_job_count, _) = postage::watch::channel_with(0);
    let outstanding_job_count = Arc::new(Mutex::new(outstanding_job_count));

    let files = (1..=3)
        .map(|file_ix| FileToEmbed {
            worktree_id: 5,
            path: Path::new(&format!("path-{file_ix}")).into(),
            mtime: SystemTime::now(),
            spans: (0..rng.gen_range(4..22))
                .map(|document_ix| {
                    let content_len = rng.gen_range(10..100);
                    let content = RandomCharIter::new(&mut rng)
                        .with_simple_text()
                        .take(content_len)
                        .collect::<String>();
                    let digest = SpanDigest::from(content.as_str());
                    Span {
                        range: 0..10,
                        embedding: None,
                        name: format!("document {document_ix}"),
                        content,
                        digest,
                        token_count: rng.gen_range(10..30),
                    }
                })
                .collect(),
            job_handle: JobHandle::new(&outstanding_job_count),
        })
        .collect::<Vec<_>>();

    let embedding_provider = Arc::new(FakeEmbeddingProvider::default());

    let mut queue = EmbeddingQueue::new(embedding_provider.clone(), cx.background());
    for file in &files {
        queue.push(file.clone());
    }
    queue.flush();

    cx.foreground().run_until_parked();
    let finished_files = queue.finished_files();
    let mut embedded_files: Vec<_> = files
        .iter()
        .map(|_| finished_files.try_recv().expect("no finished file"))
        .collect();

    let expected_files: Vec<_> = files
        .iter()
        .map(|file| {
            let mut file = file.clone();
            for doc in &mut file.spans {
                doc.embedding = Some(embedding_provider.embed_sync(doc.content.as_ref()));
            }
            file
        })
        .collect();

    embedded_files.sort_by_key(|f| f.path.clone());

    assert_eq!(embedded_files, expected_files);
}

#[track_caller]
fn assert_search_results(
    actual: &[SearchResult],
@ -227,7 +280,8 @@ fn assert_search_results(
#[gpui::test]
async fn test_code_context_retrieval_rust() {
    let language = rust_lang();
    let mut retriever = CodeContextRetriever::new();
    let embedding_provider = Arc::new(DummyEmbeddings {});
    let mut retriever = CodeContextRetriever::new(embedding_provider);

    let text = "
        /// A doc comment
@ -314,7 +368,8 @@ async fn test_code_context_retrieval_rust() {
#[gpui::test]
async fn test_code_context_retrieval_json() {
    let language = json_lang();
    let mut retriever = CodeContextRetriever::new();
    let embedding_provider = Arc::new(DummyEmbeddings {});
    let mut retriever = CodeContextRetriever::new(embedding_provider);

    let text = r#"
        {
@ -382,7 +437,7 @@ async fn test_code_context_retrieval_json() {
}

fn assert_documents_eq(
    documents: &[Document],
    documents: &[Span],
    expected_contents_and_start_offsets: &[(String, usize)],
) {
    assert_eq!(
@ -397,7 +452,8 @@ fn assert_documents_eq(
#[gpui::test]
async fn test_code_context_retrieval_javascript() {
    let language = js_lang();
    let mut retriever = CodeContextRetriever::new();
    let embedding_provider = Arc::new(DummyEmbeddings {});
    let mut retriever = CodeContextRetriever::new(embedding_provider);

    let text = "
        /* globals importScripts, backend */
@ -495,7 +551,8 @@ async fn test_code_context_retrieval_javascript() {
#[gpui::test]
async fn test_code_context_retrieval_lua() {
    let language = lua_lang();
    let mut retriever = CodeContextRetriever::new();
    let embedding_provider = Arc::new(DummyEmbeddings {});
    let mut retriever = CodeContextRetriever::new(embedding_provider);

    let text = r#"
        -- Creates a new class
@ -568,7 +625,8 @@ async fn test_code_context_retrieval_lua() {
#[gpui::test]
async fn test_code_context_retrieval_elixir() {
    let language = elixir_lang();
    let mut retriever = CodeContextRetriever::new();
    let embedding_provider = Arc::new(DummyEmbeddings {});
    let mut retriever = CodeContextRetriever::new(embedding_provider);

    let text = r#"
        defmodule File.Stream do
@ -684,7 +742,8 @@ async fn test_code_context_retrieval_elixir() {
#[gpui::test]
async fn test_code_context_retrieval_cpp() {
    let language = cpp_lang();
    let mut retriever = CodeContextRetriever::new();
    let embedding_provider = Arc::new(DummyEmbeddings {});
    let mut retriever = CodeContextRetriever::new(embedding_provider);

    let text = "
        /**
@ -836,7 +895,8 @@ async fn test_code_context_retrieval_cpp() {
#[gpui::test]
async fn test_code_context_retrieval_ruby() {
    let language = ruby_lang();
    let mut retriever = CodeContextRetriever::new();
    let embedding_provider = Arc::new(DummyEmbeddings {});
    let mut retriever = CodeContextRetriever::new(embedding_provider);

    let text = r#"
        # This concern is inspired by "sudo mode" on GitHub. It
@ -1026,7 +1086,8 @@ async fn test_code_context_retrieval_ruby() {
#[gpui::test]
async fn test_code_context_retrieval_php() {
    let language = php_lang();
    let mut retriever = CodeContextRetriever::new();
    let embedding_provider = Arc::new(DummyEmbeddings {});
    let mut retriever = CodeContextRetriever::new(embedding_provider);

    let text = r#"
        <?php
@ -1173,36 +1234,6 @@ async fn test_code_context_retrieval_php() {
    );
}

#[gpui::test]
fn test_dot_product(mut rng: StdRng) {
    assert_eq!(dot(&[1., 0., 0., 0., 0.], &[0., 1., 0., 0., 0.]), 0.);
    assert_eq!(dot(&[2., 0., 0., 0., 0.], &[3., 1., 0., 0., 0.]), 6.);

    for _ in 0..100 {
        let size = 1536;
        let mut a = vec![0.; size];
        let mut b = vec![0.; size];
        for (a, b) in a.iter_mut().zip(b.iter_mut()) {
            *a = rng.gen();
            *b = rng.gen();
        }

        assert_eq!(
            round_to_decimals(dot(&a, &b), 1),
            round_to_decimals(reference_dot(&a, &b), 1)
        );
    }

    fn round_to_decimals(n: f32, decimal_places: i32) -> f32 {
        let factor = (10.0 as f32).powi(decimal_places);
        (n * factor).round() / factor
    }

    fn reference_dot(a: &[f32], b: &[f32]) -> f32 {
        a.iter().zip(b.iter()).map(|(a, b)| a * b).sum()
    }
}

#[derive(Default)]
struct FakeEmbeddingProvider {
    embedding_count: AtomicUsize,
@ -1212,35 +1243,42 @@ impl FakeEmbeddingProvider {
    fn embedding_count(&self) -> usize {
        self.embedding_count.load(atomic::Ordering::SeqCst)
    }

    fn embed_sync(&self, span: &str) -> Embedding {
        let mut result = vec![1.0; 26];
        for letter in span.chars() {
            let letter = letter.to_ascii_lowercase();
            if letter as u32 >= 'a' as u32 {
                let ix = (letter as u32) - ('a' as u32);
                if ix < 26 {
                    result[ix as usize] += 1.0;
                }
            }
        }

        let norm = result.iter().map(|x| x * x).sum::<f32>().sqrt();
        for x in &mut result {
            *x /= norm;
        }

        result.into()
    }
}

#[async_trait]
impl EmbeddingProvider for FakeEmbeddingProvider {
    async fn embed_batch(&self, spans: Vec<&str>) -> Result<Vec<Vec<f32>>> {
    fn truncate(&self, span: &str) -> (String, usize) {
        (span.to_string(), 1)
    }

    fn max_tokens_per_batch(&self) -> usize {
        200
    }

    async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>> {
        self.embedding_count
            .fetch_add(spans.len(), atomic::Ordering::SeqCst);
        Ok(spans
            .iter()
            .map(|span| {
                let mut result = vec![1.0; 26];
                for letter in span.chars() {
                    let letter = letter.to_ascii_lowercase();
                    if letter as u32 >= 'a' as u32 {
                        let ix = (letter as u32) - ('a' as u32);
                        if ix < 26 {
                            result[ix as usize] += 1.0;
                        }
                    }
                }

                let norm = result.iter().map(|x| x * x).sum::<f32>().sqrt();
                for x in &mut result {
                    *x /= norm;
                }

                result
            })
            .collect())
        Ok(spans.iter().map(|span| self.embed_sync(span)).collect())
    }
}

@ -1684,3 +1722,11 @@ fn test_subtract_ranges() {

    assert_eq!(subtract_ranges(&[0..5], &[1..2]), &[0..1, 2..5]);
}

fn init_test(cx: &mut TestAppContext) {
    cx.update(|cx| {
        cx.set_global(SettingsStore::test(cx));
        settings::register::<SemanticIndexSettings>(cx);
        settings::register::<ProjectSettings>(cx);
    });
}
@ -417,6 +417,7 @@ pub struct Toolbar {
    pub height: f32,
    pub item_spacing: f32,
    pub toggleable_tool: Toggleable<Interactive<IconButton>>,
    pub toggleable_text_tool: Toggleable<Interactive<ContainedText>>,
    pub breadcrumb_height: f32,
    pub breadcrumbs: Interactive<ContainedText>,
}
@ -269,11 +269,22 @@ pub fn defer<F: FnOnce()>(f: F) -> Deferred<F> {
    Deferred(Some(f))
}

pub struct RandomCharIter<T: Rng>(T);
pub struct RandomCharIter<T: Rng> {
    rng: T,
    simple_text: bool,
}

impl<T: Rng> RandomCharIter<T> {
    pub fn new(rng: T) -> Self {
        Self(rng)
        Self {
            rng,
            simple_text: std::env::var("SIMPLE_TEXT").map_or(false, |v| !v.is_empty()),
        }
    }

    pub fn with_simple_text(mut self) -> Self {
        self.simple_text = true;
        self
    }
}

@ -281,25 +292,27 @@ impl<T: Rng> Iterator for RandomCharIter<T> {
    type Item = char;

    fn next(&mut self) -> Option<Self::Item> {
        if std::env::var("SIMPLE_TEXT").map_or(false, |v| !v.is_empty()) {
            return if self.0.gen_range(0..100) < 5 {
        if self.simple_text {
            return if self.rng.gen_range(0..100) < 5 {
                Some('\n')
            } else {
                Some(self.0.gen_range(b'a'..b'z' + 1).into())
                Some(self.rng.gen_range(b'a'..b'z' + 1).into())
            };
        }

        match self.0.gen_range(0..100) {
        match self.rng.gen_range(0..100) {
            // whitespace
            0..=19 => [' ', '\n', '\r', '\t'].choose(&mut self.0).copied(),
            0..=19 => [' ', '\n', '\r', '\t'].choose(&mut self.rng).copied(),
            // two-byte greek letters
            20..=32 => char::from_u32(self.0.gen_range(('α' as u32)..('ω' as u32 + 1))),
            20..=32 => char::from_u32(self.rng.gen_range(('α' as u32)..('ω' as u32 + 1))),
            // // three-byte characters
            33..=45 => ['✋', '✅', '❌', '❎', '⭐'].choose(&mut self.0).copied(),
            33..=45 => ['✋', '✅', '❌', '❎', '⭐']
                .choose(&mut self.rng)
                .copied(),
            // // four-byte characters
            46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.0).copied(),
            46..=58 => ['🍐', '🏀', '🍗', '🎉'].choose(&mut self.rng).copied(),
            // ascii letters
            _ => Some(self.0.gen_range(b'a'..b'z' + 1).into()),
            _ => Some(self.rng.gen_range(b'a'..b'z' + 1).into()),
        }
    }
}

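With the RNG and flag now stored on the struct, with_simple_text opts a single iterator into simple output instead of the old code consulting the SIMPLE_TEXT environment variable on every call to next. A hypothetical usage sketch (the StdRng seeding below is an assumption; RandomCharIter and with_simple_text come from the diff above):

use rand::{rngs::StdRng, SeedableRng};

fn main() {
    let rng = StdRng::seed_from_u64(42);
    let text: String = RandomCharIter::new(rng)
        .with_simple_text() // only lowercase ASCII letters plus occasional '\n'
        .take(80)
        .collect();
    assert_eq!(text.chars().count(), 80);
}
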
@ -3,7 +3,8 @@ use std::{cmp, sync::Arc};
use editor::{
    char_kind,
    display_map::{DisplaySnapshot, FoldPoint, ToDisplayPoint},
    movement, Bias, CharKind, DisplayPoint, ToOffset,
    movement::{self, FindRange},
    Bias, CharKind, DisplayPoint, ToOffset,
};
use gpui::{actions, impl_actions, AppContext, WindowContext};
use language::{Point, Selection, SelectionGoal};
@ -592,7 +593,7 @@ pub(crate) fn next_word_start(
    let scope = map.buffer_snapshot.language_scope_at(point.to_point(map));
    for _ in 0..times {
        let mut crossed_newline = false;
        point = movement::find_boundary(map, point, |left, right| {
        point = movement::find_boundary(map, point, FindRange::MultiLine, |left, right| {
            let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
            let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);
            let at_newline = right == '\n';
@ -616,8 +617,13 @@ fn next_word_end(
) -> DisplayPoint {
    let scope = map.buffer_snapshot.language_scope_at(point.to_point(map));
    for _ in 0..times {
        *point.column_mut() += 1;
        point = movement::find_boundary(map, point, |left, right| {
        if point.column() < map.line_len(point.row()) {
            *point.column_mut() += 1;
        } else if point.row() < map.max_buffer_row() {
            *point.row_mut() += 1;
            *point.column_mut() = 0;
        }
        point = movement::find_boundary(map, point, FindRange::MultiLine, |left, right| {
            let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
            let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);

@ -649,12 +655,13 @@ fn previous_word_start(
    for _ in 0..times {
        // This works even though find_preceding_boundary is called for every character in the line containing
        // the cursor because the newline is checked only once.
        point = movement::find_preceding_boundary(map, point, |left, right| {
            let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
            let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);
        point =
            movement::find_preceding_boundary(map, point, FindRange::MultiLine, |left, right| {
                let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
                let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);

            (left_kind != right_kind && !right.is_whitespace()) || left == '\n'
        });
                (left_kind != right_kind && !right.is_whitespace()) || left == '\n'
            });
    }
    point
}

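The FindRange argument added throughout these motions controls whether the boundary search may cross newlines: MultiLine keeps the old cross-line behavior for word motions, while SingleLine (used by the text objects further below) stops at the line boundary. The variant names come straight from the diff; the enum itself is defined in editor::movement and is not shown here, so this sketch of its shape is an assumption:

// Presumed shape of editor::movement::FindRange, inferred from its two uses above:
enum FindRange {
    // stop the search at the end of the current line
    SingleLine,
    // allow the search to continue across newlines
    MultiLine,
}
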
@ -27,7 +27,6 @@ use self::{
    case::change_case,
    change::{change_motion, change_object},
    delete::{delete_motion, delete_object},
    substitute::substitute,
    yank::{yank_motion, yank_object},
};

@ -44,7 +43,6 @@ actions!(
        ChangeToEndOfLine,
        DeleteToEndOfLine,
        Yank,
        Substitute,
        ChangeCase,
    ]
);
@ -56,13 +54,8 @@ pub fn init(cx: &mut AppContext) {
    cx.add_action(insert_line_above);
    cx.add_action(insert_line_below);
    cx.add_action(change_case);
    substitute::init(cx);
    search::init(cx);
    cx.add_action(|_: &mut Workspace, _: &Substitute, cx| {
        Vim::update(cx, |vim, cx| {
            let times = vim.pop_number_operator(cx);
            substitute(vim, times, cx);
        })
    });
    cx.add_action(|_: &mut Workspace, _: &DeleteLeft, cx| {
        Vim::update(cx, |vim, cx| {
            let times = vim.pop_number_operator(cx);
@ -445,7 +438,7 @@ mod test {
    }

    #[gpui::test]
    async fn test_e(cx: &mut gpui::TestAppContext) {
    async fn test_end_of_word(cx: &mut gpui::TestAppContext) {
        let mut cx = NeovimBackedTestContext::new(cx).await.binding(["e"]);
        cx.assert_all(indoc! {"
            Thˇe quicˇkˇ-browˇn

@ -1,7 +1,10 @@
use crate::{motion::Motion, object::Object, state::Mode, utils::copy_selections_content, Vim};
use editor::{
    char_kind, display_map::DisplaySnapshot, movement, scroll::autoscroll::Autoscroll, CharKind,
    DisplayPoint,
    char_kind,
    display_map::DisplaySnapshot,
    movement::{self, FindRange},
    scroll::autoscroll::Autoscroll,
    CharKind, DisplayPoint,
};
use gpui::WindowContext;
use language::Selection;
@ -96,12 +99,14 @@ fn expand_changed_word_selection(
        .unwrap_or_default();

    if in_word {
        selection.end = movement::find_boundary(map, selection.end, |left, right| {
            let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
            let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);
        selection.end =
            movement::find_boundary(map, selection.end, FindRange::MultiLine, |left, right| {
                let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
                let right_kind =
                    char_kind(&scope, right).coerce_punctuation(ignore_punctuation);

            left_kind != right_kind && left_kind != CharKind::Whitespace
        });
                left_kind != right_kind && left_kind != CharKind::Whitespace
            });
        true
    } else {
        Motion::NextWordStart { ignore_punctuation }

@ -67,7 +67,8 @@ fn scroll_editor(editor: &mut Editor, amount: &ScrollAmount, cx: &mut ViewContex
    let top_anchor = editor.scroll_manager.anchor().anchor;

    editor.change_selections(None, cx, |s| {
        s.move_heads_with(|map, head, goal| {
        s.move_with(|map, selection| {
            let head = selection.head();
            let top = top_anchor.to_display_point(map);
            let min_row = top.row() + VERTICAL_SCROLL_MARGIN as u32;
            let max_row = top.row() + visible_rows - VERTICAL_SCROLL_MARGIN as u32 - 1;
@ -79,7 +80,11 @@ fn scroll_editor(editor: &mut Editor, amount: &ScrollAmount, cx: &mut ViewContex
            } else {
                head
            };
            (new_head, goal)
            if selection.is_empty() {
                selection.collapse_to(new_head, selection.goal)
            } else {
                selection.set_head(new_head, selection.goal)
            };
        })
    });
}
@ -90,12 +95,35 @@ mod test {
    use crate::{state::Mode, test::VimTestContext};
    use gpui::geometry::vector::vec2f;
    use indoc::indoc;
    use language::Point;

    #[gpui::test]
    async fn test_scroll(cx: &mut gpui::TestAppContext) {
        let mut cx = VimTestContext::new(cx, true).await;

        cx.set_state(indoc! {"ˇa\nb\nc\nd\ne\n"}, Mode::Normal);
        let window = cx.window;
        let line_height =
            cx.editor(|editor, cx| editor.style(cx).text.line_height(cx.font_cache()));
        window.simulate_resize(vec2f(1000., 8.0 * line_height - 1.0), &mut cx);

        cx.set_state(
            indoc!(
                "ˇone
                two
                three
                four
                five
                six
                seven
                eight
                nine
                ten
                eleven
                twelve
                "
            ),
            Mode::Normal,
        );

        cx.update_editor(|editor, cx| {
            assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 0.))
@ -112,5 +140,33 @@ mod test {
        cx.update_editor(|editor, cx| {
            assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 2.))
        });

        // does not select in normal mode
        cx.simulate_keystrokes(["g", "g"]);
        cx.update_editor(|editor, cx| {
            assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 0.))
        });
        cx.simulate_keystrokes(["ctrl-d"]);
        cx.update_editor(|editor, cx| {
            assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 2.0));
            assert_eq!(
                editor.selections.newest(cx).range(),
                Point::new(5, 0)..Point::new(5, 0)
            )
        });

        // does select in visual mode
        cx.simulate_keystrokes(["g", "g"]);
        cx.update_editor(|editor, cx| {
            assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 0.))
        });
        cx.simulate_keystrokes(["v", "ctrl-d"]);
        cx.update_editor(|editor, cx| {
            assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 2.0));
            assert_eq!(
                editor.selections.newest(cx).range(),
                Point::new(0, 0)..Point::new(5, 1)
            )
        });
    }
}

@ -1,10 +1,32 @@
use gpui::WindowContext;
use editor::movement;
use gpui::{actions, AppContext, WindowContext};
use language::Point;
use workspace::Workspace;

use crate::{motion::Motion, utils::copy_selections_content, Mode, Vim};

pub fn substitute(vim: &mut Vim, count: Option<usize>, cx: &mut WindowContext) {
    let line_mode = vim.state().mode == Mode::VisualLine;
actions!(vim, [Substitute, SubstituteLine]);

pub(crate) fn init(cx: &mut AppContext) {
    cx.add_action(|_: &mut Workspace, _: &Substitute, cx| {
        Vim::update(cx, |vim, cx| {
            let count = vim.pop_number_operator(cx);
            substitute(vim, count, vim.state().mode == Mode::VisualLine, cx);
        })
    });

    cx.add_action(|_: &mut Workspace, _: &SubstituteLine, cx| {
        Vim::update(cx, |vim, cx| {
            if matches!(vim.state().mode, Mode::VisualBlock | Mode::Visual) {
                vim.switch_mode(Mode::VisualLine, false, cx)
            }
            let count = vim.pop_number_operator(cx);
            substitute(vim, count, true, cx)
        })
    });
}

pub fn substitute(vim: &mut Vim, count: Option<usize>, line_mode: bool, cx: &mut WindowContext) {
    vim.update_active_editor(cx, |editor, cx| {
        editor.set_clip_at_line_ends(false, cx);
        editor.transact(cx, |editor, cx| {
@ -14,6 +36,11 @@ pub fn substitute(vim: &mut Vim, count: Option<usize>, cx: &mut WindowContext) {
                    Motion::Right.expand_selection(map, selection, count, true);
                }
                if line_mode {
                    // in Visual mode when the selection contains the newline at the end
                    // of the line, we should exclude it.
                    if !selection.is_empty() && selection.end.column() == 0 {
                        selection.end = movement::left(map, selection.end);
                    }
                    Motion::CurrentLine.expand_selection(map, selection, None, false);
                    if let Some((point, _)) = (Motion::FirstNonWhitespace {
                        display_lines: false,
@ -166,4 +193,68 @@ mod test {
            the laˇzy dog"})
            .await;
    }

    #[gpui::test]
    async fn test_substitute_line(cx: &mut gpui::TestAppContext) {
        let mut cx = NeovimBackedTestContext::new(cx).await;

        let initial_state = indoc! {"
            The quick brown
            fox juˇmps over
            the lazy dog
            "};

        // normal mode
        cx.set_shared_state(initial_state).await;
        cx.simulate_shared_keystrokes(["shift-s", "o"]).await;
        cx.assert_shared_state(indoc! {"
            The quick brown
            oˇ
            the lazy dog
            "})
        .await;

        // visual mode
        cx.set_shared_state(initial_state).await;
        cx.simulate_shared_keystrokes(["v", "k", "shift-s", "o"])
            .await;
        cx.assert_shared_state(indoc! {"
            oˇ
            the lazy dog
            "})
        .await;

        // visual block mode
        cx.set_shared_state(initial_state).await;
        cx.simulate_shared_keystrokes(["ctrl-v", "j", "shift-s", "o"])
            .await;
        cx.assert_shared_state(indoc! {"
            The quick brown
            oˇ
            "})
        .await;

        // visual mode including newline
        cx.set_shared_state(initial_state).await;
        cx.simulate_shared_keystrokes(["v", "$", "shift-s", "o"])
            .await;
        cx.assert_shared_state(indoc! {"
            The quick brown
            oˇ
            the lazy dog
            "})
        .await;

        // indentation
        cx.set_neovim_option("shiftwidth=4").await;
        cx.set_shared_state(initial_state).await;
        cx.simulate_shared_keystrokes([">", ">", "shift-s", "o"])
            .await;
        cx.assert_shared_state(indoc! {"
            The quick brown
                oˇ
            the lazy dog
            "})
        .await;
    }
}

@ -1,6 +1,11 @@
use std::ops::Range;

use editor::{char_kind, display_map::DisplaySnapshot, movement, Bias, CharKind, DisplayPoint};
use editor::{
    char_kind,
    display_map::DisplaySnapshot,
    movement::{self, FindRange},
    Bias, CharKind, DisplayPoint,
};
use gpui::{actions, impl_actions, AppContext, WindowContext};
use language::Selection;
use serde::Deserialize;
@ -180,15 +185,17 @@ fn in_word(
    let scope = map
        .buffer_snapshot
        .language_scope_at(relative_to.to_point(map));
    let start = movement::find_preceding_boundary_in_line(
    let start = movement::find_preceding_boundary(
        map,
        right(map, relative_to, 1),
        movement::FindRange::SingleLine,
        |left, right| {
            char_kind(&scope, left).coerce_punctuation(ignore_punctuation)
                != char_kind(&scope, right).coerce_punctuation(ignore_punctuation)
        },
    );
    let end = movement::find_boundary_in_line(map, relative_to, |left, right| {

    let end = movement::find_boundary(map, relative_to, FindRange::SingleLine, |left, right| {
        char_kind(&scope, left).coerce_punctuation(ignore_punctuation)
            != char_kind(&scope, right).coerce_punctuation(ignore_punctuation)
    });
@ -247,9 +254,10 @@ fn around_next_word(
        .buffer_snapshot
        .language_scope_at(relative_to.to_point(map));
    // Get the start of the word
    let start = movement::find_preceding_boundary_in_line(
    let start = movement::find_preceding_boundary(
        map,
        right(map, relative_to, 1),
        FindRange::SingleLine,
        |left, right| {
            char_kind(&scope, left).coerce_punctuation(ignore_punctuation)
                != char_kind(&scope, right).coerce_punctuation(ignore_punctuation)
@ -257,7 +265,7 @@ fn around_next_word(
    );

    let mut word_found = false;
    let end = movement::find_boundary(map, relative_to, |left, right| {
    let end = movement::find_boundary(map, relative_to, FindRange::MultiLine, |left, right| {
        let left_kind = char_kind(&scope, left).coerce_punctuation(ignore_punctuation);
        let right_kind = char_kind(&scope, right).coerce_punctuation(ignore_punctuation);

@ -572,11 +580,18 @@ mod test {
    async fn test_visual_word_object(cx: &mut gpui::TestAppContext) {
        let mut cx = NeovimBackedTestContext::new(cx).await;

        cx.set_shared_state("The quick ˇbrown\nfox").await;
        /*
            cx.set_shared_state("The quick ˇbrown\nfox").await;
            cx.simulate_shared_keystrokes(["v"]).await;
            cx.assert_shared_state("The quick «bˇ»rown\nfox").await;
            cx.simulate_shared_keystrokes(["i", "w"]).await;
            cx.assert_shared_state("The quick «brownˇ»\nfox").await;
        */
        cx.set_shared_state("The quick brown\nˇ\nfox").await;
        cx.simulate_shared_keystrokes(["v"]).await;
        cx.assert_shared_state("The quick «bˇ»rown\nfox").await;
        cx.assert_shared_state("The quick brown\n«\nˇ»fox").await;
        cx.simulate_shared_keystrokes(["i", "w"]).await;
        cx.assert_shared_state("The quick «brownˇ»\nfox").await;
        cx.assert_shared_state("The quick brown\n«\nˇ»fox").await;

        cx.assert_binding_matches_all(["v", "i", "w"], WORD_LOCATIONS)
            .await;

@ -431,6 +431,24 @@ async fn test_wrapped_lines(cx: &mut gpui::TestAppContext) {
        twelve char
        "})
        .await;

    // line wraps as:
    // fourteen ch
    // ar
    // fourteen ch
    // ar
    cx.set_shared_state(indoc! { "
        fourteen chaˇr
        fourteen char
        "})
    .await;

    cx.simulate_shared_keystrokes(["d", "i", "w"]).await;
    cx.assert_shared_state(indoc! {"
        fourteenˇ•
        fourteen char
        "})
    .await;
}

#[gpui::test]

@ -153,6 +153,7 @@ impl<'a> NeovimBackedTestContext<'a> {
    }

    pub async fn assert_shared_state(&mut self, marked_text: &str) {
        let marked_text = marked_text.replace("•", " ");
        let neovim = self.neovim_state().await;
        let editor = self.editor_state();
        if neovim == marked_text && neovim == editor {
@ -184,9 +185,9 @@ impl<'a> NeovimBackedTestContext<'a> {
            message,
            initial_state,
            self.recent_keystrokes.join(" "),
            marked_text,
            neovim,
            editor
            marked_text.replace(" \n", "•\n"),
            neovim.replace(" \n", "•\n"),
            editor.replace(" \n", "•\n")
        )
    }

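The • substitution above works in both directions: expected text may spell a trailing space as •, and failure output re-marks trailing spaces as • so they stay visible. A tiny standalone sketch of the same convention (just the string transformations, not zed's test API):

fn main() {
    // Expected test text writes `•` where a literal trailing space is meant...
    let expected = "fourteen•\nfourteen char\n".replace('•', " ");
    // ...and failure output re-marks trailing spaces so they can be seen.
    let shown = expected.replace(" \n", "•\n");
    assert_eq!(shown, "fourteen•\nfourteen char\n");
}
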
@ -237,6 +237,9 @@ impl NeovimConnection {

    #[cfg(not(feature = "neovim"))]
    pub async fn set_option(&mut self, value: &str) {
        if let Some(NeovimData::Get { .. }) = self.data.front() {
            self.data.pop_front();
        };
        assert_eq!(
            self.data.pop_front(),
            Some(NeovimData::SetOption {

32
crates/vim/test_data/test_end_of_word.json
Normal file
@ -0,0 +1,32 @@
{"Put":{"state":"Thˇe quick-brown\n\n\nfox_jumps over\nthe"}}
{"Key":"e"}
{"Get":{"state":"The quicˇk-brown\n\n\nfox_jumps over\nthe","mode":"Normal"}}
{"Key":"e"}
{"Get":{"state":"The quickˇ-brown\n\n\nfox_jumps over\nthe","mode":"Normal"}}
{"Key":"e"}
{"Get":{"state":"The quick-browˇn\n\n\nfox_jumps over\nthe","mode":"Normal"}}
{"Key":"e"}
{"Get":{"state":"The quick-brown\n\n\nfox_jumpˇs over\nthe","mode":"Normal"}}
{"Key":"e"}
{"Get":{"state":"The quick-brown\n\n\nfox_jumps oveˇr\nthe","mode":"Normal"}}
{"Key":"e"}
{"Get":{"state":"The quick-brown\n\n\nfox_jumps over\nthˇe","mode":"Normal"}}
{"Key":"e"}
{"Get":{"state":"The quick-brown\n\n\nfox_jumps over\nthˇe","mode":"Normal"}}
{"Put":{"state":"Thˇe quick-brown\n\n\nfox_jumps over\nthe"}}
{"Key":"shift-e"}
{"Get":{"state":"The quick-browˇn\n\n\nfox_jumps over\nthe","mode":"Normal"}}
{"Put":{"state":"The quicˇk-brown\n\n\nfox_jumps over\nthe"}}
{"Key":"shift-e"}
{"Get":{"state":"The quick-browˇn\n\n\nfox_jumps over\nthe","mode":"Normal"}}
{"Put":{"state":"The quickˇ-brown\n\n\nfox_jumps over\nthe"}}
{"Key":"shift-e"}
{"Get":{"state":"The quick-browˇn\n\n\nfox_jumps over\nthe","mode":"Normal"}}
{"Key":"shift-e"}
{"Get":{"state":"The quick-brown\n\n\nfox_jumpˇs over\nthe","mode":"Normal"}}
{"Key":"shift-e"}
{"Get":{"state":"The quick-brown\n\n\nfox_jumps oveˇr\nthe","mode":"Normal"}}
{"Key":"shift-e"}
{"Get":{"state":"The quick-brown\n\n\nfox_jumps over\nthˇe","mode":"Normal"}}
{"Key":"shift-e"}
{"Get":{"state":"The quick-brown\n\n\nfox_jumps over\nthˇe","mode":"Normal"}}

29
crates/vim/test_data/test_substitute_line.json
Normal file
@ -0,0 +1,29 @@
{"Put":{"state":"The quick brown\nfox juˇmps over\nthe lazy dog\n"}}
{"Key":"shift-s"}
{"Key":"o"}
{"Get":{"state":"The quick brown\noˇ\nthe lazy dog\n","mode":"Insert"}}
{"Put":{"state":"The quick brown\nfox juˇmps over\nthe lazy dog\n"}}
{"Key":"v"}
{"Key":"k"}
{"Key":"shift-s"}
{"Key":"o"}
{"Get":{"state":"oˇ\nthe lazy dog\n","mode":"Insert"}}
{"Put":{"state":"The quick brown\nfox juˇmps over\nthe lazy dog\n"}}
{"Key":"ctrl-v"}
{"Key":"j"}
{"Key":"shift-s"}
{"Key":"o"}
{"Get":{"state":"The quick brown\noˇ\n","mode":"Insert"}}
{"Put":{"state":"The quick brown\nfox juˇmps over\nthe lazy dog\n"}}
{"Key":"v"}
{"Key":"$"}
{"Key":"shift-s"}
{"Key":"o"}
{"Get":{"state":"The quick brown\noˇ\nthe lazy dog\n","mode":"Insert"}}
{"SetOption":{"value":"shiftwidth=4"}}
{"Put":{"state":"The quick brown\nfox juˇmps over\nthe lazy dog\n"}}
{"Key":">"}
{"Key":">"}
{"Key":"shift-s"}
{"Key":"o"}
{"Get":{"state":"The quick brown\n    oˇ\nthe lazy dog\n","mode":"Insert"}}

@ -1,9 +1,9 @@
{"Put":{"state":"The quick ˇbrown\nfox"}}
{"Put":{"state":"The quick brown\nˇ\nfox"}}
{"Key":"v"}
{"Get":{"state":"The quick «bˇ»rown\nfox","mode":"Visual"}}
{"Get":{"state":"The quick brown\n«\nˇ»fox","mode":"Visual"}}
{"Key":"i"}
{"Key":"w"}
{"Get":{"state":"The quick «brownˇ»\nfox","mode":"Visual"}}
{"Get":{"state":"The quick brown\n«\nˇ»fox","mode":"Visual"}}
{"Put":{"state":"The quick ˇbrown \nfox jumps over\nthe lazy dog \n\n\n\nThe-quick brown \n \n \n fox-jumps over\nthe lazy dog \n\n"}}
{"Key":"v"}
{"Key":"i"}

@ -48,3 +48,8 @@
{"Key":"o"}
{"Key":"escape"}
{"Get":{"state":"twelve char\nˇo\ntwelve char twelve char\ntwelve char\n","mode":"Normal"}}
{"Put":{"state":"fourteen chaˇr\nfourteen char\n"}}
{"Key":"d"}
{"Key":"i"}
{"Key":"w"}
{"Get":{"state":"fourteenˇ \nfourteen char\n","mode":"Normal"}}

@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
version = "0.103.0"
version = "0.104.0"
publish = false

[lib]

@ -37,7 +37,7 @@ mod yaml;
#[exclude = "*.rs"]
struct LanguageDir;

pub fn init(languages: Arc<LanguageRegistry>, node_runtime: Arc<NodeRuntime>) {
pub fn init(languages: Arc<LanguageRegistry>, node_runtime: Arc<dyn NodeRuntime>) {
    let language = |name, grammar, adapters| {
        languages.register(name, load_config(name), grammar, adapters, load_queries)
    };

|
||||
}
|
||||
|
||||
pub struct CssLspAdapter {
|
||||
node: Arc<NodeRuntime>,
|
||||
node: Arc<dyn NodeRuntime>,
|
||||
}
|
||||
|
||||
impl CssLspAdapter {
|
||||
pub fn new(node: Arc<NodeRuntime>) -> Self {
|
||||
pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
|
||||
CssLspAdapter { node }
|
||||
}
|
||||
}
|
||||
@ -65,7 +65,7 @@ impl LspAdapter for CssLspAdapter {
|
||||
self.node
|
||||
.npm_install_packages(
|
||||
&container_dir,
|
||||
[("vscode-langservers-extracted", version.as_str())],
|
||||
&[("vscode-langservers-extracted", version.as_str())],
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
@ -81,14 +81,14 @@ impl LspAdapter for CssLspAdapter {
|
||||
container_dir: PathBuf,
|
||||
_: &dyn LspAdapterDelegate,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
get_cached_server_binary(container_dir, &self.node).await
|
||||
get_cached_server_binary(container_dir, &*self.node).await
|
||||
}
|
||||
|
||||
async fn installation_test_binary(
|
||||
&self,
|
||||
container_dir: PathBuf,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
get_cached_server_binary(container_dir, &self.node).await
|
||||
get_cached_server_binary(container_dir, &*self.node).await
|
||||
}
|
||||
|
||||
async fn initialization_options(&self) -> Option<serde_json::Value> {
|
||||
@ -100,7 +100,7 @@ impl LspAdapter for CssLspAdapter {
|
||||
|
||||
async fn get_cached_server_binary(
|
||||
container_dir: PathBuf,
|
||||
node: &NodeRuntime,
|
||||
node: &dyn NodeRuntime,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
(|| async move {
|
||||
let mut last_version_dir = None;
|
||||
|
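The other change repeated across these adapters is at the npm_install_packages call sites, which now pass &[...] rather than [...]; this suggests the method now borrows a slice of (name, version) pairs instead of taking an owned array. A minimal standalone analogue of that signature choice (the package name and version below are placeholders):

// Accepting `&[(&str, &str)]` borrows the caller's data and works for any length.
fn install(packages: &[(&str, &str)]) {
    for (name, version) in packages {
        println!("installing {name}@{version}");
    }
}

fn main() {
    // Call sites pass a borrowed slice instead of an owned array.
    install(&[("example-language-server", "0.0.0")]);
}
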
@ -22,11 +22,11 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
}

pub struct HtmlLspAdapter {
    node: Arc<NodeRuntime>,
    node: Arc<dyn NodeRuntime>,
}

impl HtmlLspAdapter {
    pub fn new(node: Arc<NodeRuntime>) -> Self {
    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
        HtmlLspAdapter { node }
    }
}
@ -65,7 +65,7 @@ impl LspAdapter for HtmlLspAdapter {
            self.node
                .npm_install_packages(
                    &container_dir,
                    [("vscode-langservers-extracted", version.as_str())],
                    &[("vscode-langservers-extracted", version.as_str())],
                )
                .await?;
        }
@ -81,14 +81,14 @@ impl LspAdapter for HtmlLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn initialization_options(&self) -> Option<serde_json::Value> {
@ -100,7 +100,7 @@ impl LspAdapter for HtmlLspAdapter {

async fn get_cached_server_binary(
    container_dir: PathBuf,
    node: &NodeRuntime,
    node: &dyn NodeRuntime,
) -> Option<LanguageServerBinary> {
    (|| async move {
        let mut last_version_dir = None;

@ -27,12 +27,12 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
}

pub struct JsonLspAdapter {
    node: Arc<NodeRuntime>,
    node: Arc<dyn NodeRuntime>,
    languages: Arc<LanguageRegistry>,
}

impl JsonLspAdapter {
    pub fn new(node: Arc<NodeRuntime>, languages: Arc<LanguageRegistry>) -> Self {
    pub fn new(node: Arc<dyn NodeRuntime>, languages: Arc<LanguageRegistry>) -> Self {
        JsonLspAdapter { node, languages }
    }
}
@ -71,7 +71,7 @@ impl LspAdapter for JsonLspAdapter {
            self.node
                .npm_install_packages(
                    &container_dir,
                    [("vscode-json-languageserver", version.as_str())],
                    &[("vscode-json-languageserver", version.as_str())],
                )
                .await?;
        }
@ -87,14 +87,14 @@ impl LspAdapter for JsonLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn initialization_options(&self) -> Option<serde_json::Value> {
@ -148,7 +148,7 @@ impl LspAdapter for JsonLspAdapter {

async fn get_cached_server_binary(
    container_dir: PathBuf,
    node: &NodeRuntime,
    node: &dyn NodeRuntime,
) -> Option<LanguageServerBinary> {
    (|| async move {
        let mut last_version_dir = None;

@ -23,14 +23,14 @@ fn intelephense_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
pub struct IntelephenseVersion(String);

pub struct IntelephenseLspAdapter {
    node: Arc<NodeRuntime>,
    node: Arc<dyn NodeRuntime>,
}

impl IntelephenseLspAdapter {
    const SERVER_PATH: &'static str = "node_modules/intelephense/lib/intelephense.js";

    #[allow(unused)]
    pub fn new(node: Arc<NodeRuntime>) -> Self {
    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
        Self { node }
    }
}
@ -65,7 +65,7 @@ impl LspAdapter for IntelephenseLspAdapter {

        if fs::metadata(&server_path).await.is_err() {
            self.node
                .npm_install_packages(&container_dir, [("intelephense", version.0.as_str())])
                .npm_install_packages(&container_dir, &[("intelephense", version.0.as_str())])
                .await?;
        }
        Ok(LanguageServerBinary {
@ -79,14 +79,14 @@ impl LspAdapter for IntelephenseLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn label_for_completion(
@ -107,7 +107,7 @@ impl LspAdapter for IntelephenseLspAdapter {

async fn get_cached_server_binary(
    container_dir: PathBuf,
    node: &NodeRuntime,
    node: &dyn NodeRuntime,
) -> Option<LanguageServerBinary> {
    (|| async move {
        let mut last_version_dir = None;

@ -20,11 +20,11 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
}

pub struct PythonLspAdapter {
    node: Arc<NodeRuntime>,
    node: Arc<dyn NodeRuntime>,
}

impl PythonLspAdapter {
    pub fn new(node: Arc<NodeRuntime>) -> Self {
    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
        PythonLspAdapter { node }
    }
}
@ -57,7 +57,7 @@ impl LspAdapter for PythonLspAdapter {

        if fs::metadata(&server_path).await.is_err() {
            self.node
                .npm_install_packages(&container_dir, [("pyright", version.as_str())])
                .npm_install_packages(&container_dir, &[("pyright", version.as_str())])
                .await?;
        }

@ -72,14 +72,14 @@ impl LspAdapter for PythonLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn process_completion(&self, item: &mut lsp::CompletionItem) {
@ -162,7 +162,7 @@ impl LspAdapter for PythonLspAdapter {

async fn get_cached_server_binary(
    container_dir: PathBuf,
    node: &NodeRuntime,
    node: &dyn NodeRuntime,
) -> Option<LanguageServerBinary> {
    (|| async move {
        let mut last_version_dir = None;

@ -21,11 +21,11 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
}

pub struct SvelteLspAdapter {
    node: Arc<NodeRuntime>,
    node: Arc<dyn NodeRuntime>,
}

impl SvelteLspAdapter {
    pub fn new(node: Arc<NodeRuntime>) -> Self {
    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
        SvelteLspAdapter { node }
    }
}
@ -64,7 +64,7 @@ impl LspAdapter for SvelteLspAdapter {
            self.node
                .npm_install_packages(
                    &container_dir,
                    [("svelte-language-server", version.as_str())],
                    &[("svelte-language-server", version.as_str())],
                )
                .await?;
        }
@ -80,14 +80,14 @@ impl LspAdapter for SvelteLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn initialization_options(&self) -> Option<serde_json::Value> {
@ -99,7 +99,7 @@ impl LspAdapter for SvelteLspAdapter {

async fn get_cached_server_binary(
    container_dir: PathBuf,
    node: &NodeRuntime,
    node: &dyn NodeRuntime,
) -> Option<LanguageServerBinary> {
    (|| async move {
        let mut last_version_dir = None;

@ -26,11 +26,11 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
}

pub struct TailwindLspAdapter {
    node: Arc<NodeRuntime>,
    node: Arc<dyn NodeRuntime>,
}

impl TailwindLspAdapter {
    pub fn new(node: Arc<NodeRuntime>) -> Self {
    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
        TailwindLspAdapter { node }
    }
}
@ -69,7 +69,7 @@ impl LspAdapter for TailwindLspAdapter {
            self.node
                .npm_install_packages(
                    &container_dir,
                    [("@tailwindcss/language-server", version.as_str())],
                    &[("@tailwindcss/language-server", version.as_str())],
                )
                .await?;
        }
@ -85,14 +85,14 @@ impl LspAdapter for TailwindLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn initialization_options(&self) -> Option<serde_json::Value> {
@ -131,7 +131,7 @@ impl LspAdapter for TailwindLspAdapter {

async fn get_cached_server_binary(
    container_dir: PathBuf,
    node: &NodeRuntime,
    node: &dyn NodeRuntime,
) -> Option<LanguageServerBinary> {
    (|| async move {
        let mut last_version_dir = None;

@ -33,14 +33,14 @@ fn eslint_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
}

pub struct TypeScriptLspAdapter {
    node: Arc<NodeRuntime>,
    node: Arc<dyn NodeRuntime>,
}

impl TypeScriptLspAdapter {
    const OLD_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.js";
    const NEW_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.mjs";

    pub fn new(node: Arc<NodeRuntime>) -> Self {
    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
        TypeScriptLspAdapter { node }
    }
}
@ -86,7 +86,7 @@ impl LspAdapter for TypeScriptLspAdapter {
            self.node
                .npm_install_packages(
                    &container_dir,
                    [
                    &[
                        ("typescript", version.typescript_version.as_str()),
                        (
                            "typescript-language-server",
@ -108,14 +108,14 @@ impl LspAdapter for TypeScriptLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_ts_server_binary(container_dir, &self.node).await
        get_cached_ts_server_binary(container_dir, &*self.node).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_ts_server_binary(container_dir, &self.node).await
        get_cached_ts_server_binary(container_dir, &*self.node).await
    }

    fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
@ -165,7 +165,7 @@ impl LspAdapter for TypeScriptLspAdapter {

async fn get_cached_ts_server_binary(
    container_dir: PathBuf,
    node: &NodeRuntime,
    node: &dyn NodeRuntime,
) -> Option<LanguageServerBinary> {
    (|| async move {
        let old_server_path = container_dir.join(TypeScriptLspAdapter::OLD_SERVER_PATH);
@ -192,14 +192,14 @@ async fn get_cached_ts_server_binary(
}

pub struct EsLintLspAdapter {
    node: Arc<NodeRuntime>,
    node: Arc<dyn NodeRuntime>,
}

impl EsLintLspAdapter {
    const SERVER_PATH: &'static str = "vscode-eslint/server/out/eslintServer.js";

    #[allow(unused)]
    pub fn new(node: Arc<NodeRuntime>) -> Self {
    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
        EsLintLspAdapter { node }
    }
}
@ -288,14 +288,14 @@ impl LspAdapter for EsLintLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_eslint_server_binary(container_dir, &self.node).await
        get_cached_eslint_server_binary(container_dir, &*self.node).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_eslint_server_binary(container_dir, &self.node).await
        get_cached_eslint_server_binary(container_dir, &*self.node).await
    }

    async fn label_for_completion(
@ -313,7 +313,7 @@ impl LspAdapter for EsLintLspAdapter {

async fn get_cached_eslint_server_binary(
    container_dir: PathBuf,
    node: &NodeRuntime,
    node: &dyn NodeRuntime,
) -> Option<LanguageServerBinary> {
    (|| async move {
        // This is unfortunate but we don't know what the version is to build a path directly

@ -25,11 +25,11 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
}

pub struct YamlLspAdapter {
    node: Arc<NodeRuntime>,
    node: Arc<dyn NodeRuntime>,
}

impl YamlLspAdapter {
    pub fn new(node: Arc<NodeRuntime>) -> Self {
    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
        YamlLspAdapter { node }
    }
}
@ -66,7 +66,10 @@ impl LspAdapter for YamlLspAdapter {

        if fs::metadata(&server_path).await.is_err() {
            self.node
                .npm_install_packages(&container_dir, [("yaml-language-server", version.as_str())])
                .npm_install_packages(
                    &container_dir,
                    &[("yaml-language-server", version.as_str())],
                )
                .await?;
        }

@ -81,14 +84,14 @@ impl LspAdapter for YamlLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }

    async fn installation_test_binary(
        &self,
        container_dir: PathBuf,
    ) -> Option<LanguageServerBinary> {
        get_cached_server_binary(container_dir, &self.node).await
        get_cached_server_binary(container_dir, &*self.node).await
    }
    fn workspace_configuration(&self, cx: &mut AppContext) -> BoxFuture<'static, Value> {
        let tab_size = all_language_settings(None, cx)
@ -109,7 +112,7 @@ impl LspAdapter for YamlLspAdapter {

async fn get_cached_server_binary(
    container_dir: PathBuf,
    node: &NodeRuntime,
    node: &dyn NodeRuntime,
) -> Option<LanguageServerBinary> {
    (|| async move {
        let mut last_version_dir = None;

@ -19,7 +19,7 @@ use gpui::{Action, App, AppContext, AssetSource, AsyncAppContext, Task};
use isahc::{config::Configurable, Request};
use language::{LanguageRegistry, Point};
use log::LevelFilter;
use node_runtime::NodeRuntime;
use node_runtime::RealNodeRuntime;
use parking_lot::Mutex;
use project::Fs;
use serde::{Deserialize, Serialize};
@ -138,7 +138,7 @@ fn main() {
        languages.set_executor(cx.background().clone());
        languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone());
        let languages = Arc::new(languages);
        let node_runtime = NodeRuntime::instance(http.clone());
        let node_runtime = RealNodeRuntime::new(http.clone());

        languages::init(languages.clone(), node_runtime.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));

@ -723,7 +723,6 @@ mod tests {
        AppContext, AssetSource, Element, Entity, TestAppContext, View, ViewHandle,
    };
    use language::LanguageRegistry;
    use node_runtime::NodeRuntime;
    use project::{Project, ProjectPath};
    use serde_json::json;
    use settings::{handle_settings_file_changes, watch_config_file, SettingsStore};
@ -732,7 +731,6 @@ mod tests {
        path::{Path, PathBuf},
    };
    use theme::{ThemeRegistry, ThemeSettings};
    use util::http::FakeHttpClient;
    use workspace::{
        item::{Item, ItemHandle},
        open_new, open_paths, pane, NewFile, SplitDirection, WorkspaceHandle,
@ -2364,8 +2362,7 @@ mod tests {
        let mut languages = LanguageRegistry::test();
        languages.set_executor(cx.background().clone());
        let languages = Arc::new(languages);
        let http = FakeHttpClient::with_404_response();
        let node_runtime = NodeRuntime::instance(http);
        let node_runtime = node_runtime::FakeNodeRuntime::new();
        languages::init(languages.clone(), node_runtime);
        for name in languages.language_names() {
            languages.language_for_name(&name);

@ -9,7 +9,6 @@ const CARGO_TEST_ARGS = [
  '--release',
  '--lib',
  '--package', 'collab',
  'random_collaboration',
]

if (require.main === module) {
@ -99,7 +98,7 @@ function buildTests() {
}

function runTests(env) {
  const {status, stdout} = spawnSync('cargo', ['test', ...CARGO_TEST_ARGS], {
  const {status, stdout} = spawnSync('cargo', ['test', ...CARGO_TEST_ARGS, 'random_project_collaboration'], {
    stdio: 'pipe',
    encoding: 'utf8',
    env: {

@ -1,5 +1,6 @@
import { interactive, toggleable } from "../element"
import {
    Border,
    TextProperties,
    background,
    foreground,
@ -16,6 +17,7 @@ interface TextButtonOptions {
    margin?: Partial<Margin>
    disabled?: boolean
    text_properties?: TextProperties
    border?: Border
}

type ToggleableTextButtonOptions = TextButtonOptions & {
@ -29,6 +31,7 @@ export function text_button({
    margin,
    disabled,
    text_properties,
    border,
}: TextButtonOptions = {}) {
    const theme = useTheme()
    if (!color) color = "base"
@ -66,6 +69,7 @@ export function text_button({
        },
        state: {
            default: {
                border,
                background: background_color,
                color: disabled
                    ? foreground(layer ?? theme.lowest, "disabled")
@ -74,6 +78,7 @@ export function text_button({
            hovered: disabled
                ? {}
                : {
                      border,
                      background: background(
                          layer ?? theme.lowest,
                          color,
@ -88,6 +93,7 @@ export function text_button({
            clicked: disabled
                ? {}
                : {
                      border,
                      background: background(
                          layer ?? theme.lowest,
                          color,

@ -48,7 +48,7 @@ export default function search(): any {
    }

    return {
        padding: { top: 0, bottom: 0 },
        padding: { top: 4, bottom: 4 },

        option_button: toggleable({
            base: interactive({
@ -394,7 +394,7 @@ export default function search(): any {
                }),
            },
        }),
        search_bar_row_height: 32,
        search_bar_row_height: 34,
        search_row_spacing: 8,
        option_button_height: 22,
        modes_container: {},

@ -1,14 +1,15 @@
import { useTheme } from "../common"
import { toggleable_icon_button } from "../component/icon_button"
import { interactive } from "../element"
import { interactive, toggleable } from "../element"
import { background, border, foreground, text } from "./components"
import { text_button } from "../component";

export const toolbar = () => {
    const theme = useTheme()

    return {
        height: 32,
        padding: { left: 4, right: 4, top: 4, bottom: 4 },
        height: 42,
        padding: { left: 4, right: 4 },
        background: background(theme.highest),
        border: border(theme.highest, { bottom: true }),
        item_spacing: 4,
@ -34,5 +35,24 @@ export const toolbar = () => {
                },
            },
        }),
        toggleable_text_tool: toggleable({
            state: {
                inactive: text_button({
                    disabled: true,
                    variant: "ghost",
                    layer: theme.highest,
                    margin: { left: 4 },
                    text_properties: { size: "sm" },
                    border: border(theme.middle),
                }),
                active: text_button({
                    variant: "ghost",
                    layer: theme.highest,
                    margin: { left: 4 },
                    text_properties: { size: "sm" },
                    border: border(theme.middle),
                }),
            }
        }),
    }
}