Introduce channel notes (#2880)

![Screen Shot 2023-08-24 at 5 26 53 PM](https://github.com/zed-industries/zed/assets/326587/3e84c5a6-1aaf-4335-a880-4c32eb83332d)

### Todo

* [x] Snapshot channel buffers when everyone closes the buffer
* [x] Ensure that users who are in both a project and a channel note
have the same color in both places
* [x] Allow following project collaborators into channel notes
* [x] Expose notes for the current channel under "Current Call" section
of the collaboration panel
* [x] Offline state for the channel notes view
* [x] Make the channel context menu accessible to all members (to expose
the notes)
* [x] Wire in view and Item method overrides
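
To make the flow above concrete, here is a rough sketch (illustrative only; the surrounding handles and context are assumed, not part of this change) of how a client opens a channel's notes through the new `ChannelStore` API:

```rust
// Sketch only: `channel_store`, `channel_id`, and `cx` are assumed to come from
// the surrounding view (e.g. the collab panel); error handling is elided.
let open_notes = channel_store.update(cx, |store, cx| {
    // Opening is deduplicated per app instance; see ChannelStore::open_channel_buffer.
    store.open_channel_buffer(channel_id, cx)
});
cx.spawn(|_, mut cx| async move {
    // A ChannelBuffer wraps a language::Buffer that is synced over RPC.
    let channel_buffer = open_notes.await?;
    let text_buffer = channel_buffer.read_with(&cx, |buffer, _| buffer.buffer());
    // An editor view can now be opened on `text_buffer`; collaborator changes and
    // disconnects are surfaced as channel_buffer::Event values.
    anyhow::Ok(())
})
.detach();
```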

Release Notes:

- N/A
Commit fbc7b37b2f by Mikayla Maki, 2023-08-24 18:01:28 -07:00 (committed by GitHub)
59 changed files with 3394 additions and 750 deletions

Cargo.lock (generated)

@ -1063,6 +1063,7 @@ dependencies = [
"anyhow", "anyhow",
"async-broadcast", "async-broadcast",
"audio", "audio",
"channel",
"client", "client",
"collections", "collections",
"fs", "fs",
@ -1190,6 +1191,41 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "channel"
version = "0.1.0"
dependencies = [
"anyhow",
"client",
"collections",
"db",
"futures 0.3.28",
"gpui",
"image",
"language",
"lazy_static",
"log",
"parking_lot 0.11.2",
"postage",
"rand 0.8.5",
"rpc",
"schemars",
"serde",
"serde_derive",
"settings",
"smol",
"staff_mode",
"sum_tree",
"tempfile",
"text",
"thiserror",
"time 0.3.24",
"tiny_http",
"url",
"util",
"uuid 1.4.1",
]
[[package]]
name = "chrono"
version = "0.4.26"
@ -1354,6 +1390,7 @@ dependencies = [
"staff_mode",
"sum_tree",
"tempfile",
"text",
"thiserror",
"time 0.3.24",
"tiny_http",
@ -1418,8 +1455,11 @@ dependencies = [
"axum-extra",
"base64 0.13.1",
"call",
"channel",
"clap 3.2.25",
"client",
"clock",
"collab_ui",
"collections",
"ctor",
"dashmap",
@ -1444,6 +1484,7 @@ dependencies = [
"pretty_assertions",
"project",
"prometheus",
"prost 0.8.0",
"rand 0.8.5",
"reqwest",
"rpc",
@ -1456,6 +1497,7 @@ dependencies = [
"settings",
"sha-1 0.9.8",
"sqlx",
"text",
"theme",
"time 0.3.24",
"tokio",
@ -1478,6 +1520,7 @@ dependencies = [
"anyhow",
"auto_update",
"call",
"channel",
"client",
"clock",
"collections",
@ -1488,6 +1531,7 @@ dependencies = [
"futures 0.3.28",
"fuzzy",
"gpui",
"language",
"log",
"menu",
"picker",
@ -9534,6 +9578,7 @@ dependencies = [
"async-recursion 1.0.4",
"bincode",
"call",
"channel",
"client",
"collections",
"context_menu",
@ -9659,6 +9704,7 @@ dependencies = [
"backtrace",
"breadcrumbs",
"call",
"channel",
"chrono",
"cli",
"client",


@ -6,6 +6,7 @@ members = [
"crates/auto_update", "crates/auto_update",
"crates/breadcrumbs", "crates/breadcrumbs",
"crates/call", "crates/call",
"crates/channel",
"crates/cli", "crates/cli",
"crates/client", "crates/client",
"crates/clock", "crates/clock",
@ -96,6 +97,7 @@ log = { version = "0.4.16", features = ["kv_unstable_serde"] }
ordered-float = { version = "2.1.1" } ordered-float = { version = "2.1.1" }
parking_lot = { version = "0.11.1" } parking_lot = { version = "0.11.1" }
postage = { version = "0.5", features = ["futures-traits"] } postage = { version = "0.5", features = ["futures-traits"] }
prost = { version = "0.8" }
rand = { version = "0.8.5" } rand = { version = "0.8.5" }
refineable = { path = "./crates/refineable" } refineable = { path = "./crates/refineable" }
regex = { version = "1.5" } regex = { version = "1.5" }


@ -20,6 +20,7 @@ test-support = [
[dependencies]
audio = { path = "../audio" }
channel = { path = "../channel" }
client = { path = "../client" }
collections = { path = "../collections" }
gpui = { path = "../gpui" }


@ -7,9 +7,8 @@ use std::sync::Arc;
use anyhow::{anyhow, Result};
use audio::Audio;
use call_settings::CallSettings;
-use client::{
-proto, ChannelId, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore,
-};
use channel::ChannelId;
use client::{proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore};
use collections::HashSet;
use futures::{future::Shared, FutureExt};
use postage::watch;

crates/channel/Cargo.toml (new file)

@ -0,0 +1,51 @@
[package]
name = "channel"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/channel.rs"
doctest = false
[features]
test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"]
[dependencies]
client = { path = "../client" }
collections = { path = "../collections" }
db = { path = "../db" }
gpui = { path = "../gpui" }
util = { path = "../util" }
rpc = { path = "../rpc" }
text = { path = "../text" }
language = { path = "../language" }
settings = { path = "../settings" }
staff_mode = { path = "../staff_mode" }
sum_tree = { path = "../sum_tree" }
anyhow.workspace = true
futures.workspace = true
image = "0.23"
lazy_static.workspace = true
log.workspace = true
parking_lot.workspace = true
postage.workspace = true
rand.workspace = true
schemars.workspace = true
smol.workspace = true
thiserror.workspace = true
time.workspace = true
tiny_http = "0.8"
uuid = { version = "1.1.2", features = ["v4"] }
url = "2.2"
serde.workspace = true
serde_derive.workspace = true
tempfile = "3"
[dev-dependencies]
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }


@ -0,0 +1,14 @@
mod channel_store;
pub mod channel_buffer;
use std::sync::Arc;
pub use channel_store::*;
use client::Client;
#[cfg(test)]
mod channel_store_tests;
pub fn init(client: &Arc<Client>) {
channel_buffer::init(client);
}


@ -0,0 +1,197 @@
use crate::Channel;
use anyhow::Result;
use client::Client;
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle};
use rpc::{proto, TypedEnvelope};
use std::sync::Arc;
use util::ResultExt;
pub(crate) fn init(client: &Arc<Client>) {
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
client.add_model_message_handler(ChannelBuffer::handle_add_channel_buffer_collaborator);
client.add_model_message_handler(ChannelBuffer::handle_remove_channel_buffer_collaborator);
}
pub struct ChannelBuffer {
pub(crate) channel: Arc<Channel>,
connected: bool,
collaborators: Vec<proto::Collaborator>,
buffer: ModelHandle<language::Buffer>,
client: Arc<Client>,
subscription: Option<client::Subscription>,
}
pub enum Event {
CollaboratorsChanged,
Disconnected,
}
impl Entity for ChannelBuffer {
type Event = Event;
fn release(&mut self, _: &mut AppContext) {
if self.connected {
self.client
.send(proto::LeaveChannelBuffer {
channel_id: self.channel.id,
})
.log_err();
}
}
}
impl ChannelBuffer {
pub(crate) async fn new(
channel: Arc<Channel>,
client: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<ModelHandle<Self>> {
let response = client
.request(proto::JoinChannelBuffer {
channel_id: channel.id,
})
.await?;
let base_text = response.base_text;
let operations = response
.operations
.into_iter()
.map(language::proto::deserialize_operation)
.collect::<Result<Vec<_>, _>>()?;
let collaborators = response.collaborators;
let buffer = cx.add_model(|_| {
language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text)
});
buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
let subscription = client.subscribe_to_entity(channel.id)?;
anyhow::Ok(cx.add_model(|cx| {
cx.subscribe(&buffer, Self::on_buffer_update).detach();
Self {
buffer,
client,
connected: true,
collaborators,
channel,
subscription: Some(subscription.set_model(&cx.handle(), &mut cx.to_async())),
}
}))
}
async fn handle_update_channel_buffer(
this: ModelHandle<Self>,
update_channel_buffer: TypedEnvelope<proto::UpdateChannelBuffer>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<()> {
let ops = update_channel_buffer
.payload
.operations
.into_iter()
.map(language::proto::deserialize_operation)
.collect::<Result<Vec<_>, _>>()?;
this.update(&mut cx, |this, cx| {
cx.notify();
this.buffer
.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
})?;
Ok(())
}
async fn handle_add_channel_buffer_collaborator(
this: ModelHandle<Self>,
envelope: TypedEnvelope<proto::AddChannelBufferCollaborator>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<()> {
let collaborator = envelope.payload.collaborator.ok_or_else(|| {
anyhow::anyhow!(
"Should have gotten a collaborator in the AddChannelBufferCollaborator message"
)
})?;
this.update(&mut cx, |this, cx| {
this.collaborators.push(collaborator);
cx.emit(Event::CollaboratorsChanged);
cx.notify();
});
Ok(())
}
async fn handle_remove_channel_buffer_collaborator(
this: ModelHandle<Self>,
message: TypedEnvelope<proto::RemoveChannelBufferCollaborator>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
this.collaborators.retain(|collaborator| {
if collaborator.peer_id == message.payload.peer_id {
this.buffer.update(cx, |buffer, cx| {
buffer.remove_peer(collaborator.replica_id as u16, cx)
});
false
} else {
true
}
});
cx.emit(Event::CollaboratorsChanged);
cx.notify();
});
Ok(())
}
fn on_buffer_update(
&mut self,
_: ModelHandle<language::Buffer>,
event: &language::Event,
_: &mut ModelContext<Self>,
) {
if let language::Event::Operation(operation) = event {
let operation = language::proto::serialize_operation(operation);
self.client
.send(proto::UpdateChannelBuffer {
channel_id: self.channel.id,
operations: vec![operation],
})
.log_err();
}
}
pub fn buffer(&self) -> ModelHandle<language::Buffer> {
self.buffer.clone()
}
pub fn collaborators(&self) -> &[proto::Collaborator] {
&self.collaborators
}
pub fn channel(&self) -> Arc<Channel> {
self.channel.clone()
}
pub(crate) fn disconnect(&mut self, cx: &mut ModelContext<Self>) {
if self.connected {
self.connected = false;
self.subscription.take();
cx.emit(Event::Disconnected);
cx.notify()
}
}
pub fn is_connected(&self) -> bool {
self.connected
}
pub fn replica_id(&self, cx: &AppContext) -> u16 {
self.buffer.read(cx).replica_id()
}
}


@ -1,19 +1,14 @@
-use crate::Status;
-use crate::{Client, Subscription, User, UserStore};
-use anyhow::anyhow;
-use anyhow::Result;
-use collections::HashMap;
-use collections::HashSet;
-use futures::channel::mpsc;
-use futures::Future;
-use futures::StreamExt;
-use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
use crate::channel_buffer::ChannelBuffer;
use anyhow::{anyhow, Result};
use client::{Client, Status, Subscription, User, UserId, UserStore};
use collections::{hash_map, HashMap, HashSet};
use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
use rpc::{proto, TypedEnvelope};
use std::sync::Arc;
use util::ResultExt;

pub type ChannelId = u64;
-pub type UserId = u64;

pub struct ChannelStore {
channels_by_id: HashMap<ChannelId, Arc<Channel>>,
@ -23,6 +18,7 @@ pub struct ChannelStore {
channels_with_admin_privileges: HashSet<ChannelId>,
outgoing_invites: HashSet<(ChannelId, UserId)>,
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
opened_buffers: HashMap<ChannelId, OpenedChannelBuffer>,
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
_rpc_subscription: Subscription,
@ -57,6 +53,11 @@ pub enum ChannelMemberStatus {
NotMember,
}

enum OpenedChannelBuffer {
Open(WeakModelHandle<ChannelBuffer>),
Loading(Shared<Task<Result<ModelHandle<ChannelBuffer>, Arc<anyhow::Error>>>>),
}

impl ChannelStore {
pub fn new(
client: Arc<Client>,
@ -70,16 +71,14 @@ impl ChannelStore {
let mut connection_status = client.status();
let watch_connection_status = cx.spawn_weak(|this, mut cx| async move {
while let Some(status) = connection_status.next().await {
-if matches!(status, Status::ConnectionLost | Status::SignedOut) {
if !status.is_connected() {
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
-this.channels_by_id.clear();
-this.channel_invitations.clear();
-this.channel_participants.clear();
-this.channels_with_admin_privileges.clear();
-this.channel_paths.clear();
-this.outgoing_invites.clear();
-cx.notify();
if matches!(status, Status::ConnectionLost | Status::SignedOut) {
this.handle_disconnect(cx);
} else {
this.disconnect_buffers(cx);
}
});
} else {
break;
@ -87,6 +86,7 @@
}
}
});

Self {
channels_by_id: HashMap::default(),
channel_invitations: Vec::default(),
@ -94,6 +94,7 @@
channel_participants: Default::default(),
channels_with_admin_privileges: Default::default(),
outgoing_invites: Default::default(),
opened_buffers: Default::default(),
update_channels_tx,
client,
user_store,
@ -151,6 +152,74 @@ impl ChannelStore {
self.channels_by_id.get(&channel_id)
}
pub fn open_channel_buffer(
&mut self,
channel_id: ChannelId,
cx: &mut ModelContext<Self>,
) -> Task<Result<ModelHandle<ChannelBuffer>>> {
// Make sure that a given channel buffer is only opened once per
// app instance, even if this method is called multiple times
// with the same channel id while the first task is still running.
let task = loop {
match self.opened_buffers.entry(channel_id) {
hash_map::Entry::Occupied(e) => match e.get() {
OpenedChannelBuffer::Open(buffer) => {
if let Some(buffer) = buffer.upgrade(cx) {
break Task::ready(Ok(buffer)).shared();
} else {
self.opened_buffers.remove(&channel_id);
continue;
}
}
OpenedChannelBuffer::Loading(task) => break task.clone(),
},
hash_map::Entry::Vacant(e) => {
let client = self.client.clone();
let task = cx
.spawn(|this, cx| async move {
let channel = this.read_with(&cx, |this, _| {
this.channel_for_id(channel_id).cloned().ok_or_else(|| {
Arc::new(anyhow!("no channel for id: {}", channel_id))
})
})?;
ChannelBuffer::new(channel, client, cx)
.await
.map_err(Arc::new)
})
.shared();
e.insert(OpenedChannelBuffer::Loading(task.clone()));
cx.spawn({
let task = task.clone();
|this, mut cx| async move {
let result = task.await;
this.update(&mut cx, |this, cx| match result {
Ok(buffer) => {
cx.observe_release(&buffer, move |this, _, _| {
this.opened_buffers.remove(&channel_id);
})
.detach();
this.opened_buffers.insert(
channel_id,
OpenedChannelBuffer::Open(buffer.downgrade()),
);
}
Err(error) => {
log::error!("failed to open channel buffer {error:?}");
this.opened_buffers.remove(&channel_id);
}
});
}
})
.detach();
break task;
}
}
};
cx.foreground()
.spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
}
pub fn is_user_admin(&self, channel_id: ChannelId) -> bool {
self.channel_paths.iter().any(|path| {
if let Some(ix) = path.iter().position(|id| *id == channel_id) {
@ -413,6 +482,27 @@ impl ChannelStore {
Ok(())
}
fn handle_disconnect(&mut self, cx: &mut ModelContext<'_, ChannelStore>) {
self.disconnect_buffers(cx);
self.channels_by_id.clear();
self.channel_invitations.clear();
self.channel_participants.clear();
self.channels_with_admin_privileges.clear();
self.channel_paths.clear();
self.outgoing_invites.clear();
cx.notify();
}
fn disconnect_buffers(&mut self, cx: &mut ModelContext<ChannelStore>) {
for (_, buffer) in self.opened_buffers.drain() {
if let OpenedChannelBuffer::Open(buffer) = buffer {
if let Some(buffer) = buffer.upgrade(cx) {
buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
}
}
}
}
pub(crate) fn update_channels(
&mut self,
payload: proto::UpdateChannels,
@ -447,25 +537,30 @@ impl ChannelStore {
.retain(|channel_id, _| !payload.remove_channels.contains(channel_id));
self.channels_with_admin_privileges
.retain(|channel_id| !payload.remove_channels.contains(channel_id));

for channel_id in &payload.remove_channels {
let channel_id = *channel_id;
if let Some(OpenedChannelBuffer::Open(buffer)) =
self.opened_buffers.remove(&channel_id)
{
if let Some(buffer) = buffer.upgrade(cx) {
buffer.update(cx, ChannelBuffer::disconnect);
}
}
}
}

-for channel in payload.channels {
-if let Some(existing_channel) = self.channels_by_id.get_mut(&channel.id) {
-// FIXME: We may be missing a path for this existing channel in certain cases
-let existing_channel = Arc::make_mut(existing_channel);
-existing_channel.name = channel.name;
-continue;
-}
-self.channels_by_id.insert(
-channel.id,
-Arc::new(Channel {
-id: channel.id,
-name: channel.name,
-}),
-);
-if let Some(parent_id) = channel.parent_id {
for channel_proto in payload.channels {
if let Some(existing_channel) = self.channels_by_id.get_mut(&channel_proto.id) {
Arc::make_mut(existing_channel).name = channel_proto.name;
} else {
let channel = Arc::new(Channel {
id: channel_proto.id,
name: channel_proto.name,
});
self.channels_by_id.insert(channel.id, channel.clone());
if let Some(parent_id) = channel_proto.parent_id {
let mut ix = 0;
while ix < self.channel_paths.len() {
let path = &self.channel_paths[ix];
@ -481,6 +576,7 @@
self.channel_paths.push(vec![channel.id]);
}
}
}
self.channel_paths.sort_by(|a, b| {
let a = Self::channel_path_sorting_key(a, &self.channels_by_id);


@ -1,4 +1,7 @@
use super::*;
use client::{Client, UserStore};
use gpui::{AppContext, ModelHandle};
use rpc::proto;
use util::http::FakeHttpClient;

#[gpui::test]

@ -17,6 +17,7 @@ db = { path = "../db" }
gpui = { path = "../gpui" } gpui = { path = "../gpui" }
util = { path = "../util" } util = { path = "../util" }
rpc = { path = "../rpc" } rpc = { path = "../rpc" }
text = { path = "../text" }
settings = { path = "../settings" } settings = { path = "../settings" }
staff_mode = { path = "../staff_mode" } staff_mode = { path = "../staff_mode" }
sum_tree = { path = "../sum_tree" } sum_tree = { path = "../sum_tree" }


@ -1,10 +1,6 @@
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub mod test; pub mod test;
#[cfg(test)]
mod channel_store_tests;
pub mod channel_store;
pub mod telemetry; pub mod telemetry;
pub mod user; pub mod user;
@ -48,7 +44,6 @@ use util::channel::ReleaseChannel;
use util::http::HttpClient; use util::http::HttpClient;
use util::{ResultExt, TryFutureExt}; use util::{ResultExt, TryFutureExt};
pub use channel_store::*;
pub use rpc::*; pub use rpc::*;
pub use telemetry::ClickhouseEvent; pub use telemetry::ClickhouseEvent;
pub use user::*; pub use user::*;


@ -10,9 +10,11 @@ use std::sync::{Arc, Weak};
use util::http::HttpClient;
use util::TryFutureExt as _;

pub type UserId = u64;

#[derive(Default, Debug)]
pub struct User {
-pub id: u64,
pub id: UserId,
pub github_login: String,
pub avatar: Option<Arc<ImageData>>,
}


@ -14,8 +14,10 @@ name = "seed"
required-features = ["seed-support"] required-features = ["seed-support"]
[dependencies] [dependencies]
clock = { path = "../clock" }
collections = { path = "../collections" } collections = { path = "../collections" }
live_kit_server = { path = "../live_kit_server" } live_kit_server = { path = "../live_kit_server" }
text = { path = "../text" }
rpc = { path = "../rpc" } rpc = { path = "../rpc" }
util = { path = "../util" } util = { path = "../util" }
@ -35,6 +37,7 @@ log.workspace = true
nanoid = "0.4" nanoid = "0.4"
parking_lot.workspace = true parking_lot.workspace = true
prometheus = "0.13" prometheus = "0.13"
prost.workspace = true
rand.workspace = true rand.workspace = true
reqwest = { version = "0.11", features = ["json"], optional = true } reqwest = { version = "0.11", features = ["json"], optional = true }
scrypt = "0.7" scrypt = "0.7"
@ -62,6 +65,7 @@ collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] }
call = { path = "../call", features = ["test-support"] } call = { path = "../call", features = ["test-support"] }
client = { path = "../client", features = ["test-support"] } client = { path = "../client", features = ["test-support"] }
channel = { path = "../channel" }
editor = { path = "../editor", features = ["test-support"] } editor = { path = "../editor", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] } language = { path = "../language", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] } fs = { path = "../fs", features = ["test-support"] }
@ -74,6 +78,7 @@ rpc = { path = "../rpc", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] } settings = { path = "../settings", features = ["test-support"] }
theme = { path = "../theme" } theme = { path = "../theme" }
workspace = { path = "../workspace", features = ["test-support"] } workspace = { path = "../workspace", features = ["test-support"] }
collab_ui = { path = "../collab_ui", features = ["test-support"] }
ctor.workspace = true ctor.workspace = true
env_logger.workspace = true env_logger.workspace = true


@ -208,3 +208,44 @@ CREATE TABLE "channel_members" (
);

CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id");
CREATE TABLE "buffers" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL DEFAULT 0
);
CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
CREATE TABLE "buffer_operations" (
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL,
"replica_id" INTEGER NOT NULL,
"lamport_timestamp" INTEGER NOT NULL,
"value" BLOB NOT NULL,
PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
);
CREATE TABLE "buffer_snapshots" (
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL,
"text" TEXT NOT NULL,
"operation_serialization_version" INTEGER NOT NULL,
PRIMARY KEY(buffer_id, epoch)
);
CREATE TABLE "channel_buffer_collaborators" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
"connection_id" INTEGER NOT NULL,
"connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
"connection_lost" BOOLEAN NOT NULL DEFAULT false,
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
"replica_id" INTEGER NOT NULL
);
CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");


@ -0,0 +1,40 @@
CREATE TABLE "buffers" (
"id" SERIAL PRIMARY KEY,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL DEFAULT 0
);
CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
CREATE TABLE "buffer_operations" (
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL,
"replica_id" INTEGER NOT NULL,
"lamport_timestamp" INTEGER NOT NULL,
"value" BYTEA NOT NULL,
PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
);
CREATE TABLE "buffer_snapshots" (
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL,
"text" TEXT NOT NULL,
"operation_serialization_version" INTEGER NOT NULL,
PRIMARY KEY(buffer_id, epoch)
);
CREATE TABLE "channel_buffer_collaborators" (
"id" SERIAL PRIMARY KEY,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
"connection_id" INTEGER NOT NULL,
"connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
"connection_lost" BOOLEAN NOT NULL DEFAULT FALSE,
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
"replica_id" INTEGER NOT NULL
);
CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");


@ -1,7 +1,8 @@
#[cfg(test)]
-mod db_tests;
pub mod tests;
#[cfg(test)]
-pub mod test_db;
pub use tests::TestDb;

mod ids;
mod queries;
@ -52,6 +53,8 @@ pub struct Database {
runtime: Option<tokio::runtime::Runtime>,
}

// The `Database` type has so many methods that its impl blocks are split into
// separate files in the `queries` folder.
impl Database {
pub async fn new(options: ConnectOptions, executor: Executor) -> Result<Self> {
Ok(Self {


@ -110,6 +110,7 @@ fn value_to_integer(v: Value) -> Result<i32, ValueTypeErr> {
}
}

id_type!(BufferId);
id_type!(AccessTokenId);
id_type!(ChannelId);
id_type!(ChannelMemberId);
@ -123,3 +124,4 @@ id_type!(ReplicaId);
id_type!(ServerId);
id_type!(SignupId);
id_type!(UserId);
id_type!(ChannelBufferCollaboratorId);


@ -1,6 +1,7 @@
use super::*;

pub mod access_tokens;
pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod projects;


@ -0,0 +1,588 @@
use super::*;
use prost::Message;
use text::{EditOperation, InsertionTimestamp, UndoOperation};
impl Database {
pub async fn join_channel_buffer(
&self,
channel_id: ChannelId,
user_id: UserId,
connection: ConnectionId,
) -> Result<proto::JoinChannelBufferResponse> {
self.transaction(|tx| async move {
let tx = tx;
self.check_user_is_channel_member(channel_id, user_id, &tx)
.await?;
let buffer = channel::Model {
id: channel_id,
..Default::default()
}
.find_related(buffer::Entity)
.one(&*tx)
.await?;
let buffer = if let Some(buffer) = buffer {
buffer
} else {
let buffer = buffer::ActiveModel {
channel_id: ActiveValue::Set(channel_id),
..Default::default()
}
.insert(&*tx)
.await?;
buffer_snapshot::ActiveModel {
buffer_id: ActiveValue::Set(buffer.id),
epoch: ActiveValue::Set(0),
text: ActiveValue::Set(String::new()),
operation_serialization_version: ActiveValue::Set(
storage::SERIALIZATION_VERSION,
),
}
.insert(&*tx)
.await?;
buffer
};
// Join the collaborators
let mut collaborators = channel_buffer_collaborator::Entity::find()
.filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
.all(&*tx)
.await?;
let replica_ids = collaborators
.iter()
.map(|c| c.replica_id)
.collect::<HashSet<_>>();
let mut replica_id = ReplicaId(0);
while replica_ids.contains(&replica_id) {
replica_id.0 += 1;
}
let collaborator = channel_buffer_collaborator::ActiveModel {
channel_id: ActiveValue::Set(channel_id),
connection_id: ActiveValue::Set(connection.id as i32),
connection_server_id: ActiveValue::Set(ServerId(connection.owner_id as i32)),
user_id: ActiveValue::Set(user_id),
replica_id: ActiveValue::Set(replica_id),
..Default::default()
}
.insert(&*tx)
.await?;
collaborators.push(collaborator);
// Assemble the buffer state
let (base_text, operations) = self.get_buffer_state(&buffer, &tx).await?;
Ok(proto::JoinChannelBufferResponse {
buffer_id: buffer.id.to_proto(),
replica_id: replica_id.to_proto() as u32,
base_text,
operations,
collaborators: collaborators
.into_iter()
.map(|collaborator| proto::Collaborator {
peer_id: Some(collaborator.connection().into()),
user_id: collaborator.user_id.to_proto(),
replica_id: collaborator.replica_id.0 as u32,
})
.collect(),
})
})
.await
}
pub async fn leave_channel_buffer(
&self,
channel_id: ChannelId,
connection: ConnectionId,
) -> Result<Vec<ConnectionId>> {
self.transaction(|tx| async move {
self.leave_channel_buffer_internal(channel_id, connection, &*tx)
.await
})
.await
}
pub async fn leave_channel_buffer_internal(
&self,
channel_id: ChannelId,
connection: ConnectionId,
tx: &DatabaseTransaction,
) -> Result<Vec<ConnectionId>> {
let result = channel_buffer_collaborator::Entity::delete_many()
.filter(
Condition::all()
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
.add(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32))
.add(
channel_buffer_collaborator::Column::ConnectionServerId
.eq(connection.owner_id as i32),
),
)
.exec(&*tx)
.await?;
if result.rows_affected == 0 {
Err(anyhow!("not a collaborator on this project"))?;
}
let mut connections = Vec::new();
let mut rows = channel_buffer_collaborator::Entity::find()
.filter(
Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
)
.stream(&*tx)
.await?;
while let Some(row) = rows.next().await {
let row = row?;
connections.push(ConnectionId {
id: row.connection_id as u32,
owner_id: row.connection_server_id.0 as u32,
});
}
drop(rows);
if connections.is_empty() {
self.snapshot_buffer(channel_id, &tx).await?;
}
Ok(connections)
}
pub async fn leave_channel_buffers(
&self,
connection: ConnectionId,
) -> Result<Vec<(ChannelId, Vec<ConnectionId>)>> {
self.transaction(|tx| async move {
#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
enum QueryChannelIds {
ChannelId,
}
let channel_ids: Vec<ChannelId> = channel_buffer_collaborator::Entity::find()
.select_only()
.column(channel_buffer_collaborator::Column::ChannelId)
.filter(Condition::all().add(
channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32),
))
.into_values::<_, QueryChannelIds>()
.all(&*tx)
.await?;
let mut result = Vec::new();
for channel_id in channel_ids {
let collaborators = self
.leave_channel_buffer_internal(channel_id, connection, &*tx)
.await?;
result.push((channel_id, collaborators));
}
Ok(result)
})
.await
}
#[cfg(debug_assertions)]
pub async fn get_channel_buffer_collaborators(
&self,
channel_id: ChannelId,
) -> Result<Vec<UserId>> {
self.transaction(|tx| async move {
#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
enum QueryUserIds {
UserId,
}
let users: Vec<UserId> = channel_buffer_collaborator::Entity::find()
.select_only()
.column(channel_buffer_collaborator::Column::UserId)
.filter(
Condition::all()
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
)
.into_values::<_, QueryUserIds>()
.all(&*tx)
.await?;
Ok(users)
})
.await
}
pub async fn update_channel_buffer(
&self,
channel_id: ChannelId,
user: UserId,
operations: &[proto::Operation],
) -> Result<Vec<ConnectionId>> {
self.transaction(move |tx| async move {
self.check_user_is_channel_member(channel_id, user, &*tx)
.await?;
let buffer = buffer::Entity::find()
.filter(buffer::Column::ChannelId.eq(channel_id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such buffer"))?;
#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
enum QueryVersion {
OperationSerializationVersion,
}
let serialization_version: i32 = buffer
.find_related(buffer_snapshot::Entity)
.select_only()
.column(buffer_snapshot::Column::OperationSerializationVersion)
.filter(buffer_snapshot::Column::Epoch.eq(buffer.epoch))
.into_values::<_, QueryVersion>()
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("missing buffer snapshot"))?;
let operations = operations
.iter()
.filter_map(|op| operation_to_storage(op, &buffer, serialization_version))
.collect::<Vec<_>>();
if !operations.is_empty() {
buffer_operation::Entity::insert_many(operations)
.exec(&*tx)
.await?;
}
let mut connections = Vec::new();
let mut rows = channel_buffer_collaborator::Entity::find()
.filter(
Condition::all()
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
)
.stream(&*tx)
.await?;
while let Some(row) = rows.next().await {
let row = row?;
connections.push(ConnectionId {
id: row.connection_id as u32,
owner_id: row.connection_server_id.0 as u32,
});
}
Ok(connections)
})
.await
}
async fn get_buffer_state(
&self,
buffer: &buffer::Model,
tx: &DatabaseTransaction,
) -> Result<(String, Vec<proto::Operation>)> {
let id = buffer.id;
let (base_text, version) = if buffer.epoch > 0 {
let snapshot = buffer_snapshot::Entity::find()
.filter(
buffer_snapshot::Column::BufferId
.eq(id)
.and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)),
)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such snapshot"))?;
let version = snapshot.operation_serialization_version;
(snapshot.text, version)
} else {
(String::new(), storage::SERIALIZATION_VERSION)
};
let mut rows = buffer_operation::Entity::find()
.filter(
buffer_operation::Column::BufferId
.eq(id)
.and(buffer_operation::Column::Epoch.eq(buffer.epoch)),
)
.stream(&*tx)
.await?;
let mut operations = Vec::new();
while let Some(row) = rows.next().await {
let row = row?;
let operation = operation_from_storage(row, version)?;
operations.push(proto::Operation {
variant: Some(operation),
})
}
Ok((base_text, operations))
}
async fn snapshot_buffer(&self, channel_id: ChannelId, tx: &DatabaseTransaction) -> Result<()> {
let buffer = channel::Model {
id: channel_id,
..Default::default()
}
.find_related(buffer::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such buffer"))?;
let (base_text, operations) = self.get_buffer_state(&buffer, tx).await?;
if operations.is_empty() {
return Ok(());
}
let mut text_buffer = text::Buffer::new(0, 0, base_text);
text_buffer
.apply_ops(operations.into_iter().filter_map(operation_from_wire))
.unwrap();
let base_text = text_buffer.text();
let epoch = buffer.epoch + 1;
buffer_snapshot::Model {
buffer_id: buffer.id,
epoch,
text: base_text,
operation_serialization_version: storage::SERIALIZATION_VERSION,
}
.into_active_model()
.insert(tx)
.await?;
buffer::ActiveModel {
id: ActiveValue::Unchanged(buffer.id),
epoch: ActiveValue::Set(epoch),
..Default::default()
}
.save(tx)
.await?;
Ok(())
}
}
fn operation_to_storage(
operation: &proto::Operation,
buffer: &buffer::Model,
_format: i32,
) -> Option<buffer_operation::ActiveModel> {
let (replica_id, lamport_timestamp, value) = match operation.variant.as_ref()? {
proto::operation::Variant::Edit(operation) => (
operation.replica_id,
operation.lamport_timestamp,
storage::Operation {
local_timestamp: operation.local_timestamp,
version: version_to_storage(&operation.version),
is_undo: false,
edit_ranges: operation
.ranges
.iter()
.map(|range| storage::Range {
start: range.start,
end: range.end,
})
.collect(),
edit_texts: operation.new_text.clone(),
undo_counts: Vec::new(),
},
),
proto::operation::Variant::Undo(operation) => (
operation.replica_id,
operation.lamport_timestamp,
storage::Operation {
local_timestamp: operation.local_timestamp,
version: version_to_storage(&operation.version),
is_undo: true,
edit_ranges: Vec::new(),
edit_texts: Vec::new(),
undo_counts: operation
.counts
.iter()
.map(|entry| storage::UndoCount {
replica_id: entry.replica_id,
local_timestamp: entry.local_timestamp,
count: entry.count,
})
.collect(),
},
),
_ => None?,
};
Some(buffer_operation::ActiveModel {
buffer_id: ActiveValue::Set(buffer.id),
epoch: ActiveValue::Set(buffer.epoch),
replica_id: ActiveValue::Set(replica_id as i32),
lamport_timestamp: ActiveValue::Set(lamport_timestamp as i32),
value: ActiveValue::Set(value.encode_to_vec()),
})
}
fn operation_from_storage(
row: buffer_operation::Model,
_format_version: i32,
) -> Result<proto::operation::Variant, Error> {
let operation =
storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{}", error))?;
let version = version_from_storage(&operation.version);
Ok(if operation.is_undo {
proto::operation::Variant::Undo(proto::operation::Undo {
replica_id: row.replica_id as u32,
local_timestamp: operation.local_timestamp as u32,
lamport_timestamp: row.lamport_timestamp as u32,
version,
counts: operation
.undo_counts
.iter()
.map(|entry| proto::UndoCount {
replica_id: entry.replica_id,
local_timestamp: entry.local_timestamp,
count: entry.count,
})
.collect(),
})
} else {
proto::operation::Variant::Edit(proto::operation::Edit {
replica_id: row.replica_id as u32,
local_timestamp: operation.local_timestamp as u32,
lamport_timestamp: row.lamport_timestamp as u32,
version,
ranges: operation
.edit_ranges
.into_iter()
.map(|range| proto::Range {
start: range.start,
end: range.end,
})
.collect(),
new_text: operation.edit_texts,
})
})
}
fn version_to_storage(version: &Vec<proto::VectorClockEntry>) -> Vec<storage::VectorClockEntry> {
version
.iter()
.map(|entry| storage::VectorClockEntry {
replica_id: entry.replica_id,
timestamp: entry.timestamp,
})
.collect()
}
fn version_from_storage(version: &Vec<storage::VectorClockEntry>) -> Vec<proto::VectorClockEntry> {
version
.iter()
.map(|entry| proto::VectorClockEntry {
replica_id: entry.replica_id,
timestamp: entry.timestamp,
})
.collect()
}
// This is currently a manual copy of the deserialization code in the client's language crate
pub fn operation_from_wire(operation: proto::Operation) -> Option<text::Operation> {
match operation.variant? {
proto::operation::Variant::Edit(edit) => Some(text::Operation::Edit(EditOperation {
timestamp: InsertionTimestamp {
replica_id: edit.replica_id as text::ReplicaId,
local: edit.local_timestamp,
lamport: edit.lamport_timestamp,
},
version: version_from_wire(&edit.version),
ranges: edit
.ranges
.into_iter()
.map(|range| {
text::FullOffset(range.start as usize)..text::FullOffset(range.end as usize)
})
.collect(),
new_text: edit.new_text.into_iter().map(Arc::from).collect(),
})),
proto::operation::Variant::Undo(undo) => Some(text::Operation::Undo {
lamport_timestamp: clock::Lamport {
replica_id: undo.replica_id as text::ReplicaId,
value: undo.lamport_timestamp,
},
undo: UndoOperation {
id: clock::Local {
replica_id: undo.replica_id as text::ReplicaId,
value: undo.local_timestamp,
},
version: version_from_wire(&undo.version),
counts: undo
.counts
.into_iter()
.map(|c| {
(
clock::Local {
replica_id: c.replica_id as text::ReplicaId,
value: c.local_timestamp,
},
c.count,
)
})
.collect(),
},
}),
_ => None,
}
}
fn version_from_wire(message: &[proto::VectorClockEntry]) -> clock::Global {
let mut version = clock::Global::new();
for entry in message {
version.observe(clock::Local {
replica_id: entry.replica_id as text::ReplicaId,
value: entry.timestamp,
});
}
version
}
mod storage {
#![allow(non_snake_case)]
use prost::Message;
pub const SERIALIZATION_VERSION: i32 = 1;
#[derive(Message)]
pub struct Operation {
#[prost(uint32, tag = "1")]
pub local_timestamp: u32,
#[prost(message, repeated, tag = "2")]
pub version: Vec<VectorClockEntry>,
#[prost(bool, tag = "3")]
pub is_undo: bool,
#[prost(message, repeated, tag = "4")]
pub edit_ranges: Vec<Range>,
#[prost(string, repeated, tag = "5")]
pub edit_texts: Vec<String>,
#[prost(message, repeated, tag = "6")]
pub undo_counts: Vec<UndoCount>,
}
#[derive(Message)]
pub struct VectorClockEntry {
#[prost(uint32, tag = "1")]
pub replica_id: u32,
#[prost(uint32, tag = "2")]
pub timestamp: u32,
}
#[derive(Message)]
pub struct Range {
#[prost(uint64, tag = "1")]
pub start: u64,
#[prost(uint64, tag = "2")]
pub end: u64,
}
#[derive(Message)]
pub struct UndoCount {
#[prost(uint32, tag = "1")]
pub replica_id: u32,
#[prost(uint32, tag = "2")]
pub local_timestamp: u32,
#[prost(uint32, tag = "3")]
pub count: u32,
}
}
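
For reference, a minimal sketch (assuming the `storage` module defined above; not part of the change itself) of how these prost messages round-trip before landing in the `value` column of `buffer_operations`:

```rust
// Minimal sketch, assuming the `storage` module above: an edit operation is
// encoded to protobuf bytes for `buffer_operations.value`, then decoded again
// when the buffer state is reassembled (as in operation_from_storage).
use prost::Message;

fn storage_round_trip() -> anyhow::Result<()> {
    let op = storage::Operation {
        local_timestamp: 1,
        version: vec![storage::VectorClockEntry { replica_id: 0, timestamp: 1 }],
        is_undo: false,
        edit_ranges: vec![storage::Range { start: 0, end: 0 }],
        edit_texts: vec!["hello world".into()],
        undo_counts: Vec::new(),
    };
    let bytes = op.encode_to_vec();
    let decoded = storage::Operation::decode(bytes.as_slice())?;
    assert_eq!(decoded.edit_texts, vec!["hello world".to_string()]);
    Ok(())
}
```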


@ -903,15 +903,35 @@ impl Database {
),
)
.one(&*tx)
-.await?
-.ok_or_else(|| anyhow!("not a participant in any room"))?;
.await?;

if let Some(participant) = participant {
room_participant::Entity::update(room_participant::ActiveModel {
answering_connection_lost: ActiveValue::set(true),
..participant.into_active_model()
})
.exec(&*tx)
.await?;
}

channel_buffer_collaborator::Entity::update_many()
.filter(
Condition::all()
.add(
channel_buffer_collaborator::Column::ConnectionId
.eq(connection.id as i32),
)
.add(
channel_buffer_collaborator::Column::ConnectionServerId
.eq(connection.owner_id as i32),
),
)
.set(channel_buffer_collaborator::ActiveModel {
connection_lost: ActiveValue::set(true),
..Default::default()
})
.exec(&*tx)
.await?;

Ok(())
})


@ -1,5 +1,9 @@
pub mod access_token;
pub mod buffer;
pub mod buffer_operation;
pub mod buffer_snapshot;
pub mod channel;
pub mod channel_buffer_collaborator;
pub mod channel_member;
pub mod channel_path;
pub mod contact;


@ -0,0 +1,45 @@
use crate::db::{BufferId, ChannelId};
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "buffers")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: BufferId,
pub epoch: i32,
pub channel_id: ChannelId,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::buffer_operation::Entity")]
Operations,
#[sea_orm(has_many = "super::buffer_snapshot::Entity")]
Snapshots,
#[sea_orm(
belongs_to = "super::channel::Entity",
from = "Column::ChannelId",
to = "super::channel::Column::Id"
)]
Channel,
}
impl Related<super::buffer_operation::Entity> for Entity {
fn to() -> RelationDef {
Relation::Operations.def()
}
}
impl Related<super::buffer_snapshot::Entity> for Entity {
fn to() -> RelationDef {
Relation::Snapshots.def()
}
}
impl Related<super::channel::Entity> for Entity {
fn to() -> RelationDef {
Relation::Channel.def()
}
}
impl ActiveModelBehavior for ActiveModel {}


@ -0,0 +1,34 @@
use crate::db::BufferId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "buffer_operations")]
pub struct Model {
#[sea_orm(primary_key)]
pub buffer_id: BufferId,
#[sea_orm(primary_key)]
pub epoch: i32,
#[sea_orm(primary_key)]
pub lamport_timestamp: i32,
#[sea_orm(primary_key)]
pub replica_id: i32,
pub value: Vec<u8>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::buffer::Entity",
from = "Column::BufferId",
to = "super::buffer::Column::Id"
)]
Buffer,
}
impl Related<super::buffer::Entity> for Entity {
fn to() -> RelationDef {
Relation::Buffer.def()
}
}
impl ActiveModelBehavior for ActiveModel {}


@ -0,0 +1,31 @@
use crate::db::BufferId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "buffer_snapshots")]
pub struct Model {
#[sea_orm(primary_key)]
pub buffer_id: BufferId,
#[sea_orm(primary_key)]
pub epoch: i32,
pub text: String,
pub operation_serialization_version: i32,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::buffer::Entity",
from = "Column::BufferId",
to = "super::buffer::Column::Id"
)]
Buffer,
}
impl Related<super::buffer::Entity> for Entity {
fn to() -> RelationDef {
Relation::Buffer.def()
}
}
impl ActiveModelBehavior for ActiveModel {}


@ -15,8 +15,12 @@ impl ActiveModelBehavior for ActiveModel {}
pub enum Relation {
#[sea_orm(has_one = "super::room::Entity")]
Room,
#[sea_orm(has_one = "super::buffer::Entity")]
Buffer,
#[sea_orm(has_many = "super::channel_member::Entity")]
Member,
#[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")]
BufferCollaborators,
}

impl Related<super::channel_member::Entity> for Entity {
@ -30,3 +34,15 @@ impl Related<super::room::Entity> for Entity {
Relation::Room.def()
}
}
impl Related<super::buffer::Entity> for Entity {
fn to() -> RelationDef {
Relation::Buffer.def()
}
}
impl Related<super::channel_buffer_collaborator::Entity> for Entity {
fn to() -> RelationDef {
Relation::BufferCollaborators.def()
}
}


@ -0,0 +1,43 @@
use crate::db::{ChannelBufferCollaboratorId, ChannelId, ReplicaId, ServerId, UserId};
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "channel_buffer_collaborators")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: ChannelBufferCollaboratorId,
pub channel_id: ChannelId,
pub connection_id: i32,
pub connection_server_id: ServerId,
pub connection_lost: bool,
pub user_id: UserId,
pub replica_id: ReplicaId,
}
impl Model {
pub fn connection(&self) -> ConnectionId {
ConnectionId {
owner_id: self.connection_server_id.0 as u32,
id: self.connection_id as u32,
}
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::channel::Entity",
from = "Column::ChannelId",
to = "super::channel::Column::Id"
)]
Channel,
}
impl Related<super::channel::Entity> for Entity {
fn to() -> RelationDef {
Relation::Channel.def()
}
}
impl ActiveModelBehavior for ActiveModel {}


@ -1,3 +1,6 @@
mod buffer_tests;
mod db_tests;
use super::*;
use gpui::executor::Background;
use parking_lot::Mutex;
@ -91,6 +94,26 @@ impl TestDb {
}
}
#[macro_export]
macro_rules! test_both_dbs {
($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => {
#[gpui::test]
async fn $postgres_test_name() {
let test_db = crate::db::TestDb::postgres(
gpui::executor::Deterministic::new(0).build_background(),
);
$test_name(test_db.db()).await;
}
#[gpui::test]
async fn $sqlite_test_name() {
let test_db =
crate::db::TestDb::sqlite(gpui::executor::Deterministic::new(0).build_background());
$test_name(test_db.db()).await;
}
};
}
impl Drop for TestDb {
fn drop(&mut self) {
let db = self.db.take().unwrap();


@ -0,0 +1,165 @@
use super::*;
use crate::test_both_dbs;
use language::proto;
use text::Buffer;
test_both_dbs!(
test_channel_buffers,
test_channel_buffers_postgres,
test_channel_buffers_sqlite
);
async fn test_channel_buffers(db: &Arc<Database>) {
let a_id = db
.create_user(
"user_a@example.com",
false,
NewUserParams {
github_login: "user_a".into(),
github_user_id: 101,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
let b_id = db
.create_user(
"user_b@example.com",
false,
NewUserParams {
github_login: "user_b".into(),
github_user_id: 102,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
// This user will not be a part of the channel
let c_id = db
.create_user(
"user_c@example.com",
false,
NewUserParams {
github_login: "user_c".into(),
github_user_id: 102,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
let owner_id = db.create_server("production").await.unwrap().0 as u32;
let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap();
db.invite_channel_member(zed_id, b_id, a_id, false)
.await
.unwrap();
db.respond_to_channel_invite(zed_id, b_id, true)
.await
.unwrap();
let connection_id_a = ConnectionId { owner_id, id: 1 };
let _ = db
.join_channel_buffer(zed_id, a_id, connection_id_a)
.await
.unwrap();
let mut buffer_a = Buffer::new(0, 0, "".to_string());
let mut operations = Vec::new();
operations.push(buffer_a.edit([(0..0, "hello world")]));
operations.push(buffer_a.edit([(5..5, ", cruel")]));
operations.push(buffer_a.edit([(0..5, "goodbye")]));
operations.push(buffer_a.undo().unwrap().1);
assert_eq!(buffer_a.text(), "hello, cruel world");
let operations = operations
.into_iter()
.map(|op| proto::serialize_operation(&language::Operation::Buffer(op)))
.collect::<Vec<_>>();
db.update_channel_buffer(zed_id, a_id, &operations)
.await
.unwrap();
let connection_id_b = ConnectionId { owner_id, id: 2 };
let buffer_response_b = db
.join_channel_buffer(zed_id, b_id, connection_id_b)
.await
.unwrap();
let mut buffer_b = Buffer::new(0, 0, buffer_response_b.base_text);
buffer_b
.apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
let operation = proto::deserialize_operation(operation).unwrap();
if let language::Operation::Buffer(operation) = operation {
operation
} else {
unreachable!()
}
}))
.unwrap();
assert_eq!(buffer_b.text(), "hello, cruel world");
// Ensure that C fails to open the buffer
assert!(db
.join_channel_buffer(zed_id, c_id, ConnectionId { owner_id, id: 3 })
.await
.is_err());
// Ensure that both collaborators have shown up
assert_eq!(
buffer_response_b.collaborators,
&[
rpc::proto::Collaborator {
user_id: a_id.to_proto(),
peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
replica_id: 0,
},
rpc::proto::Collaborator {
user_id: b_id.to_proto(),
peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
replica_id: 1,
}
]
);
// Ensure that get_channel_buffer_collaborators works
let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
assert_eq!(zed_collaborators, &[a_id, b_id]);
let collaborators = db
.leave_channel_buffer(zed_id, connection_id_b)
.await
.unwrap();
assert_eq!(collaborators, &[connection_id_a],);
let cargo_id = db.create_root_channel("cargo", "2", a_id).await.unwrap();
let _ = db
.join_channel_buffer(cargo_id, a_id, connection_id_a)
.await
.unwrap();
db.leave_channel_buffers(connection_id_a).await.unwrap();
let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
let cargo_collaborators = db.get_channel_buffer_collaborators(cargo_id).await.unwrap();
assert_eq!(zed_collaborators, &[]);
assert_eq!(cargo_collaborators, &[]);
// When everyone has left the channel, the operations are collapsed into
// a new base text.
let buffer_response_b = db
.join_channel_buffer(zed_id, b_id, connection_id_b)
.await
.unwrap();
assert_eq!(buffer_response_b.base_text, "hello, cruel world");
assert_eq!(buffer_response_b.operations, &[]);
}


@@ -1,32 +1,17 @@
 use super::*;
+use crate::test_both_dbs;
 use gpui::executor::{Background, Deterministic};
 use pretty_assertions::{assert_eq, assert_ne};
 use std::sync::Arc;
-use test_db::TestDb;
+use tests::TestDb;
-macro_rules! test_both_dbs {
-    ($postgres_test_name:ident, $sqlite_test_name:ident, $db:ident, $body:block) => {
-        #[gpui::test]
-        async fn $postgres_test_name() {
-            let test_db = TestDb::postgres(Deterministic::new(0).build_background());
-            let $db = test_db.db();
-            $body
-        }
-        #[gpui::test]
-        async fn $sqlite_test_name() {
-            let test_db = TestDb::sqlite(Deterministic::new(0).build_background());
-            let $db = test_db.db();
-            $body
-        }
-    };
-}
 test_both_dbs!(
+    test_get_users,
     test_get_users_by_ids_postgres,
-    test_get_users_by_ids_sqlite,
-    db,
-    {
+    test_get_users_by_ids_sqlite
+);
+async fn test_get_users(db: &Arc<Database>) {
     let mut user_ids = Vec::new();
     let mut user_metric_ids = Vec::new();
     for i in 1..=4 {
@@ -88,13 +73,14 @@
         ]
     );
 }
-);
 test_both_dbs!(
+    test_get_or_create_user_by_github_account,
     test_get_or_create_user_by_github_account_postgres,
-    test_get_or_create_user_by_github_account_sqlite,
-    db,
-    {
+    test_get_or_create_user_by_github_account_sqlite
+);
+async fn test_get_or_create_user_by_github_account(db: &Arc<Database>) {
     let user_id1 = db
         .create_user(
             "user1@example.com",
@@ -155,13 +141,14 @@
     assert_eq!(user.github_user_id, Some(103));
     assert_eq!(user.email_address, Some("user3@example.com".into()));
 }
-);
 test_both_dbs!(
+    test_create_access_tokens,
     test_create_access_tokens_postgres,
-    test_create_access_tokens_sqlite,
-    db,
-    {
+    test_create_access_tokens_sqlite
+);
+async fn test_create_access_tokens(db: &Arc<Database>) {
     let user = db
         .create_user(
             "u1@example.com",
@@ -234,9 +221,14 @@
     assert!(db.get_access_token(token_2).await.is_err());
     assert!(db.get_access_token(token_1).await.is_err());
 }
+test_both_dbs!(
+    test_add_contacts,
+    test_add_contacts_postgres,
+    test_add_contacts_sqlite
 );
-test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
+async fn test_add_contacts(db: &Arc<Database>) {
     let mut user_ids = Vec::new();
     for i in 0..3 {
         user_ids.push(
@@ -403,9 +395,15 @@
             busy: false,
         }],
     );
-});
-test_both_dbs!(test_metrics_id_postgres, test_metrics_id_sqlite, db, {
+}
+test_both_dbs!(
+    test_metrics_id,
+    test_metrics_id_postgres,
+    test_metrics_id_sqlite
+);
+async fn test_metrics_id(db: &Arc<Database>) {
     let NewUserResult {
         user_id: user1,
         metrics_id: metrics_id1,
@@ -444,13 +442,15 @@
     assert_eq!(metrics_id1.len(), 36);
     assert_eq!(metrics_id2.len(), 36);
     assert_ne!(metrics_id1, metrics_id2);
-});
+}
 test_both_dbs!(
+    test_project_count,
     test_project_count_postgres,
-    test_project_count_sqlite,
-    db,
-    {
+    test_project_count_sqlite
+);
+async fn test_project_count(db: &Arc<Database>) {
     let owner_id = db.create_server("test").await.unwrap().0 as u32;
     let user1 = db
@@ -519,7 +519,6 @@
     .unwrap();
     assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
 }
-);
 #[test]
 fn test_fuzzy_like_string() {
@@ -878,7 +877,9 @@ async fn test_invite_codes() {
     assert!(db.has_contact(user5, user1).await.unwrap());
 }
-test_both_dbs!(test_channels_postgres, test_channels_sqlite, db, {
+test_both_dbs!(test_channels, test_channels_postgres, test_channels_sqlite);
+async fn test_channels(db: &Arc<Database>) {
     let a_id = db
         .create_user(
             "user1@example.com",
@@ -1063,13 +1064,15 @@
     assert!(db.get_channel(rust_id, a_id).await.unwrap().is_none());
     assert!(db.get_channel(cargo_id, a_id).await.unwrap().is_none());
     assert!(db.get_channel(cargo_ra_id, a_id).await.unwrap().is_none());
-});
+}
 test_both_dbs!(
+    test_joining_channels,
     test_joining_channels_postgres,
-    test_joining_channels_sqlite,
-    db,
-    {
+    test_joining_channels_sqlite
+);
+async fn test_joining_channels(db: &Arc<Database>) {
     let owner_id = db.create_server("test").await.unwrap().0 as u32;
     let user_1 = db
@@ -1119,13 +1122,14 @@
         .await
         .is_err());
 }
-);
 test_both_dbs!(
+    test_channel_invites,
     test_channel_invites_postgres,
-    test_channel_invites_sqlite,
-    db,
-    {
+    test_channel_invites_sqlite
+);
+async fn test_channel_invites(db: &Arc<Database>) {
     db.create_server("test").await.unwrap();
     let user_1 = db
@@ -1263,13 +1267,14 @@
         ]
     );
 }
-);
 test_both_dbs!(
+    test_channel_renames,
     test_channel_renames_postgres,
-    test_channel_renames_sqlite,
-    db,
-    {
+    test_channel_renames_sqlite
+);
+async fn test_channel_renames(db: &Arc<Database>) {
     db.create_server("test").await.unwrap();
     let user_1 = db
@@ -1323,7 +1328,6 @@
     let bad_name_rename = db.rename_channel(zed_id, user_1, "#").await;
     assert!(bad_name_rename.is_err())
 }
-);
 #[gpui::test]
 async fn test_multiple_signup_overwrite() {

View File

@@ -35,8 +35,8 @@ use lazy_static::lazy_static;
 use prometheus::{register_int_gauge, IntGauge};
 use rpc::{
     proto::{
-        self, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo,
-        RequestMessage,
+        self, Ack, AddChannelBufferCollaborator, AnyTypedEnvelope, EntityMessage, EnvelopedMessage,
+        LiveKitConnectionInfo, RequestMessage,
     },
     Connection, ConnectionId, Peer, Receipt, TypedEnvelope,
 };
@@ -248,6 +248,9 @@ impl Server {
             .add_request_handler(remove_channel_member)
             .add_request_handler(set_channel_member_admin)
             .add_request_handler(rename_channel)
+            .add_request_handler(join_channel_buffer)
+            .add_request_handler(leave_channel_buffer)
+            .add_message_handler(update_channel_buffer)
             .add_request_handler(get_channel_members)
             .add_request_handler(respond_to_channel_invite)
             .add_request_handler(join_channel)
@@ -851,6 +854,10 @@ async fn connection_lost(
         .await
         .trace_err();
+    leave_channel_buffers_for_session(&session)
+        .await
+        .trace_err();
     futures::select_biased! {
         _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
             leave_room_for_session(&session).await.trace_err();
@@ -866,6 +873,8 @@ async fn connection_lost(
                 }
             }
             update_user_contacts(session.user_id, &session).await?;
         }
         _ = teardown.changed().fuse() => {}
     }
@ -2478,6 +2487,104 @@ async fn join_channel(
Ok(()) Ok(())
} }
async fn join_channel_buffer(
request: proto::JoinChannelBuffer,
response: Response<proto::JoinChannelBuffer>,
session: Session,
) -> Result<()> {
let db = session.db().await;
let channel_id = ChannelId::from_proto(request.channel_id);
let open_response = db
.join_channel_buffer(channel_id, session.user_id, session.connection_id)
.await?;
let replica_id = open_response.replica_id;
let collaborators = open_response.collaborators.clone();
response.send(open_response)?;
let update = AddChannelBufferCollaborator {
channel_id: channel_id.to_proto(),
collaborator: Some(proto::Collaborator {
user_id: session.user_id.to_proto(),
peer_id: Some(session.connection_id.into()),
replica_id,
}),
};
channel_buffer_updated(
session.connection_id,
collaborators
.iter()
.filter_map(|collaborator| Some(collaborator.peer_id?.into())),
&update,
&session.peer,
);
Ok(())
}
async fn update_channel_buffer(
request: proto::UpdateChannelBuffer,
session: Session,
) -> Result<()> {
let db = session.db().await;
let channel_id = ChannelId::from_proto(request.channel_id);
let collaborators = db
.update_channel_buffer(channel_id, session.user_id, &request.operations)
.await?;
channel_buffer_updated(
session.connection_id,
collaborators,
&proto::UpdateChannelBuffer {
channel_id: channel_id.to_proto(),
operations: request.operations,
},
&session.peer,
);
Ok(())
}
async fn leave_channel_buffer(
request: proto::LeaveChannelBuffer,
response: Response<proto::LeaveChannelBuffer>,
session: Session,
) -> Result<()> {
let db = session.db().await;
let channel_id = ChannelId::from_proto(request.channel_id);
let collaborators_to_notify = db
.leave_channel_buffer(channel_id, session.connection_id)
.await?;
response.send(Ack {})?;
channel_buffer_updated(
session.connection_id,
collaborators_to_notify,
&proto::RemoveChannelBufferCollaborator {
channel_id: channel_id.to_proto(),
peer_id: Some(session.connection_id.into()),
},
&session.peer,
);
Ok(())
}
fn channel_buffer_updated<T: EnvelopedMessage>(
sender_id: ConnectionId,
collaborators: impl IntoIterator<Item = ConnectionId>,
message: &T,
peer: &Peer,
) {
broadcast(Some(sender_id), collaborators.into_iter(), |peer_id| {
peer.send(peer_id.into(), message.clone())
});
}
async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> { async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> {
let project_id = ProjectId::from_proto(request.project_id); let project_id = ProjectId::from_proto(request.project_id);
let project_connection_ids = session let project_connection_ids = session
@ -2803,6 +2910,28 @@ async fn leave_room_for_session(session: &Session) -> Result<()> {
Ok(()) Ok(())
} }
async fn leave_channel_buffers_for_session(session: &Session) -> Result<()> {
let left_channel_buffers = session
.db()
.await
.leave_channel_buffers(session.connection_id)
.await?;
for (channel_id, connections) in left_channel_buffers {
channel_buffer_updated(
session.connection_id,
connections,
&proto::RemoveChannelBufferCollaborator {
channel_id: channel_id.to_proto(),
peer_id: Some(session.connection_id.into()),
},
&session.peer,
);
}
Ok(())
}
fn project_left(project: &db::LeftProject, session: &Session) { fn project_left(project: &db::LeftProject, session: &Session) {
for connection_id in &project.connection_ids { for connection_id in &project.connection_ids {
if project.host_user_id == session.user_id { if project.host_user_id == session.user_id {
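All three channel-buffer handlers above (`join_channel_buffer`, `update_channel_buffer`, `leave_channel_buffer`) notify the remaining collaborators through `channel_buffer_updated`, which appears to fan one message out to every collaborator's connection while excluding the sender. A generic, self-contained sketch of that fan-out shape (plain `u32` ids and a closure stand in for the real `ConnectionId`/`Peer` machinery):

```rust
// Sketch only: send the same message to every collaborator except the
// connection that produced it.
fn fan_out<M: Clone>(
    sender: u32,
    collaborators: impl IntoIterator<Item = u32>,
    message: &M,
    mut send: impl FnMut(u32, M),
) {
    for connection_id in collaborators {
        if connection_id != sender {
            send(connection_id, message.clone());
        }
    }
}

fn main() {
    let mut delivered = Vec::new();
    fan_out(1, [1, 2, 3], &"update", |id, msg| delivered.push((id, msg)));
    assert_eq!(delivered, vec![(2, "update"), (3, "update")]);
}
```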

View File

@@ -1,14 +1,14 @@
 use crate::{
-    db::{test_db::TestDb, NewUserParams, UserId},
+    db::{tests::TestDb, NewUserParams, UserId},
     executor::Executor,
     rpc::{Server, CLEANUP_TIMEOUT},
     AppState,
 };
 use anyhow::anyhow;
 use call::{ActiveCall, Room};
+use channel::ChannelStore;
 use client::{
-    self, proto::PeerId, ChannelStore, Client, Connection, Credentials, EstablishConnectionError,
-    UserStore,
+    self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore,
 };
 use collections::{HashMap, HashSet};
 use fs::FakeFs;
@@ -31,6 +31,7 @@ use std::{
 use util::http::FakeHttpClient;
 use workspace::Workspace;
+mod channel_buffer_tests;
 mod channel_tests;
 mod integration_tests;
 mod randomized_integration_tests;
@@ -210,6 +211,7 @@ impl TestServer {
             workspace::init(app_state.clone(), cx);
             audio::init((), cx);
             call::init(client.clone(), user_store.clone(), cx);
+            channel::init(&client);
         });
         client
client client

View File

@ -0,0 +1,426 @@
use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
use call::ActiveCall;
use channel::Channel;
use client::UserId;
use collab_ui::channel_view::ChannelView;
use collections::HashMap;
use futures::future;
use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
use rpc::{proto, RECEIVE_TIMEOUT};
use serde_json::json;
use std::sync::Arc;
#[gpui::test]
async fn test_core_channel_buffers(
deterministic: Arc<Deterministic>,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let zed_id = server
.make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
.await;
// Client A joins the channel buffer
let channel_buffer_a = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
.await
.unwrap();
// Client A edits the buffer
let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer());
buffer_a.update(cx_a, |buffer, cx| {
buffer.edit([(0..0, "hello world")], None, cx)
});
buffer_a.update(cx_a, |buffer, cx| {
buffer.edit([(5..5, ", cruel")], None, cx)
});
buffer_a.update(cx_a, |buffer, cx| {
buffer.edit([(0..5, "goodbye")], None, cx)
});
buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx));
deterministic.run_until_parked();
assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world");
// Client B joins the channel buffer
let channel_buffer_b = client_b
.channel_store()
.update(cx_b, |channel, cx| channel.open_channel_buffer(zed_id, cx))
.await
.unwrap();
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(
buffer.collaborators(),
&[client_a.user_id(), client_b.user_id()],
);
});
// Client B sees the correct text, and then edits it
let buffer_b = channel_buffer_b.read_with(cx_b, |buffer, _| buffer.buffer());
assert_eq!(
buffer_b.read_with(cx_b, |buffer, _| buffer.remote_id()),
buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id())
);
assert_eq!(buffer_text(&buffer_b, cx_b), "hello, cruel world");
buffer_b.update(cx_b, |buffer, cx| {
buffer.edit([(7..12, "beautiful")], None, cx)
});
// Both A and B see the new edit
deterministic.run_until_parked();
assert_eq!(buffer_text(&buffer_a, cx_a), "hello, beautiful world");
assert_eq!(buffer_text(&buffer_b, cx_b), "hello, beautiful world");
// Client A closes the channel buffer.
cx_a.update(|_| drop(channel_buffer_a));
deterministic.run_until_parked();
// Client B sees that client A is gone from the channel buffer.
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
});
// Client A rejoins the channel buffer
let _channel_buffer_a = client_a
.channel_store()
.update(cx_a, |channels, cx| {
channels.open_channel_buffer(zed_id, cx)
})
.await
.unwrap();
deterministic.run_until_parked();
// Sanity test, make sure we saw A rejoining
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(
&buffer.collaborators(),
&[client_b.user_id(), client_a.user_id()],
);
});
// Client A loses connection.
server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap());
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
// Client B observes A disconnect
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
});
// TODO:
// - Test synchronizing offline updates, what happens to A's channel buffer when A disconnects
// - Test interaction with channel deletion while buffer is open
}
#[gpui::test]
async fn test_channel_buffer_replica_ids(
deterministic: Arc<Deterministic>,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
let channel_id = server
.make_channel(
"zed",
(&client_a, cx_a),
&mut [(&client_b, cx_b), (&client_c, cx_c)],
)
.await;
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b = cx_b.read(ActiveCall::global);
let active_call_c = cx_c.read(ActiveCall::global);
// Clients A and B join a channel.
active_call_a
.update(cx_a, |call, cx| call.join_channel(channel_id, cx))
.await
.unwrap();
active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_id, cx))
.await
.unwrap();
// Clients A, B, and C join a channel buffer
// C first so that the replica IDs in the project and the channel buffer are different
let channel_buffer_c = client_c
.channel_store()
.update(cx_c, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
let channel_buffer_b = client_b
.channel_store()
.update(cx_b, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
let channel_buffer_a = client_a
.channel_store()
.update(cx_a, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
// Client B shares a project
client_b
.fs()
.insert_tree("/dir", json!({ "file.txt": "contents" }))
.await;
let (project_b, _) = client_b.build_local_project("/dir", cx_b).await;
let shared_project_id = active_call_b
.update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
.await
.unwrap();
// Client A joins the project
let project_a = client_a.build_remote_project(shared_project_id, cx_a).await;
deterministic.run_until_parked();
// Client C is in a separate project.
client_c.fs().insert_tree("/dir", json!({})).await;
let (separate_project_c, _) = client_c.build_local_project("/dir", cx_c).await;
// Note that each user has a different replica id in the projects vs the
// channel buffer.
channel_buffer_a.read_with(cx_a, |channel_buffer, cx| {
assert_eq!(project_a.read(cx).replica_id(), 1);
assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 2);
});
channel_buffer_b.read_with(cx_b, |channel_buffer, cx| {
assert_eq!(project_b.read(cx).replica_id(), 0);
assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 1);
});
channel_buffer_c.read_with(cx_c, |channel_buffer, cx| {
// C is not in the project
assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 0);
});
let channel_window_a =
cx_a.add_window(|cx| ChannelView::new(project_a.clone(), channel_buffer_a.clone(), cx));
let channel_window_b =
cx_b.add_window(|cx| ChannelView::new(project_b.clone(), channel_buffer_b.clone(), cx));
let channel_window_c = cx_c.add_window(|cx| {
ChannelView::new(separate_project_c.clone(), channel_buffer_c.clone(), cx)
});
let channel_view_a = channel_window_a.root(cx_a);
let channel_view_b = channel_window_b.root(cx_b);
let channel_view_c = channel_window_c.root(cx_c);
// For clients A and B, the replica ids in the channel buffer are mapped
// so that they match the same users' replica ids in their shared project.
channel_view_a.read_with(cx_a, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(1, 0), (2, 1)].into_iter().collect::<HashMap<_, _>>()
);
});
channel_view_b.read_with(cx_b, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(1, 0), (2, 1)].into_iter().collect::<HashMap<u16, u16>>(),
)
});
// Client C only sees themself, as they're not part of any shared project
channel_view_c.read_with(cx_c, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(0, 0)].into_iter().collect::<HashMap<u16, u16>>(),
);
});
// Client C joins the project that clients A and B are in.
active_call_c
.update(cx_c, |call, cx| call.join_channel(channel_id, cx))
.await
.unwrap();
let project_c = client_c.build_remote_project(shared_project_id, cx_c).await;
deterministic.run_until_parked();
project_c.read_with(cx_c, |project, _| {
assert_eq!(project.replica_id(), 2);
});
// For clients A and B, client C's replica id in the channel buffer is
// now mapped to their replica id in the shared project.
channel_view_a.read_with(cx_a, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(1, 0), (2, 1), (0, 2)]
.into_iter()
.collect::<HashMap<_, _>>()
);
});
channel_view_b.read_with(cx_b, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(1, 0), (2, 1), (0, 2)]
.into_iter()
.collect::<HashMap<_, _>>(),
)
});
}
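The replica-id map this test asserts on is keyed by user identity: a collaborator's replica id in the channel buffer translates to that same user's replica id in the shared project, so one person keeps one color across both views, while users with no project counterpart simply get no entry. A self-contained sketch of that translation (the `Collaborator` struct is a simplified stand-in for the protobuf collaborator type the real `refresh_replica_id_map` reads):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy)]
struct Collaborator {
    user_id: u64,
    replica_id: u16,
}

// Sketch only: pair each channel-buffer collaborator with the project
// collaborator that has the same user id, mapping replica ids accordingly.
fn replica_id_map(
    channel_collaborators: &[Collaborator],
    project_collaborators: &[Collaborator],
) -> HashMap<u16, u16> {
    let mut map = HashMap::new();
    for channel in channel_collaborators {
        if let Some(project) = project_collaborators
            .iter()
            .find(|p| p.user_id == channel.user_id)
        {
            map.insert(channel.replica_id, project.replica_id);
        }
    }
    map
}
```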
#[gpui::test]
async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mut TestAppContext) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let zed_id = server.make_channel("zed", (&client_a, cx_a), &mut []).await;
let channel_buffer_1 = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
let channel_buffer_2 = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
let channel_buffer_3 = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
// All concurrent tasks for opening a channel buffer return the same model handle.
let (channel_buffer_1, channel_buffer_2, channel_buffer_3) =
future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3)
.await
.unwrap();
let model_id = channel_buffer_1.id();
assert_eq!(channel_buffer_1, channel_buffer_2);
assert_eq!(channel_buffer_1, channel_buffer_3);
channel_buffer_1.update(cx_a, |buffer, cx| {
buffer.buffer().update(cx, |buffer, cx| {
buffer.edit([(0..0, "hello")], None, cx);
})
});
deterministic.run_until_parked();
cx_a.update(|_| {
drop(channel_buffer_1);
drop(channel_buffer_2);
drop(channel_buffer_3);
});
deterministic.run_until_parked();
// The channel buffer can be reopened after dropping it.
let channel_buffer = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
.await
.unwrap();
assert_ne!(channel_buffer.id(), model_id);
channel_buffer.update(cx_a, |buffer, cx| {
buffer.buffer().update(cx, |buffer, _| {
assert_eq!(buffer.text(), "hello");
})
});
}
#[gpui::test]
async fn test_channel_buffer_disconnect(
deterministic: Arc<Deterministic>,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let channel_id = server
.make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
.await;
let channel_buffer_a = client_a
.channel_store()
.update(cx_a, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
let channel_buffer_b = client_b
.channel_store()
.update(cx_b, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap());
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
channel_buffer_a.update(cx_a, |buffer, _| {
assert_eq!(
buffer.channel().as_ref(),
&Channel {
id: channel_id,
name: "zed".to_string()
}
);
assert!(!buffer.is_connected());
});
deterministic.run_until_parked();
server.allow_connections();
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
deterministic.run_until_parked();
client_a
.channel_store()
.update(cx_a, |channel_store, _| {
channel_store.remove_channel(channel_id)
})
.await
.unwrap();
deterministic.run_until_parked();
// Channel buffer observed the deletion
channel_buffer_b.update(cx_b, |buffer, _| {
assert_eq!(
buffer.channel().as_ref(),
&Channel {
id: channel_id,
name: "zed".to_string()
}
);
assert!(!buffer.is_connected());
});
}
#[track_caller]
fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option<UserId>]) {
assert_eq!(
collaborators
.into_iter()
.map(|collaborator| collaborator.user_id)
.collect::<Vec<_>>(),
ids.into_iter().map(|id| id.unwrap()).collect::<Vec<_>>()
);
}
fn buffer_text(channel_buffer: &ModelHandle<language::Buffer>, cx: &mut TestAppContext) -> String {
channel_buffer.read_with(cx, |buffer, _| buffer.text())
}

View File

@@ -3,7 +3,8 @@ use crate::{
     tests::{room_participants, RoomParticipants, TestServer},
 };
 use call::ActiveCall;
-use client::{ChannelId, ChannelMembership, ChannelStore, User};
+use channel::{ChannelId, ChannelMembership, ChannelStore};
+use client::User;
 use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
 use rpc::{proto, RECEIVE_TIMEOUT};
 use std::sync::Arc;
@@ -798,7 +799,7 @@ async fn test_lost_channel_creation(
     deterministic.run_until_parked();
-    // Sanity check
+    // Sanity check, B has the invitation
     assert_channel_invitations(
         client_b.channel_store(),
         cx_b,
@@ -810,6 +811,7 @@ async fn test_lost_channel_creation(
         }],
     );
+    // A creates a subchannel while the invite is still pending.
     let subchannel_id = client_a
         .channel_store()
         .update(cx_a, |channel_store, cx| {
@@ -840,7 +842,7 @@ async fn test_lost_channel_creation(
         ],
     );
-    // Accept the invite
+    // Client B accepts the invite
     client_b
         .channel_store()
         .update(cx_b, |channel_store, _| {
@@ -851,7 +853,7 @@ async fn test_lost_channel_creation(
     deterministic.run_until_parked();
-    // B should now see the channel
+    // Client B should now see the channel
     assert_channels(
         client_b.channel_store(),
         cx_b,

View File

@ -26,6 +26,7 @@ auto_update = { path = "../auto_update" }
db = { path = "../db" } db = { path = "../db" }
call = { path = "../call" } call = { path = "../call" }
client = { path = "../client" } client = { path = "../client" }
channel = { path = "../channel" }
clock = { path = "../clock" } clock = { path = "../clock" }
collections = { path = "../collections" } collections = { path = "../collections" }
context_menu = { path = "../context_menu" } context_menu = { path = "../context_menu" }
@ -33,6 +34,7 @@ editor = { path = "../editor" }
feedback = { path = "../feedback" } feedback = { path = "../feedback" }
fuzzy = { path = "../fuzzy" } fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" } gpui = { path = "../gpui" }
language = { path = "../language" }
menu = { path = "../menu" } menu = { path = "../menu" }
picker = { path = "../picker" } picker = { path = "../picker" }
project = { path = "../project" } project = { path = "../project" }

View File

@ -0,0 +1,351 @@
use anyhow::{anyhow, Result};
use channel::{
channel_buffer::{self, ChannelBuffer},
ChannelId,
};
use client::proto;
use clock::ReplicaId;
use collections::HashMap;
use editor::Editor;
use gpui::{
actions,
elements::{ChildView, Label},
geometry::vector::Vector2F,
AnyElement, AnyViewHandle, AppContext, Element, Entity, ModelHandle, Subscription, Task, View,
ViewContext, ViewHandle,
};
use project::Project;
use std::any::Any;
use workspace::{
item::{FollowableItem, Item, ItemHandle},
register_followable_item,
searchable::SearchableItemHandle,
ItemNavHistory, Pane, ViewId, Workspace, WorkspaceId,
};
actions!(channel_view, [Deploy]);
pub(crate) fn init(cx: &mut AppContext) {
register_followable_item::<ChannelView>(cx)
}
pub struct ChannelView {
pub editor: ViewHandle<Editor>,
project: ModelHandle<Project>,
channel_buffer: ModelHandle<ChannelBuffer>,
remote_id: Option<ViewId>,
_editor_event_subscription: Subscription,
}
impl ChannelView {
pub fn open(
channel_id: ChannelId,
pane: ViewHandle<Pane>,
workspace: ViewHandle<Workspace>,
cx: &mut AppContext,
) -> Task<Result<ViewHandle<Self>>> {
let workspace = workspace.read(cx);
let project = workspace.project().to_owned();
let channel_store = workspace.app_state().channel_store.clone();
let markdown = workspace
.app_state()
.languages
.language_for_name("Markdown");
let channel_buffer =
channel_store.update(cx, |store, cx| store.open_channel_buffer(channel_id, cx));
cx.spawn(|mut cx| async move {
let channel_buffer = channel_buffer.await?;
let markdown = markdown.await?;
channel_buffer.update(&mut cx, |buffer, cx| {
buffer.buffer().update(cx, |buffer, cx| {
buffer.set_language(Some(markdown), cx);
})
});
pane.update(&mut cx, |pane, cx| {
pane.items_of_type::<Self>()
.find(|channel_view| channel_view.read(cx).channel_buffer == channel_buffer)
.unwrap_or_else(|| cx.add_view(|cx| Self::new(project, channel_buffer, cx)))
})
.ok_or_else(|| anyhow!("pane was dropped"))
})
}
pub fn new(
project: ModelHandle<Project>,
channel_buffer: ModelHandle<ChannelBuffer>,
cx: &mut ViewContext<Self>,
) -> Self {
let buffer = channel_buffer.read(cx).buffer();
// buffer.update(cx, |buffer, cx| buffer.set_language(language, cx));
let editor = cx.add_view(|cx| Editor::for_buffer(buffer, None, cx));
let _editor_event_subscription = cx.subscribe(&editor, |_, _, e, cx| cx.emit(e.clone()));
cx.subscribe(&project, Self::handle_project_event).detach();
cx.subscribe(&channel_buffer, Self::handle_channel_buffer_event)
.detach();
let this = Self {
editor,
project,
channel_buffer,
remote_id: None,
_editor_event_subscription,
};
this.refresh_replica_id_map(cx);
this
}
fn handle_project_event(
&mut self,
_: ModelHandle<Project>,
event: &project::Event,
cx: &mut ViewContext<Self>,
) {
match event {
project::Event::RemoteIdChanged(_) => {}
project::Event::DisconnectedFromHost => {}
project::Event::Closed => {}
project::Event::CollaboratorUpdated { .. } => {}
project::Event::CollaboratorLeft(_) => {}
project::Event::CollaboratorJoined(_) => {}
_ => return,
}
self.refresh_replica_id_map(cx);
}
fn handle_channel_buffer_event(
&mut self,
_: ModelHandle<ChannelBuffer>,
event: &channel_buffer::Event,
cx: &mut ViewContext<Self>,
) {
match event {
channel_buffer::Event::CollaboratorsChanged => {
self.refresh_replica_id_map(cx);
}
channel_buffer::Event::Disconnected => self.editor.update(cx, |editor, cx| {
editor.set_read_only(true);
cx.notify();
}),
}
}
/// Build a mapping of channel buffer replica ids to the corresponding
/// replica ids in the current project.
///
/// Using this mapping, a given user can be displayed with the same color
/// in the channel buffer as in other files in the project. Users who are
/// in the channel buffer but not the project will not have a color.
fn refresh_replica_id_map(&self, cx: &mut ViewContext<Self>) {
let mut project_replica_ids_by_channel_buffer_replica_id = HashMap::default();
let project = self.project.read(cx);
let channel_buffer = self.channel_buffer.read(cx);
project_replica_ids_by_channel_buffer_replica_id
.insert(channel_buffer.replica_id(cx), project.replica_id());
project_replica_ids_by_channel_buffer_replica_id.extend(
channel_buffer
.collaborators()
.iter()
.filter_map(|channel_buffer_collaborator| {
project
.collaborators()
.values()
.find_map(|project_collaborator| {
(project_collaborator.user_id == channel_buffer_collaborator.user_id)
.then_some((
channel_buffer_collaborator.replica_id as ReplicaId,
project_collaborator.replica_id,
))
})
}),
);
self.editor.update(cx, |editor, cx| {
editor.set_replica_id_map(Some(project_replica_ids_by_channel_buffer_replica_id), cx)
});
}
}
impl Entity for ChannelView {
type Event = editor::Event;
}
impl View for ChannelView {
fn ui_name() -> &'static str {
"ChannelView"
}
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
ChildView::new(self.editor.as_any(), cx).into_any()
}
fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
if cx.is_self_focused() {
cx.focus(self.editor.as_any())
}
}
}
impl Item for ChannelView {
fn tab_content<V: 'static>(
&self,
_: Option<usize>,
style: &theme::Tab,
cx: &gpui::AppContext,
) -> AnyElement<V> {
let channel_name = &self.channel_buffer.read(cx).channel().name;
let label = if self.channel_buffer.read(cx).is_connected() {
format!("#{}", channel_name)
} else {
format!("#{} (disconnected)", channel_name)
};
Label::new(label, style.label.to_owned()).into_any()
}
fn clone_on_split(&self, _: WorkspaceId, cx: &mut ViewContext<Self>) -> Option<Self> {
Some(Self::new(
self.project.clone(),
self.channel_buffer.clone(),
cx,
))
}
fn is_singleton(&self, _cx: &AppContext) -> bool {
true
}
fn navigate(&mut self, data: Box<dyn Any>, cx: &mut ViewContext<Self>) -> bool {
self.editor
.update(cx, |editor, cx| editor.navigate(data, cx))
}
fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
self.editor
.update(cx, |editor, cx| Item::deactivated(editor, cx))
}
fn set_nav_history(&mut self, history: ItemNavHistory, cx: &mut ViewContext<Self>) {
self.editor
.update(cx, |editor, cx| Item::set_nav_history(editor, history, cx))
}
fn as_searchable(&self, _: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
Some(Box::new(self.editor.clone()))
}
fn show_toolbar(&self) -> bool {
true
}
fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F> {
self.editor.read(cx).pixel_position_of_cursor(cx)
}
}
impl FollowableItem for ChannelView {
fn remote_id(&self) -> Option<workspace::ViewId> {
self.remote_id
}
fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
let channel = self.channel_buffer.read(cx).channel();
Some(proto::view::Variant::ChannelView(
proto::view::ChannelView {
channel_id: channel.id,
editor: if let Some(proto::view::Variant::Editor(proto)) =
self.editor.read(cx).to_state_proto(cx)
{
Some(proto)
} else {
None
},
},
))
}
fn from_state_proto(
pane: ViewHandle<workspace::Pane>,
workspace: ViewHandle<workspace::Workspace>,
remote_id: workspace::ViewId,
state: &mut Option<proto::view::Variant>,
cx: &mut AppContext,
) -> Option<gpui::Task<anyhow::Result<ViewHandle<Self>>>> {
let Some(proto::view::Variant::ChannelView(_)) = state else { return None };
let Some(proto::view::Variant::ChannelView(state)) = state.take() else { unreachable!() };
let open = ChannelView::open(state.channel_id, pane, workspace, cx);
Some(cx.spawn(|mut cx| async move {
let this = open.await?;
let task = this
.update(&mut cx, |this, cx| {
this.remote_id = Some(remote_id);
if let Some(state) = state.editor {
Some(this.editor.update(cx, |editor, cx| {
editor.apply_update_proto(
&this.project,
proto::update_view::Variant::Editor(proto::update_view::Editor {
selections: state.selections,
pending_selection: state.pending_selection,
scroll_top_anchor: state.scroll_top_anchor,
scroll_x: state.scroll_x,
scroll_y: state.scroll_y,
..Default::default()
}),
cx,
)
}))
} else {
None
}
})
.ok_or_else(|| anyhow!("window was closed"))?;
if let Some(task) = task {
task.await?;
}
Ok(this)
}))
}
fn add_event_to_update_proto(
&self,
event: &Self::Event,
update: &mut Option<proto::update_view::Variant>,
cx: &AppContext,
) -> bool {
self.editor
.read(cx)
.add_event_to_update_proto(event, update, cx)
}
fn apply_update_proto(
&mut self,
project: &ModelHandle<Project>,
message: proto::update_view::Variant,
cx: &mut ViewContext<Self>,
) -> gpui::Task<anyhow::Result<()>> {
self.editor.update(cx, |editor, cx| {
editor.apply_update_proto(project, message, cx)
})
}
fn set_leader_replica_id(
&mut self,
leader_replica_id: Option<u16>,
cx: &mut ViewContext<Self>,
) {
self.editor.update(cx, |editor, cx| {
editor.set_leader_replica_id(leader_replica_id, cx)
})
}
fn should_unfollow_on_event(event: &Self::Event, cx: &AppContext) -> bool {
Editor::should_unfollow_on_event(event, cx)
}
}

View File

@@ -4,10 +4,8 @@ mod panel_settings;
 use anyhow::Result;
 use call::ActiveCall;
-use client::{
-    proto::PeerId, Channel, ChannelEvent, ChannelId, ChannelStore, Client, Contact, User, UserStore,
-};
+use channel::{Channel, ChannelEvent, ChannelId, ChannelStore};
+use client::{proto::PeerId, Client, Contact, User, UserStore};
 use context_menu::{ContextMenu, ContextMenuItem};
 use db::kvp::KEY_VALUE_STORE;
 use editor::{Cancel, Editor};
@@ -20,13 +18,14 @@ use gpui::{
         MouseEventHandler, Orientation, OverlayPositionMode, Padding, ParentElement, SafeStylable,
         Stack, Svg,
     },
+    fonts::TextStyle,
     geometry::{
         rect::RectF,
         vector::{vec2f, Vector2F},
     },
     impl_actions,
     platform::{CursorStyle, MouseButton, PromptLevel},
-    serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, ModelHandle,
+    serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, FontCache, ModelHandle,
     Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
 };
 use menu::{Confirm, SelectNext, SelectPrev};
@@ -44,7 +43,10 @@ use workspace::{
     Workspace,
 };
-use crate::face_pile::FacePile;
+use crate::{
+    channel_view::{self, ChannelView},
+    face_pile::FacePile,
+};
 use channel_modal::ChannelModal;
 use self::contact_finder::ContactFinder;
@@ -79,6 +81,11 @@ struct RenameChannel {
     channel_id: u64,
 }
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
+struct OpenChannelBuffer {
+    channel_id: u64,
+}
 actions!(
     collab_panel,
     [
@@ -98,7 +105,8 @@ impl_actions!(
         InviteMembers,
         ManageMembers,
         RenameChannel,
-        ToggleCollapse
+        ToggleCollapse,
+        OpenChannelBuffer
     ]
 );
@@ -108,6 +116,7 @@ pub fn init(_client: Arc<Client>, cx: &mut AppContext) {
     settings::register::<panel_settings::CollaborationPanelSettings>(cx);
     contact_finder::init(cx);
     channel_modal::init(cx);
+    channel_view::init(cx);
     cx.add_action(CollabPanel::cancel);
     cx.add_action(CollabPanel::select_next);
@@ -123,7 +132,8 @@ pub fn init(_client: Arc<Client>, cx: &mut AppContext) {
     cx.add_action(CollabPanel::rename_channel);
     cx.add_action(CollabPanel::toggle_channel_collapsed);
     cx.add_action(CollabPanel::collapse_selected_channel);
-    cx.add_action(CollabPanel::expand_selected_channel)
+    cx.add_action(CollabPanel::expand_selected_channel);
+    cx.add_action(CollabPanel::open_channel_buffer);
 }
#[derive(Debug)] #[derive(Debug)]
@ -174,6 +184,7 @@ pub struct CollabPanel {
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
struct SerializedChannelsPanel { struct SerializedChannelsPanel {
width: Option<f32>, width: Option<f32>,
collapsed_channels: Vec<ChannelId>,
} }
#[derive(Debug)] #[derive(Debug)]
@ -218,6 +229,9 @@ enum ListEntry {
channel: Arc<Channel>, channel: Arc<Channel>,
depth: usize, depth: usize,
}, },
ChannelNotes {
channel_id: ChannelId,
},
ChannelEditor { ChannelEditor {
depth: usize, depth: usize,
}, },
@ -361,6 +375,12 @@ impl CollabPanel {
return channel_row; return channel_row;
} }
} }
ListEntry::ChannelNotes { channel_id } => this.render_channel_notes(
*channel_id,
&theme.collab_panel,
is_selected,
cx,
),
ListEntry::ChannelInvite(channel) => Self::render_channel_invite( ListEntry::ChannelInvite(channel) => Self::render_channel_invite(
channel.clone(), channel.clone(),
this.channel_store.clone(), this.channel_store.clone(),
@ -500,6 +520,7 @@ impl CollabPanel {
if let Some(serialized_panel) = serialized_panel { if let Some(serialized_panel) = serialized_panel {
panel.update(cx, |panel, cx| { panel.update(cx, |panel, cx| {
panel.width = serialized_panel.width; panel.width = serialized_panel.width;
panel.collapsed_channels = serialized_panel.collapsed_channels;
cx.notify(); cx.notify();
}); });
} }
@@ -510,12 +531,16 @@ impl CollabPanel {
     fn serialize(&mut self, cx: &mut ViewContext<Self>) {
         let width = self.width;
+        let collapsed_channels = self.collapsed_channels.clone();
         self.pending_serialization = cx.background().spawn(
             async move {
                 KEY_VALUE_STORE
                     .write_kvp(
                         COLLABORATION_PANEL_KEY.into(),
-                        serde_json::to_string(&SerializedChannelsPanel { width })?,
+                        serde_json::to_string(&SerializedChannelsPanel {
+                            width,
+                            collapsed_channels,
+                        })?,
                     )
                     .await?;
                 anyhow::Ok(())
@ -539,6 +564,10 @@ impl CollabPanel {
if !self.collapsed_sections.contains(&Section::ActiveCall) { if !self.collapsed_sections.contains(&Section::ActiveCall) {
let room = room.read(cx); let room = room.read(cx);
if let Some(channel_id) = room.channel_id() {
self.entries.push(ListEntry::ChannelNotes { channel_id })
}
// Populate the active user. // Populate the active user.
if let Some(user) = user_store.current_user() { if let Some(user) = user_store.current_user() {
self.match_candidates.clear(); self.match_candidates.clear();
@@ -998,25 +1027,19 @@ impl CollabPanel {
     ) -> AnyElement<Self> {
         enum JoinProject {}
-        let font_cache = cx.font_cache();
-        let host_avatar_height = theme
+        let host_avatar_width = theme
             .contact_avatar
             .width
             .or(theme.contact_avatar.height)
             .unwrap_or(0.);
-        let row = &theme.project_row.inactive_state().default;
         let tree_branch = theme.tree_branch;
-        let line_height = row.name.text.line_height(font_cache);
-        let cap_height = row.name.text.cap_height(font_cache);
-        let baseline_offset =
-            row.name.text.baseline_offset(font_cache) + (theme.row_height - line_height) / 2.;
         let project_name = if worktree_root_names.is_empty() {
             "untitled".to_string()
         } else {
             worktree_root_names.join(", ")
         };
-        MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, _| {
+        MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, cx| {
             let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state);
             let row = theme
                 .project_row
@@ -1024,39 +1047,20 @@ impl CollabPanel {
                 .style_for(mouse_state);
             Flex::row()
+                .with_child(render_tree_branch(
+                    tree_branch,
+                    &row.name.text,
+                    is_last,
+                    vec2f(host_avatar_width, theme.row_height),
+                    cx.font_cache(),
+                ))
                 .with_child(
-                    Stack::new()
-                        .with_child(Canvas::new(move |scene, bounds, _, _, _| {
-                            let start_x =
-                                bounds.min_x() + (bounds.width() / 2.) - (tree_branch.width / 2.);
-                            let end_x = bounds.max_x();
-                            let start_y = bounds.min_y();
-                            let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.);
-                            scene.push_quad(gpui::Quad {
-                                bounds: RectF::from_points(
-                                    vec2f(start_x, start_y),
-                                    vec2f(
-                                        start_x + tree_branch.width,
-                                        if is_last { end_y } else { bounds.max_y() },
-                                    ),
-                                ),
-                                background: Some(tree_branch.color),
-                                border: gpui::Border::default(),
-                                corner_radii: (0.).into(),
-                            });
-                            scene.push_quad(gpui::Quad {
-                                bounds: RectF::from_points(
-                                    vec2f(start_x, end_y),
-                                    vec2f(end_x, end_y + tree_branch.width),
-                                ),
-                                background: Some(tree_branch.color),
-                                border: gpui::Border::default(),
-                                corner_radii: (0.).into(),
-                            });
-                        }))
+                    Svg::new("icons/file_icons/folder.svg")
+                        .with_color(theme.channel_hash.color)
                         .constrained()
-                        .with_width(host_avatar_height),
+                        .with_width(theme.channel_hash.width)
+                        .aligned()
+                        .left(),
                 )
                 .with_child(
                     Label::new(project_name, row.name.text.clone())
@@ -1231,7 +1235,7 @@ impl CollabPanel {
         });
         if let Some(name) = channel_name {
-            Cow::Owned(format!("Current Call - #{}", name))
+            Cow::Owned(format!("#{}", name))
         } else {
             Cow::Borrowed("Current Call")
         }
@ -1667,6 +1671,61 @@ impl CollabPanel {
.into_any() .into_any()
} }
fn render_channel_notes(
&self,
channel_id: ChannelId,
theme: &theme::CollabPanel,
is_selected: bool,
cx: &mut ViewContext<Self>,
) -> AnyElement<Self> {
enum ChannelNotes {}
let host_avatar_width = theme
.contact_avatar
.width
.or(theme.contact_avatar.height)
.unwrap_or(0.);
MouseEventHandler::new::<ChannelNotes, _>(channel_id as usize, cx, |state, cx| {
let tree_branch = *theme.tree_branch.in_state(is_selected).style_for(state);
let row = theme.project_row.in_state(is_selected).style_for(state);
Flex::<Self>::row()
.with_child(render_tree_branch(
tree_branch,
&row.name.text,
true,
vec2f(host_avatar_width, theme.row_height),
cx.font_cache(),
))
.with_child(
Svg::new("icons/radix/file.svg")
.with_color(theme.channel_hash.color)
.constrained()
.with_width(theme.channel_hash.width)
.aligned()
.left(),
)
.with_child(
Label::new("notes", theme.channel_name.text.clone())
.contained()
.with_style(theme.channel_name.container)
.aligned()
.left()
.flex(1., true),
)
.constrained()
.with_height(theme.row_height)
.contained()
.with_style(*theme.channel_row.style_for(is_selected, state))
.with_padding_left(theme.channel_row.default_style().padding.left)
})
.on_click(MouseButton::Left, move |_, this, cx| {
this.open_channel_buffer(&OpenChannelBuffer { channel_id }, cx);
})
.with_cursor_style(CursorStyle::PointingHand)
.into_any()
}
fn render_channel_invite( fn render_channel_invite(
channel: Arc<Channel>, channel: Arc<Channel>,
channel_store: ModelHandle<ChannelStore>, channel_store: ModelHandle<ChannelStore>,
@@ -1864,7 +1923,6 @@ impl CollabPanel {
         channel_id: u64,
         cx: &mut ViewContext<Self>,
     ) {
-        if self.channel_store.read(cx).is_user_admin(channel_id) {
         self.context_menu_on_selected = position.is_none();
         self.context_menu.update(cx, |context_menu, cx| {
@@ -1880,6 +1938,24 @@ impl CollabPanel {
                 "Collapse Subchannels"
             };
+            let mut items = vec![
+                ContextMenuItem::action(expand_action_name, ToggleCollapse { channel_id }),
+                ContextMenuItem::action("Open Notes", OpenChannelBuffer { channel_id }),
+            ];
+            if self.channel_store.read(cx).is_user_admin(channel_id) {
+                items.extend([
+                    ContextMenuItem::Separator,
+                    ContextMenuItem::action("New Subchannel", NewChannel { channel_id }),
+                    ContextMenuItem::action("Rename", RenameChannel { channel_id }),
+                    ContextMenuItem::Separator,
+                    ContextMenuItem::action("Invite Members", InviteMembers { channel_id }),
+                    ContextMenuItem::action("Manage Members", ManageMembers { channel_id }),
+                    ContextMenuItem::Separator,
+                    ContextMenuItem::action("Delete", RemoveChannel { channel_id }),
+                ]);
+            }
             context_menu.show(
                 position.unwrap_or_default(),
                 if self.context_menu_on_selected {
@@ -1887,24 +1963,13 @@ impl CollabPanel {
                 } else {
                     gpui::elements::AnchorCorner::BottomLeft
                 },
-                vec![
-                    ContextMenuItem::action(expand_action_name, ToggleCollapse { channel_id }),
-                    ContextMenuItem::action("New Subchannel", NewChannel { channel_id }),
-                    ContextMenuItem::Separator,
-                    ContextMenuItem::action("Invite to Channel", InviteMembers { channel_id }),
-                    ContextMenuItem::Separator,
-                    ContextMenuItem::action("Rename", RenameChannel { channel_id }),
-                    ContextMenuItem::action("Manage", ManageMembers { channel_id }),
-                    ContextMenuItem::Separator,
-                    ContextMenuItem::action("Delete", RemoveChannel { channel_id }),
-                ],
+                items,
                 cx,
             );
         });
         cx.notify();
     }
-    }
fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) { fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
if self.take_editing_state(cx) { if self.take_editing_state(cx) {
@ -2104,6 +2169,7 @@ impl CollabPanel {
self.collapsed_channels.insert(ix, channel_id); self.collapsed_channels.insert(ix, channel_id);
} }
}; };
self.serialize(cx);
self.update_entries(true, cx); self.update_entries(true, cx);
cx.notify(); cx.notify();
cx.focus_self(); cx.focus_self();
@ -2209,6 +2275,21 @@ impl CollabPanel {
} }
} }
fn open_channel_buffer(&mut self, action: &OpenChannelBuffer, cx: &mut ViewContext<Self>) {
if let Some(workspace) = self.workspace.upgrade(cx) {
let pane = workspace.read(cx).active_pane().clone();
let channel_view = ChannelView::open(action.channel_id, pane.clone(), workspace, cx);
cx.spawn(|_, mut cx| async move {
let channel_view = channel_view.await?;
pane.update(&mut cx, |pane, cx| {
pane.add_item(Box::new(channel_view), true, true, None, cx)
});
anyhow::Ok(())
})
.detach();
}
}
fn show_inline_context_menu(&mut self, _: &menu::ShowContextMenu, cx: &mut ViewContext<Self>) { fn show_inline_context_menu(&mut self, _: &menu::ShowContextMenu, cx: &mut ViewContext<Self>) {
let Some(channel) = self.selected_channel() else { let Some(channel) = self.selected_channel() else {
return; return;
@ -2367,6 +2448,51 @@ impl CollabPanel {
} }
} }
fn render_tree_branch(
branch_style: theme::TreeBranch,
row_style: &TextStyle,
is_last: bool,
size: Vector2F,
font_cache: &FontCache,
) -> gpui::elements::ConstrainedBox<CollabPanel> {
let line_height = row_style.line_height(font_cache);
let cap_height = row_style.cap_height(font_cache);
let baseline_offset = row_style.baseline_offset(font_cache) + (size.y() - line_height) / 2.;
Canvas::new(move |scene, bounds, _, _, _| {
scene.paint_layer(None, |scene| {
let start_x = bounds.min_x() + (bounds.width() / 2.) - (branch_style.width / 2.);
let end_x = bounds.max_x();
let start_y = bounds.min_y();
let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.);
scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, start_y),
vec2f(
start_x + branch_style.width,
if is_last { end_y } else { bounds.max_y() },
),
),
background: Some(branch_style.color),
border: gpui::Border::default(),
corner_radii: (0.).into(),
});
scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, end_y),
vec2f(end_x, end_y + branch_style.width),
),
background: Some(branch_style.color),
border: gpui::Border::default(),
corner_radii: (0.).into(),
});
})
})
.constrained()
.with_width(size.x())
}
impl View for CollabPanel { impl View for CollabPanel {
fn ui_name() -> &'static str { fn ui_name() -> &'static str {
"CollabPanel" "CollabPanel"
@ -2576,6 +2702,14 @@ impl PartialEq for ListEntry {
return channel_1.id == channel_2.id && depth_1 == depth_2; return channel_1.id == channel_2.id && depth_1 == depth_2;
} }
} }
ListEntry::ChannelNotes { channel_id } => {
if let ListEntry::ChannelNotes {
channel_id: other_id,
} = other
{
return channel_id == other_id;
}
}
ListEntry::ChannelInvite(channel_1) => { ListEntry::ChannelInvite(channel_1) => {
if let ListEntry::ChannelInvite(channel_2) = other { if let ListEntry::ChannelInvite(channel_2) = other {
return channel_1.id == channel_2.id; return channel_1.id == channel_2.id;

View File

@ -1,4 +1,5 @@
use client::{proto, ChannelId, ChannelMembership, ChannelStore, User, UserId, UserStore}; use channel::{ChannelId, ChannelMembership, ChannelStore};
use client::{proto, User, UserId, UserStore};
use context_menu::{ContextMenu, ContextMenuItem}; use context_menu::{ContextMenu, ContextMenuItem};
use fuzzy::{match_strings, StringMatchCandidate}; use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{ use gpui::{

View File

@ -1,3 +1,4 @@
pub mod channel_view;
pub mod collab_panel; pub mod collab_panel;
mod collab_titlebar_item; mod collab_titlebar_item;
mod contact_notification; mod contact_notification;

View File

@ -559,6 +559,7 @@ pub struct Editor {
blink_manager: ModelHandle<BlinkManager>, blink_manager: ModelHandle<BlinkManager>,
show_local_selections: bool, show_local_selections: bool,
mode: EditorMode, mode: EditorMode,
replica_id_mapping: Option<HashMap<ReplicaId, ReplicaId>>,
show_gutter: bool, show_gutter: bool,
show_wrap_guides: Option<bool>, show_wrap_guides: Option<bool>,
placeholder_text: Option<Arc<str>>, placeholder_text: Option<Arc<str>>,
@ -1394,6 +1395,7 @@ impl Editor {
blink_manager: blink_manager.clone(), blink_manager: blink_manager.clone(),
show_local_selections: true, show_local_selections: true,
mode, mode,
replica_id_mapping: None,
show_gutter: mode == EditorMode::Full, show_gutter: mode == EditorMode::Full,
show_wrap_guides: None, show_wrap_guides: None,
placeholder_text: None, placeholder_text: None,
@ -1604,6 +1606,19 @@ impl Editor {
self.read_only = read_only; self.read_only = read_only;
} }
pub fn replica_id_map(&self) -> Option<&HashMap<ReplicaId, ReplicaId>> {
self.replica_id_mapping.as_ref()
}
pub fn set_replica_id_map(
&mut self,
mapping: Option<HashMap<ReplicaId, ReplicaId>>,
cx: &mut ViewContext<Self>,
) {
self.replica_id_mapping = mapping;
cx.notify();
}
fn selections_did_change( fn selections_did_change(
&mut self, &mut self,
local: bool, local: bool,

View File

@@ -6384,7 +6384,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
         .update(|cx| {
             Editor::from_state_proto(
                 pane.clone(),
-                project.clone(),
+                workspace.clone(),
                 ViewId {
                     creator: Default::default(),
                     id: 0,
@@ -6479,7 +6479,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
         .update(|cx| {
             Editor::from_state_proto(
                 pane.clone(),
-                project.clone(),
+                workspace.clone(),
                 ViewId {
                     creator: Default::default(),
                     id: 0,

View File

@ -62,6 +62,7 @@ struct SelectionLayout {
head: DisplayPoint, head: DisplayPoint,
cursor_shape: CursorShape, cursor_shape: CursorShape,
is_newest: bool, is_newest: bool,
is_local: bool,
range: Range<DisplayPoint>, range: Range<DisplayPoint>,
active_rows: Range<u32>, active_rows: Range<u32>,
} }
@ -73,6 +74,7 @@ impl SelectionLayout {
cursor_shape: CursorShape, cursor_shape: CursorShape,
map: &DisplaySnapshot, map: &DisplaySnapshot,
is_newest: bool, is_newest: bool,
is_local: bool,
) -> Self { ) -> Self {
let point_selection = selection.map(|p| p.to_point(&map.buffer_snapshot)); let point_selection = selection.map(|p| p.to_point(&map.buffer_snapshot));
let display_selection = point_selection.map(|p| p.to_display_point(map)); let display_selection = point_selection.map(|p| p.to_display_point(map));
@ -109,6 +111,7 @@ impl SelectionLayout {
head, head,
cursor_shape, cursor_shape,
is_newest, is_newest,
is_local,
range, range,
active_rows, active_rows,
} }
@ -763,7 +766,6 @@ impl EditorElement {
cx: &mut PaintContext<Editor>, cx: &mut PaintContext<Editor>,
) { ) {
let style = &self.style; let style = &self.style;
let local_replica_id = editor.replica_id(cx);
let scroll_position = layout.position_map.snapshot.scroll_position(); let scroll_position = layout.position_map.snapshot.scroll_position();
let start_row = layout.visible_display_row_range.start; let start_row = layout.visible_display_row_range.start;
let scroll_top = scroll_position.y() * layout.position_map.line_height; let scroll_top = scroll_position.y() * layout.position_map.line_height;
@@ -852,15 +854,13 @@ impl EditorElement {
         for (replica_id, selections) in &layout.selections {
             let replica_id = *replica_id;
-            let selection_style = style.replica_selection_style(replica_id);
+            let selection_style = if let Some(replica_id) = replica_id {
+                style.replica_selection_style(replica_id)
+            } else {
+                &style.absent_selection
+            };
             for selection in selections {
-                if !selection.range.is_empty()
-                    && (replica_id == local_replica_id
-                        || Some(replica_id) == editor.leader_replica_id)
-                {
-                    invisible_display_ranges.push(selection.range.clone());
-                }
                 self.paint_highlighted_range(
                     scene,
                     selection.range.clone(),
@@ -874,7 +874,10 @@ impl EditorElement {
                     bounds,
                 );
-                if editor.show_local_cursors(cx) || replica_id != local_replica_id {
+                if selection.is_local && !selection.range.is_empty() {
+                    invisible_display_ranges.push(selection.range.clone());
+                }
+                if !selection.is_local || editor.show_local_cursors(cx) {
                     let cursor_position = selection.head;
                     if layout
                         .visible_display_row_range
@@ -2124,7 +2127,7 @@ impl Element<Editor> for EditorElement {
                 .anchor_before(DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right))
         };
-        let mut selections: Vec<(ReplicaId, Vec<SelectionLayout>)> = Vec::new();
+        let mut selections: Vec<(Option<ReplicaId>, Vec<SelectionLayout>)> = Vec::new();
         let mut active_rows = BTreeMap::new();
         let mut fold_ranges = Vec::new();
         let is_singleton = editor.is_singleton(cx);
@@ -2155,8 +2158,14 @@ impl Element<Editor> for EditorElement {
                 .buffer_snapshot
                 .remote_selections_in_range(&(start_anchor..end_anchor))
             {
+                let replica_id = if let Some(mapping) = &editor.replica_id_mapping {
+                    mapping.get(&replica_id).copied()
+                } else {
+                    None
+                };
                 // The local selections match the leader's selections.
-                if Some(replica_id) == editor.leader_replica_id {
+                if replica_id.is_some() && replica_id == editor.leader_replica_id {
                     continue;
                 }
                 remote_selections
@@ -2168,6 +2177,7 @@ impl Element<Editor> for EditorElement {
                     cursor_shape,
                     &snapshot.display_snapshot,
                     false,
+                    false,
                 ));
             }
             selections.extend(remote_selections);
@ -2191,6 +2201,7 @@ impl Element<Editor> for EditorElement {
editor.cursor_shape, editor.cursor_shape,
&snapshot.display_snapshot, &snapshot.display_snapshot,
is_newest, is_newest,
true,
); );
if is_newest { if is_newest {
newest_selection_head = Some(layout.head); newest_selection_head = Some(layout.head);
@ -2206,11 +2217,18 @@ impl Element<Editor> for EditorElement {
} }
// Render the local selections in the leader's color when following. // Render the local selections in the leader's color when following.
let local_replica_id = editor let local_replica_id = if let Some(leader_replica_id) = editor.leader_replica_id {
.leader_replica_id leader_replica_id
.unwrap_or_else(|| editor.replica_id(cx)); } else {
let replica_id = editor.replica_id(cx);
if let Some(mapping) = &editor.replica_id_mapping {
mapping.get(&replica_id).copied().unwrap_or(replica_id)
} else {
replica_id
}
};
selections.push((local_replica_id, layouts)); selections.push((Some(local_replica_id), layouts));
} }
let scrollbar_settings = &settings::get::<EditorSettings>(cx).scrollbar; let scrollbar_settings = &settings::get::<EditorSettings>(cx).scrollbar;
@ -2591,7 +2609,7 @@ pub struct LayoutState {
blocks: Vec<BlockLayout>, blocks: Vec<BlockLayout>,
highlighted_ranges: Vec<(Range<DisplayPoint>, Color)>, highlighted_ranges: Vec<(Range<DisplayPoint>, Color)>,
fold_ranges: Vec<(BufferRow, Range<DisplayPoint>, Color)>, fold_ranges: Vec<(BufferRow, Range<DisplayPoint>, Color)>,
selections: Vec<(ReplicaId, Vec<SelectionLayout>)>, selections: Vec<(Option<ReplicaId>, Vec<SelectionLayout>)>,
scrollbar_row_range: Range<f32>, scrollbar_row_range: Range<f32>,
show_scrollbars: bool, show_scrollbars: bool,
is_singleton: bool, is_singleton: bool,
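
The hunks above rekey selection layouts from `ReplicaId` to `Option<ReplicaId>` and fall back to a dedicated `absent_selection` style when no live replica is known for a collaborator. A minimal, self-contained sketch of that lookup (the types and the modulo indexing are placeholders, not Zed's actual `EditorStyle`):

```rust
type ReplicaId = u16;

#[derive(Debug, Clone, Copy, PartialEq)]
struct SelectionStyle {
    color: u32,
}

struct EditorStyle {
    guest_selections: Vec<SelectionStyle>,
    absent_selection: SelectionStyle,
}

impl EditorStyle {
    // Assumed indexing: cycle through the guest palette by replica id.
    fn replica_selection_style(&self, replica_id: ReplicaId) -> &SelectionStyle {
        &self.guest_selections[replica_id as usize % self.guest_selections.len()]
    }

    // None means "no live replica for this collaborator": use the absent style.
    fn selection_style(&self, replica_id: Option<ReplicaId>) -> &SelectionStyle {
        match replica_id {
            Some(id) => self.replica_selection_style(id),
            None => &self.absent_selection,
        }
    }
}

fn main() {
    let style = EditorStyle {
        guest_selections: vec![
            SelectionStyle { color: 0xff0000 },
            SelectionStyle { color: 0x00ff00 },
        ],
        absent_selection: SelectionStyle { color: 0x888888 },
    };
    assert_eq!(style.selection_style(Some(1)).color, 0x00ff00);
    assert_eq!(style.selection_style(None).color, 0x888888);
}
```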

View File

@@ -49,11 +49,12 @@ impl FollowableItem for Editor {
fn from_state_proto(
pane: ViewHandle<workspace::Pane>,
- project: ModelHandle<Project>,
+ workspace: ViewHandle<Workspace>,
remote_id: ViewId,
state: &mut Option<proto::view::Variant>,
cx: &mut AppContext,
) -> Option<Task<Result<ViewHandle<Self>>>> {
+ let project = workspace.read(cx).project().to_owned();
let Some(proto::view::Variant::Editor(_)) = state else { return None };
let Some(proto::view::Variant::Editor(state)) = state.take() else { unreachable!() };
@@ -753,7 +754,7 @@ impl Item for Editor {
Some(Box::new(handle.clone()))
}
- fn pixel_position_of_cursor(&self) -> Option<Vector2F> {
+ fn pixel_position_of_cursor(&self, _: &AppContext) -> Option<Vector2F> {
self.pixel_position_of_newest_cursor
}

View File

@@ -4687,12 +4687,13 @@ impl AnyWeakModelHandle {
}
}
- #[derive(Copy)]
pub struct WeakViewHandle<T> {
any_handle: AnyWeakViewHandle,
view_type: PhantomData<T>,
}
+ impl<T> Copy for WeakViewHandle<T> {}
impl<T> Debug for WeakViewHandle<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct(&format!("WeakViewHandle<{}>", type_name::<T>()))
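
A standalone sketch of why the handle gets a manual `impl<T> Copy` rather than a derive: `#[derive(Copy)]` on a generic struct adds a `T: Copy` bound, but `T` only appears inside `PhantomData`, so the handle can be copyable for any view type. `AnyWeakHandle` below is a simplified stand-in for gpui's `AnyWeakViewHandle`, not the real definition.

```rust
use std::marker::PhantomData;

#[derive(Clone, Copy)]
struct AnyWeakHandle {
    window_id: usize,
    view_id: usize,
}

struct WeakHandle<T> {
    any_handle: AnyWeakHandle,
    view_type: PhantomData<T>,
}

// Manual impls avoid the `T: Copy` / `T: Clone` bounds a derive would require.
impl<T> Copy for WeakHandle<T> {}

impl<T> Clone for WeakHandle<T> {
    fn clone(&self) -> Self {
        *self
    }
}

struct NotCopy(String); // a view type that is not Copy itself

fn main() {
    let handle = WeakHandle::<NotCopy> {
        any_handle: AnyWeakHandle { window_id: 1, view_id: 42 },
        view_type: PhantomData,
    };
    let copied = handle; // plain copy; `handle` remains usable
    assert_eq!(handle.any_handle.view_id, copied.any_handle.view_id);
    assert_eq!(handle.any_handle.window_id, copied.any_handle.window_id);
}
```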

View File

@@ -359,6 +359,14 @@ impl Buffer {
)
}
+ pub fn remote(remote_id: u64, replica_id: ReplicaId, base_text: String) -> Self {
+ Self::build(
+ TextBuffer::new(replica_id, remote_id, base_text),
+ None,
+ None,
+ )
+ }
pub fn from_proto(
replica_id: ReplicaId,
message: proto::BufferState,

View File

@@ -207,6 +207,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
}
}
+ // This behavior is currently copied in the collab database, for snapshotting channel notes
pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
Ok(
match message

View File

@@ -11,7 +11,7 @@ mod project_tests;
mod worktree_tests;
use anyhow::{anyhow, Context, Result};
- use client::{proto, Client, TypedEnvelope, UserStore};
+ use client::{proto, Client, TypedEnvelope, UserId, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use copilot::Copilot;
@@ -250,6 +250,7 @@ enum ProjectClientState {
pub struct Collaborator {
pub peer_id: proto::PeerId,
pub replica_id: ReplicaId,
+ pub user_id: UserId,
}
#[derive(Clone, Debug, PartialEq)]
@@ -281,6 +282,7 @@ pub enum Event {
old_peer_id: proto::PeerId,
new_peer_id: proto::PeerId,
},
+ CollaboratorJoined(proto::PeerId),
CollaboratorLeft(proto::PeerId),
RefreshInlayHints,
}
@@ -5930,6 +5932,7 @@ impl Project {
let collaborator = Collaborator::from_proto(collaborator)?;
this.update(&mut cx, |this, cx| {
this.shared_buffers.remove(&collaborator.peer_id);
+ cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
this.collaborators
.insert(collaborator.peer_id, collaborator);
cx.notify();
@@ -7756,6 +7759,7 @@ impl Collaborator {
Ok(Self {
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
replica_id: message.replica_id as ReplicaId,
+ user_id: message.user_id as UserId,
})
}
}
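
With `user_id` now carried on every `Collaborator`, a client can correlate the same person across a shared project and a channel notes buffer, for example to reuse one selection color in both places. A simplified sketch of such a mapping; the types and the function below are illustrative, not Zed's actual `replica_id_mapping` construction:

```rust
use std::collections::HashMap;

type UserId = u64;
type ReplicaId = u16;

#[derive(Debug)]
struct Collaborator {
    replica_id: ReplicaId,
    user_id: UserId,
}

fn replica_id_mapping(
    project_collaborators: &[Collaborator],
    channel_collaborators: &[Collaborator],
) -> HashMap<ReplicaId, ReplicaId> {
    // Map each channel-buffer replica to the replica id the same user has in
    // the project, so both views can color that user identically.
    let by_user: HashMap<UserId, ReplicaId> = project_collaborators
        .iter()
        .map(|c| (c.user_id, c.replica_id))
        .collect();
    channel_collaborators
        .iter()
        .filter_map(|c| {
            by_user
                .get(&c.user_id)
                .map(|&project_replica| (c.replica_id, project_replica))
        })
        .collect()
}

fn main() {
    let project = [Collaborator { replica_id: 1, user_id: 100 }];
    let channel = [Collaborator { replica_id: 5, user_id: 100 }];
    let mapping = replica_id_mapping(&project, &channel);
    assert_eq!(mapping.get(&5), Some(&1));
}
```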

View File

@@ -23,7 +23,7 @@ async-tungstenite = "0.16"
base64 = "0.13"
futures.workspace = true
parking_lot.workspace = true
- prost = "0.8"
+ prost.workspace = true
rand.workspace = true
rsa = "0.4"
serde.workspace = true

View File

@@ -142,6 +142,13 @@ message Envelope {
GetChannelMembersResponse get_channel_members_response = 128;
SetChannelMemberAdmin set_channel_member_admin = 129;
RenameChannel rename_channel = 130;
+ JoinChannelBuffer join_channel_buffer = 131;
+ JoinChannelBufferResponse join_channel_buffer_response = 132;
+ UpdateChannelBuffer update_channel_buffer = 133;
+ LeaveChannelBuffer leave_channel_buffer = 134;
+ AddChannelBufferCollaborator add_channel_buffer_collaborator = 135;
+ RemoveChannelBufferCollaborator remove_channel_buffer_collaborator = 136;
}
}
@@ -411,6 +418,16 @@ message RemoveProjectCollaborator {
PeerId peer_id = 2;
}
+ message AddChannelBufferCollaborator {
+ uint64 channel_id = 1;
+ Collaborator collaborator = 2;
+ }
+ message RemoveChannelBufferCollaborator {
+ uint64 channel_id = 1;
+ PeerId peer_id = 2;
+ }
message GetDefinition {
uint64 project_id = 1;
uint64 buffer_id = 2;
@@ -540,6 +557,11 @@ message UpdateBuffer {
repeated Operation operations = 3;
}
+ message UpdateChannelBuffer {
+ uint64 channel_id = 1;
+ repeated Operation operations = 2;
+ }
message UpdateBufferFile {
uint64 project_id = 1;
uint64 buffer_id = 2;
@@ -948,6 +970,22 @@ message RenameChannel {
string name = 2;
}
+ message JoinChannelBuffer {
+ uint64 channel_id = 1;
+ }
+ message JoinChannelBufferResponse {
+ uint64 buffer_id = 1;
+ uint32 replica_id = 2;
+ string base_text = 3;
+ repeated Operation operations = 4;
+ repeated Collaborator collaborators = 5;
+ }
+ message LeaveChannelBuffer {
+ uint64 channel_id = 1;
+ }
message RespondToChannelInvite {
uint64 channel_id = 1;
bool accept = 2;
@@ -1082,6 +1120,7 @@ message View {
oneof variant {
Editor editor = 3;
+ ChannelView channel_view = 4;
}
message Editor {
@@ -1094,6 +1133,11 @@ message View {
float scroll_x = 7;
float scroll_y = 8;
}
+ message ChannelView {
+ uint64 channel_id = 1;
+ Editor editor = 2;
+ }
}
message Collaborator {
@@ -1144,7 +1188,6 @@ enum GitStatus {
Conflict = 2;
}
message BufferState {
uint64 id = 1;
optional File file = 2;
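
Taken together, the new messages describe the channel-buffer lifecycle: a client sends `JoinChannelBuffer`, receives a snapshot plus catch-up operations, streams edits via `UpdateChannelBuffer`, and sends `LeaveChannelBuffer` when it closes the notes. The Rust below is only an illustrative model of that flow, with hand-written structs mirroring a few of the proto fields; it is not generated code or the real client.

```rust
// Hand-written stand-ins for a subset of the proto messages above.
#[derive(Debug)]
struct JoinChannelBuffer {
    channel_id: u64,
}

#[derive(Debug)]
struct JoinChannelBufferResponse {
    buffer_id: u64,
    replica_id: u32,
    base_text: String,
    operations: Vec<String>, // stand-in for `repeated Operation`
}

#[derive(Debug)]
struct LeaveChannelBuffer {
    channel_id: u64,
}

fn seed_buffer(response: &JoinChannelBufferResponse) -> String {
    // A real client would create a replicated buffer from `base_text` with the
    // server-assigned `replica_id`, then replay `operations` to catch up.
    format!(
        "buffer {} as replica {}: {:?} (+{} ops)",
        response.buffer_id,
        response.replica_id,
        response.base_text,
        response.operations.len()
    )
}

fn main() {
    let join = JoinChannelBuffer { channel_id: 42 };
    println!("-> {:?}", join);
    let response = JoinChannelBufferResponse {
        buffer_id: 7,
        replica_id: 2,
        base_text: "# Channel notes".into(),
        operations: vec!["edit".into()],
    };
    println!("<- {}", seed_buffer(&response));
    let leave = LeaveChannelBuffer { channel_id: 42 };
    println!("-> {:?}", leave);
}
```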

View File

@@ -248,7 +248,13 @@ messages!(
(GetPrivateUserInfo, Foreground),
(GetPrivateUserInfoResponse, Foreground),
(GetChannelMembers, Foreground),
- (GetChannelMembersResponse, Foreground)
+ (GetChannelMembersResponse, Foreground),
+ (JoinChannelBuffer, Foreground),
+ (JoinChannelBufferResponse, Foreground),
+ (LeaveChannelBuffer, Background),
+ (UpdateChannelBuffer, Foreground),
+ (RemoveChannelBufferCollaborator, Foreground),
+ (AddChannelBufferCollaborator, Foreground),
);
request_messages!(
@@ -315,6 +321,8 @@ request_messages!(
(UpdateParticipantLocation, Ack),
(UpdateProject, Ack),
(UpdateWorktree, Ack),
+ (JoinChannelBuffer, JoinChannelBufferResponse),
+ (LeaveChannelBuffer, Ack)
);
entity_messages!(
@@ -370,6 +378,13 @@ entity_messages!(
UpdateDiffBase
);
+ entity_messages!(
+ channel_id,
+ UpdateChannelBuffer,
+ RemoveChannelBufferCollaborator,
+ AddChannelBufferCollaborator
+ );
const KIB: usize = 1024;
const MIB: usize = KIB * 1024;
const MAX_BUFFER_LEN: usize = MIB;

View File

@@ -2,7 +2,7 @@ use std::{cmp::Ordering, fmt::Debug};
use crate::{Bias, Dimension, Edit, Item, KeyedItem, SeekTarget, SumTree, Summary};
- #[derive(Clone, Debug, PartialEq, Eq)]
+ #[derive(Clone, PartialEq, Eq)]
pub struct TreeMap<K, V>(SumTree<MapEntry<K, V>>)
where
K: Clone + Debug + Default + Ord,
@@ -162,6 +162,16 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
}
}
+ impl<K: Debug, V: Debug> Debug for TreeMap<K, V>
+ where
+ K: Clone + Debug + Default + Ord,
+ V: Clone + Debug,
+ {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_map().entries(self.iter()).finish()
+ }
+ }
#[derive(Debug)]
struct MapSeekTargetAdaptor<'a, T>(&'a T);
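
The new `Debug` impl above formats a `TreeMap` as its entries rather than deriving `Debug`, which would dump the underlying `SumTree` nodes. A self-contained illustration of the same `Formatter::debug_map` technique, using a `BTreeMap`-backed stand-in rather than the real type:

```rust
use std::collections::BTreeMap;
use std::fmt;

struct SimpleTreeMap<K, V>(BTreeMap<K, V>);

impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for SimpleTreeMap<K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Print key/value entries instead of the wrapper's internal structure.
        f.debug_map().entries(self.0.iter()).finish()
    }
}

fn main() {
    let mut map = SimpleTreeMap(BTreeMap::new());
    map.0.insert("buffer_id", 7);
    map.0.insert("replica_id", 2);
    // Prints: {"buffer_id": 7, "replica_id": 2}
    println!("{:?}", map);
}
```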

View File

@@ -12,7 +12,7 @@ mod undo_map;
pub use anchor::*;
use anyhow::{anyhow, Result};
- use clock::ReplicaId;
+ pub use clock::ReplicaId;
use collections::{HashMap, HashSet};
use fs::LineEnding;
use locator::Locator;

View File

@@ -756,6 +756,7 @@ pub struct Editor {
pub line_number: Color,
pub line_number_active: Color,
pub guest_selections: Vec<SelectionStyle>,
+ pub absent_selection: SelectionStyle,
pub syntax: Arc<SyntaxTheme>,
pub hint: HighlightStyle,
pub suggestion: HighlightStyle,

View File

@@ -391,7 +391,7 @@ mod test {
the lazy dog"
})
.await;
- let cursor = cx.update_editor(|editor, _| editor.pixel_position_of_cursor());
+ let cursor = cx.update_editor(|editor, cx| editor.pixel_position_of_cursor(cx));
// entering visual mode should select the character
// under cursor
@@ -400,7 +400,7 @@ mod test {
fox jumps over
the lazy dog"})
.await;
- cx.update_editor(|editor, _| assert_eq!(cursor, editor.pixel_position_of_cursor()));
+ cx.update_editor(|editor, cx| assert_eq!(cursor, editor.pixel_position_of_cursor(cx)));
// forwards motions should extend the selection
cx.simulate_shared_keystrokes(["w", "j"]).await;
@@ -430,7 +430,7 @@ mod test {
b
"})
.await;
- let cursor = cx.update_editor(|editor, _| editor.pixel_position_of_cursor());
+ let cursor = cx.update_editor(|editor, cx| editor.pixel_position_of_cursor(cx));
cx.simulate_shared_keystrokes(["v"]).await;
cx.assert_shared_state(indoc! {"
a
@@ -438,7 +438,7 @@ mod test {
ˇ»b
"})
.await;
- cx.update_editor(|editor, _| assert_eq!(cursor, editor.pixel_position_of_cursor()));
+ cx.update_editor(|editor, cx| assert_eq!(cursor, editor.pixel_position_of_cursor(cx)));
// toggles off again
cx.simulate_shared_keystrokes(["v"]).await;
@@ -510,7 +510,7 @@ mod test {
b
ˇ"})
.await;
- let cursor = cx.update_editor(|editor, _| editor.pixel_position_of_cursor());
+ let cursor = cx.update_editor(|editor, cx| editor.pixel_position_of_cursor(cx));
cx.simulate_shared_keystrokes(["shift-v"]).await;
cx.assert_shared_state(indoc! {"
a
@@ -518,7 +518,7 @@ mod test {
ˇ"})
.await;
assert_eq!(cx.mode(), cx.neovim_mode().await);
- cx.update_editor(|editor, _| assert_eq!(cursor, editor.pixel_position_of_cursor()));
+ cx.update_editor(|editor, cx| assert_eq!(cursor, editor.pixel_position_of_cursor(cx)));
cx.simulate_shared_keystrokes(["x"]).await;
cx.assert_shared_state(indoc! {"
a

View File

@@ -22,6 +22,7 @@ test-support = [
db = { path = "../db" }
call = { path = "../call" }
client = { path = "../client" }
+ channel = { path = "../channel" }
collections = { path = "../collections" }
context_menu = { path = "../context_menu" }
drag_and_drop = { path = "../drag_and_drop" }

View File

@@ -158,9 +158,7 @@ pub trait Item: View {
fn should_update_tab_on_event(_: &Self::Event) -> bool {
false
}
- fn is_edit_event(_: &Self::Event) -> bool {
- false
- }
fn act_as_type<'a>(
&'a self,
type_id: TypeId,
@@ -205,7 +203,7 @@ pub trait Item: View {
fn show_toolbar(&self) -> bool {
true
}
- fn pixel_position_of_cursor(&self) -> Option<Vector2F> {
+ fn pixel_position_of_cursor(&self, _: &AppContext) -> Option<Vector2F> {
None
}
}
@@ -623,7 +621,7 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
}
fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F> {
- self.read(cx).pixel_position_of_cursor()
+ self.read(cx).pixel_position_of_cursor(cx)
}
}
@@ -674,7 +672,7 @@ pub trait FollowableItem: Item {
fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant>;
fn from_state_proto(
pane: ViewHandle<Pane>,
- project: ModelHandle<Project>,
+ project: ViewHandle<Workspace>,
id: ViewId,
state: &mut Option<proto::view::Variant>,
cx: &mut AppContext,
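
The `Item` trait's `pixel_position_of_cursor` hook now takes a context argument with a `None` default, so implementers can consult app state while answering. A stripped-down model of that shape; the types here are placeholders (the `zoom` field is invented for illustration), not the real workspace or gpui definitions.

```rust
struct AppContext {
    zoom: f32, // made-up field, just to show the hook reading app state
}

#[derive(Debug, PartialEq)]
struct Vector2F(f32, f32);

trait Item {
    fn pixel_position_of_cursor(&self, _cx: &AppContext) -> Option<Vector2F> {
        None // most items have no cursor; editor-like items override this
    }
}

struct Terminal;
impl Item for Terminal {}

struct Editor {
    cursor: Vector2F,
}

impl Item for Editor {
    fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F> {
        Some(Vector2F(self.cursor.0 * cx.zoom, self.cursor.1 * cx.zoom))
    }
}

fn main() {
    let cx = AppContext { zoom: 2.0 };
    assert_eq!(Terminal.pixel_position_of_cursor(&cx), None);
    let editor = Editor { cursor: Vector2F(10.0, 5.0) };
    assert_eq!(editor.pixel_position_of_cursor(&cx), Some(Vector2F(20.0, 10.0)));
}
```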

View File

@@ -12,9 +12,10 @@ mod workspace_settings;
use anyhow::{anyhow, Context, Result};
use call::ActiveCall;
+ use channel::ChannelStore;
use client::{
proto::{self, PeerId},
- ChannelStore, Client, TypedEnvelope, UserStore,
+ Client, TypedEnvelope, UserStore,
};
use collections::{hash_map, HashMap, HashSet};
use drag_and_drop::DragAndDrop;
@@ -344,7 +345,7 @@ pub fn register_project_item<I: ProjectItem>(cx: &mut AppContext) {
type FollowableItemBuilder = fn(
ViewHandle<Pane>,
- ModelHandle<Project>,
+ ViewHandle<Workspace>,
ViewId,
&mut Option<proto::view::Variant>,
&mut AppContext,
@@ -361,8 +362,8 @@ pub fn register_followable_item<I: FollowableItem>(cx: &mut AppContext) {
builders.insert(
TypeId::of::<I>(),
(
- |pane, project, id, state, cx| {
- I::from_state_proto(pane, project, id, state, cx).map(|task| {
+ |pane, workspace, id, state, cx| {
+ I::from_state_proto(pane, workspace, id, state, cx).map(|task| {
cx.foreground()
.spawn(async move { Ok(Box::new(task.await?) as Box<_>) })
})
@@ -2847,7 +2848,13 @@ impl Workspace {
views: Vec<proto::View>,
cx: &mut AsyncAppContext,
) -> Result<()> {
- let project = this.read_with(cx, |this, _| this.project.clone())?;
+ let this = this
+ .upgrade(cx)
+ .ok_or_else(|| anyhow!("workspace dropped"))?;
+ let project = this
+ .read_with(cx, |this, _| this.project.clone())
+ .ok_or_else(|| anyhow!("window dropped"))?;
let replica_id = project
.read_with(cx, |project, _| {
project
@@ -2873,12 +2880,11 @@ impl Workspace {
let id = ViewId::from_proto(id.clone())?;
let mut variant = view.variant.clone();
if variant.is_none() {
- Err(anyhow!("missing variant"))?;
+ Err(anyhow!("missing view variant"))?;
}
for build_item in &item_builders {
- let task = cx.update(|cx| {
- build_item(pane.clone(), project.clone(), id, &mut variant, cx)
- });
+ let task = cx
+ .update(|cx| build_item(pane.clone(), this.clone(), id, &mut variant, cx));
if let Some(task) = task {
item_tasks.push(task);
leader_view_ids.push(id);
@@ -2906,7 +2912,7 @@ impl Workspace {
}
Some(())
- })?;
+ });
}
Ok(())
}
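
The change above upgrades the weak workspace handle first and turns a dropped handle into a descriptive error, rather than relying on a bare `?`. A reduced sketch of the same pattern with plain `Arc`/`Weak` instead of gpui handles; it assumes only the `anyhow` crate, which these crates already depend on.

```rust
use std::sync::{Arc, Weak};

use anyhow::{anyhow, Result};

struct Workspace {
    project: Arc<String>,
}

fn process_views(workspace: Weak<Workspace>) -> Result<Arc<String>> {
    // Upgrade up front and report a clear error if the workspace is gone.
    let workspace = workspace
        .upgrade()
        .ok_or_else(|| anyhow!("workspace dropped"))?;
    Ok(workspace.project.clone())
}

fn main() -> Result<()> {
    let workspace = Arc::new(Workspace {
        project: Arc::new("zed".to_string()),
    });
    let weak = Arc::downgrade(&workspace);
    assert_eq!(*process_views(weak.clone())?, "zed");

    drop(workspace);
    assert!(process_views(weak).is_err()); // workspace dropped
    Ok(())
}
```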

View File

@@ -21,6 +21,7 @@ activity_indicator = { path = "../activity_indicator" }
auto_update = { path = "../auto_update" }
breadcrumbs = { path = "../breadcrumbs" }
call = { path = "../call" }
+ channel = { path = "../channel" }
cli = { path = "../cli" }
collab_ui = { path = "../collab_ui" }
collections = { path = "../collections" }

View File

@@ -3,13 +3,12 @@
use anyhow::{anyhow, Context, Result};
use backtrace::Backtrace;
+ use channel::ChannelStore;
use cli::{
ipc::{self, IpcSender},
CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME,
};
- use client::{
- self, ChannelStore, TelemetrySettings, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN,
- };
+ use client::{self, TelemetrySettings, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN};
use db::kvp::KEY_VALUE_STORE;
use editor::{scroll::autoscroll::Autoscroll, Editor};
use futures::{
@@ -159,6 +158,7 @@ fn main() {
outline::init(cx);
project_symbols::init(cx);
project_panel::init(Assets, cx);
+ channel::init(&client);
diagnostics::init(cx);
search::init(cx);
semantic_index::init(fs.clone(), http.clone(), languages.clone(), cx);

View File

@@ -184,6 +184,7 @@ export default function editor(): any {
theme.players[6],
theme.players[7],
],
+ absent_selection: theme.players[7],
autocomplete: {
background: background(theme.middle),
corner_radius: 8,