Handle unshared projects when rejoining a room

Also, construct remote projects via the room, to guarantee
that the room can manage the projects' sharing lifecycle.

Co-authored-by: Antonio Scandurra <antonio@zed.dev>
Author: Max Brunsfeld 2022-12-20 11:10:46 -08:00
Parent: 9d15b3d295
commit 55ebfe8321
7 changed files with 120 additions and 48 deletions

2
Cargo.lock generated
View File

@ -820,8 +820,10 @@ dependencies = [
"async-broadcast", "async-broadcast",
"client", "client",
"collections", "collections",
"fs",
"futures 0.3.25", "futures 0.3.25",
"gpui", "gpui",
"language",
"live_kit_client", "live_kit_client",
"log", "log",
"media", "media",

View File

@ -23,6 +23,8 @@ collections = { path = "../collections" }
gpui = { path = "../gpui" } gpui = { path = "../gpui" }
log = "0.4" log = "0.4"
live_kit_client = { path = "../live_kit_client" } live_kit_client = { path = "../live_kit_client" }
fs = { path = "../fs" }
language = { path = "../language" }
media = { path = "../media" } media = { path = "../media" }
project = { path = "../project" } project = { path = "../project" }
util = { path = "../util" } util = { path = "../util" }
@ -34,6 +36,8 @@ postage = { version = "0.4.1", features = ["futures-traits"] }
[dev-dependencies] [dev-dependencies]
client = { path = "../client", features = ["test-support"] } client = { path = "../client", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] } collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] }
live_kit_client = { path = "../live_kit_client", features = ["test-support"] } live_kit_client = { path = "../live_kit_client", features = ["test-support"] }

View File

@ -8,10 +8,12 @@ use client::{
Client, TypedEnvelope, User, UserStore, Client, TypedEnvelope, User, UserStore,
}; };
use collections::{BTreeMap, HashMap, HashSet}; use collections::{BTreeMap, HashMap, HashSet};
use fs::Fs;
use futures::{FutureExt, StreamExt}; use futures::{FutureExt, StreamExt};
use gpui::{ use gpui::{
AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle,
}; };
use language::LanguageRegistry;
use live_kit_client::{LocalTrackPublication, LocalVideoTrack, RemoteVideoTrackUpdate}; use live_kit_client::{LocalTrackPublication, LocalVideoTrack, RemoteVideoTrackUpdate};
use postage::stream::Stream; use postage::stream::Stream;
use project::Project; use project::Project;
@ -523,6 +525,20 @@ impl Room {
} }
for unshared_project_id in old_projects.difference(&new_projects) { for unshared_project_id in old_projects.difference(&new_projects) {
this.joined_projects.retain(|project| {
if let Some(project) = project.upgrade(cx) {
project.update(cx, |project, cx| {
if project.remote_id() == Some(*unshared_project_id) {
project.disconnected_from_host(cx);
false
} else {
true
}
})
} else {
false
}
});
cx.emit(Event::RemoteProjectUnshared { cx.emit(Event::RemoteProjectUnshared {
project_id: *unshared_project_id, project_id: *unshared_project_id,
}); });
@ -699,15 +715,30 @@ impl Room {
}) })
} }
pub fn joined_project(&mut self, project: ModelHandle<Project>, cx: &mut ModelContext<Self>) { pub fn join_project(
self.joined_projects.retain(|project| { &mut self,
if let Some(project) = project.upgrade(cx) { id: u64,
!project.read(cx).is_read_only() language_registry: Arc<LanguageRegistry>,
} else { fs: Arc<dyn Fs>,
false cx: &mut ModelContext<Self>,
} ) -> Task<Result<ModelHandle<Project>>> {
}); let client = self.client.clone();
self.joined_projects.insert(project.downgrade()); let user_store = self.user_store.clone();
cx.spawn(|this, mut cx| async move {
let project =
Project::remote(id, client, user_store, language_registry, fs, cx.clone()).await?;
this.update(&mut cx, |this, cx| {
this.joined_projects.retain(|project| {
if let Some(project) = project.upgrade(cx) {
!project.read(cx).is_read_only()
} else {
false
}
});
this.joined_projects.insert(project.downgrade());
});
Ok(project)
})
} }
pub(crate) fn share_project( pub(crate) fn share_project(

View File

@ -1440,9 +1440,20 @@ impl Database {
}); });
} }
// TODO: handle unshared projects project::Entity::delete_many()
// TODO: handle left projects .filter(
Condition::all()
.add(project::Column::RoomId.eq(room_id))
.add(project::Column::HostUserId.eq(user_id))
.add(
project::Column::Id
.is_not_in(reshared_projects.iter().map(|project| project.id)),
),
)
.exec(&*tx)
.await?;
// TODO: handle left projects
let room = self.get_room(room_id, &tx).await?; let room = self.get_room(room_id, &tx).await?;
Ok(( Ok((
room_id, room_id,
@ -2971,6 +2982,7 @@ impl ProjectCollaborator {
} }
} }
#[derive(Debug)]
pub struct LeftProject { pub struct LeftProject {
pub id: ProjectId, pub id: ProjectId,
pub host_user_id: UserId, pub host_user_id: UserId,

View File

@ -1324,7 +1324,7 @@ async fn test_host_reconnect(
client_a client_a
.fs .fs
.insert_tree( .insert_tree(
"/root", "/root-1",
json!({ json!({
"dir1": { "dir1": {
"a.txt": "a-contents", "a.txt": "a-contents",
@ -1343,17 +1343,32 @@ async fn test_host_reconnect(
}), }),
) )
.await; .await;
client_a
.fs
.insert_tree(
"/root-2",
json!({
"1.txt": "1-contents",
}),
)
.await;
let active_call_a = cx_a.read(ActiveCall::global); let active_call_a = cx_a.read(ActiveCall::global);
let (project_a, _) = client_a.build_local_project("/root/dir1", cx_a).await; let (project_a1, _) = client_a.build_local_project("/root-1/dir1", cx_a).await;
let (project_a2, _) = client_a.build_local_project("/root-2", cx_a).await;
let worktree_a1 = let worktree_a1 =
project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap()); project_a1.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
let project_id = active_call_a let project1_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) .update(cx_a, |call, cx| call.share_project(project_a1.clone(), cx))
.await
.unwrap();
let project2_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a2.clone(), cx))
.await .await
.unwrap(); .unwrap();
let project_b = client_b.build_remote_project(project_id, cx_b).await; let project_b1 = client_b.build_remote_project(project1_id, cx_b).await;
let project_b2 = client_b.build_remote_project(project2_id, cx_b).await;
deterministic.run_until_parked(); deterministic.run_until_parked();
let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| { let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| {
@ -1365,11 +1380,11 @@ async fn test_host_reconnect(
server.forbid_connections(); server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap()); server.disconnect_client(client_a.peer_id().unwrap());
deterministic.advance_clock(RECEIVE_TIMEOUT); deterministic.advance_clock(RECEIVE_TIMEOUT);
project_a.read_with(cx_a, |project, _| { project_a1.read_with(cx_a, |project, _| {
assert!(project.is_shared()); assert!(project.is_shared());
assert_eq!(project.collaborators().len(), 1); assert_eq!(project.collaborators().len(), 1);
}); });
project_b.read_with(cx_b, |project, _| { project_b1.read_with(cx_b, |project, _| {
assert!(!project.is_read_only()); assert!(!project.is_read_only());
assert_eq!(project.collaborators().len(), 1); assert_eq!(project.collaborators().len(), 1);
}); });
@ -1377,11 +1392,11 @@ async fn test_host_reconnect(
assert!(tree.as_local().unwrap().is_shared()) assert!(tree.as_local().unwrap().is_shared())
}); });
// While disconnected, add/remove files and worktrees from client A's project. // While disconnected, add and remove files from client A's project.
client_a client_a
.fs .fs
.insert_tree( .insert_tree(
"/root/dir1/subdir2", "/root-1/dir1/subdir2",
json!({ json!({
"f.txt": "f-contents", "f.txt": "f-contents",
"g.txt": "g-contents", "g.txt": "g-contents",
@ -1393,7 +1408,7 @@ async fn test_host_reconnect(
client_a client_a
.fs .fs
.remove_dir( .remove_dir(
"/root/dir1/subdir1".as_ref(), "/root-1/dir1/subdir1".as_ref(),
RemoveOptions { RemoveOptions {
recursive: true, recursive: true,
..Default::default() ..Default::default()
@ -1401,9 +1416,11 @@ async fn test_host_reconnect(
) )
.await .await
.unwrap(); .unwrap();
let (worktree_a2, _) = project_a
// While disconnected, add a worktree to client A's project.
let (worktree_a2, _) = project_a1
.update(cx_a, |p, cx| { .update(cx_a, |p, cx| {
p.find_or_create_local_worktree("/root/dir2", true, cx) p.find_or_create_local_worktree("/root-1/dir2", true, cx)
}) })
.await .await
.unwrap(); .unwrap();
@ -1416,6 +1433,9 @@ async fn test_host_reconnect(
}); });
deterministic.run_until_parked(); deterministic.run_until_parked();
// While disconnected, close project 2
cx_a.update(|_| drop(project_a2));
// Client A reconnects. Their project is re-shared, and client B re-joins it. // Client A reconnects. Their project is re-shared, and client B re-joins it.
server.allow_connections(); server.allow_connections();
client_a client_a
@ -1423,7 +1443,7 @@ async fn test_host_reconnect(
.await .await
.unwrap(); .unwrap();
deterministic.run_until_parked(); deterministic.run_until_parked();
project_a.read_with(cx_a, |project, cx| { project_a1.read_with(cx_a, |project, cx| {
assert!(project.is_shared()); assert!(project.is_shared());
assert_eq!( assert_eq!(
worktree_a1 worktree_a1
@ -1456,7 +1476,7 @@ async fn test_host_reconnect(
vec!["x", "y", "z"] vec!["x", "y", "z"]
); );
}); });
project_b.read_with(cx_b, |project, cx| { project_b1.read_with(cx_b, |project, cx| {
assert!(!project.is_read_only()); assert!(!project.is_read_only());
assert_eq!( assert_eq!(
project project
@ -1493,6 +1513,7 @@ async fn test_host_reconnect(
vec!["x", "y", "z"] vec!["x", "y", "z"]
); );
}); });
project_b2.read_with(cx_b, |project, _| assert!(project.is_read_only()));
} }
#[gpui::test(iterations = 10)] #[gpui::test(iterations = 10)]
@ -6930,17 +6951,18 @@ impl TestClient {
host_project_id: u64, host_project_id: u64,
guest_cx: &mut TestAppContext, guest_cx: &mut TestAppContext,
) -> ModelHandle<Project> { ) -> ModelHandle<Project> {
let project_b = guest_cx.spawn(|cx| { let active_call = guest_cx.read(ActiveCall::global);
Project::remote( let room = active_call.read_with(guest_cx, |call, _| call.room().unwrap().clone());
room.update(guest_cx, |room, cx| {
room.join_project(
host_project_id, host_project_id,
self.client.clone(),
self.user_store.clone(),
self.language_registry.clone(), self.language_registry.clone(),
FakeFs::new(cx.background()), self.fs.clone(),
cx, cx,
) )
}); })
project_b.await.unwrap() .await
.unwrap()
} }
fn build_workspace( fn build_workspace(

View File

@ -7,10 +7,10 @@ mod incoming_call_notification;
mod notifications; mod notifications;
mod project_shared_notification; mod project_shared_notification;
use anyhow::anyhow;
use call::ActiveCall; use call::ActiveCall;
pub use collab_titlebar_item::{CollabTitlebarItem, ToggleCollaborationMenu}; pub use collab_titlebar_item::{CollabTitlebarItem, ToggleCollaborationMenu};
use gpui::MutableAppContext; use gpui::MutableAppContext;
use project::Project;
use std::sync::Arc; use std::sync::Arc;
use workspace::{AppState, JoinProject, ToggleFollow, Workspace}; use workspace::{AppState, JoinProject, ToggleFollow, Workspace};
@ -39,15 +39,20 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
let workspace = if let Some(existing_workspace) = existing_workspace { let workspace = if let Some(existing_workspace) = existing_workspace {
existing_workspace existing_workspace
} else { } else {
let project = Project::remote( let active_call = cx.read(ActiveCall::global);
project_id, let room = active_call
app_state.client.clone(), .read_with(&cx, |call, _| call.room().cloned())
app_state.user_store.clone(), .ok_or_else(|| anyhow!("not in a call"))?;
app_state.languages.clone(), let project = room
app_state.fs.clone(), .update(&mut cx, |room, cx| {
cx.clone(), room.join_project(
) project_id,
.await?; app_state.languages.clone(),
app_state.fs.clone(),
cx,
)
})
.await?;
let (_, workspace) = cx.add_window((app_state.build_window_options)(), |cx| { let (_, workspace) = cx.add_window((app_state.build_window_options)(), |cx| {
let mut workspace = Workspace::new( let mut workspace = Workspace::new(
@ -68,10 +73,6 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
workspace.update(&mut cx, |workspace, cx| { workspace.update(&mut cx, |workspace, cx| {
if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() { if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
room.update(cx, |room, cx| {
room.joined_project(workspace.project().clone(), cx);
});
let follow_peer_id = room let follow_peer_id = room
.read(cx) .read(cx)
.remote_participants() .remote_participants()

View File

@ -1146,7 +1146,7 @@ impl Project {
} }
} }
fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) { pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
if let Some(ProjectClientState::Remote { if let Some(ProjectClientState::Remote {
sharing_has_stopped, sharing_has_stopped,
.. ..