Added center group deserialization
This commit is contained in:
parent: 75d3d46b1b
commit: 6530658c3e

Cargo.lock (generated): 16 changed lines
@@ -327,6 +327,17 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "async-recursion"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2cda8f4bcc10624c4e85bc66b3f452cca98cfa5ca002dc83a16aad2367641bea"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
 [[package]]
 name = "async-stream"
 version = "0.3.3"

@@ -943,7 +954,7 @@ name = "client"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "async-recursion",
+ "async-recursion 0.3.2",
  "async-tungstenite",
  "collections",
  "db",

@@ -7624,6 +7635,7 @@ name = "workspace"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "async-recursion 1.0.0",
  "bincode",
  "call",
  "client",

@@ -7697,7 +7709,7 @@ dependencies = [
  "anyhow",
  "assets",
  "async-compression",
- "async-recursion",
+ "async-recursion 0.3.2",
  "async-trait",
  "auto_update",
  "backtrace",
@@ -1,6 +1,6 @@
 pub mod kvp;
 
-use std::fs::create_dir_all;
+use std::fs::{create_dir_all, remove_dir_all};
 use std::path::Path;
 
 #[cfg(any(test, feature = "test-support"))]

@@ -10,7 +10,7 @@ use indoc::indoc;
 use sqlez::connection::Connection;
 use sqlez::domain::{Domain, Migrator};
 use sqlez::thread_safe_connection::ThreadSafeConnection;
-use util::channel::RELEASE_CHANNEL_NAME;
+use util::channel::{ReleaseChannel, RELEASE_CHANNEL, RELEASE_CHANNEL_NAME};
 use util::paths::DB_DIR;
 
 const INITIALIZE_QUERY: &'static str = indoc! {"

@@ -26,18 +26,18 @@ pub fn open_file_db<M: Migrator>() -> ThreadSafeConnection<M> {
     // Use 0 for now. Will implement incrementing and clearing of old db files soon TM
     let current_db_dir = (*DB_DIR).join(Path::new(&format!("0-{}", *RELEASE_CHANNEL_NAME)));
 
-    // if *RELEASE_CHANNEL == ReleaseChannel::Dev {
-    //     remove_dir_all(&current_db_dir).ok();
-    // }
+    if *RELEASE_CHANNEL == ReleaseChannel::Dev && std::env::var("WIPE_DB").is_ok() {
+        remove_dir_all(&current_db_dir).ok();
+    }
 
     create_dir_all(&current_db_dir).expect("Should be able to create the database directory");
     let db_path = current_db_dir.join(Path::new("db.sqlite"));
 
-    ThreadSafeConnection::new(db_path.to_string_lossy().as_ref(), true)
+    ThreadSafeConnection::new(Some(db_path.to_string_lossy().as_ref()), true)
         .with_initialize_query(INITIALIZE_QUERY)
 }
 
-pub fn open_memory_db<D: Domain>(db_name: &str) -> ThreadSafeConnection<D> {
+pub fn open_memory_db<M: Migrator>(db_name: Option<&str>) -> ThreadSafeConnection<M> {
     ThreadSafeConnection::new(db_name, false).with_initialize_query(INITIALIZE_QUERY)
 }
@@ -65,7 +65,11 @@ macro_rules! connection {
         }
 
         lazy_static! {
-            pub static ref $id: $t = $t(::db::open_file_db());
+            pub static ref $id: $t = $t(if cfg!(any(test, feature = "test-support")) {
+                ::db::open_memory_db(None)
+            } else {
+                ::db::open_file_db()
+            });
         }
     };
 }
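Aside: a minimal sketch of the call shapes introduced above, not part of the diff. MyDomain is a hypothetical Migrator implementor standing in for a real domain type; only the Option<&str> handling is the point here.

// Editor's illustration only; MyDomain is an assumed Migrator implementor.
fn open_connections() {
    // A named in-memory database, as the updated tests below request it.
    let _named = db::open_memory_db::<MyDomain>(Some("my_test"));
    // A private, anonymous in-memory database, as the connection! macro now
    // uses under cfg!(any(test, feature = "test-support")).
    let _anonymous = db::open_memory_db::<MyDomain>(None);
    // The persistent on-disk database keeps its zero-argument entry point.
    let _persistent = db::open_file_db::<MyDomain>();
}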
@@ -61,7 +61,7 @@ mod tests {
 
     #[test]
     fn test_kvp() -> Result<()> {
-        let db = KeyValueStore(crate::open_memory_db("test_kvp"));
+        let db = KeyValueStore(crate::open_memory_db(Some("test_kvp")));
 
         assert_eq!(db.read_kvp("key-1").unwrap(), None);
 
@@ -554,7 +554,8 @@ impl Item for Editor {
     }
 
     fn serialized_item_kind() -> Option<&'static str> {
-        Some("Editor")
+        // TODO: Some("Editor")
+        None
     }
 
     fn deserialize(
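Aside: a small self-contained sketch (not from this commit) of why returning None drops the editor from serialization: the workspace serializer, shown further down, builds items with item_handle.serialized_item_kind()? inside a filter_map, so a None kind is silently skipped.

// Stand-alone illustration of the filter_map + `?` pattern used by the
// workspace serializer: items whose kind is None are skipped, not errors.
fn serialized_kinds(kinds: &[Option<&'static str>]) -> Vec<&'static str> {
    kinds
        .iter()
        .filter_map(|kind| Some((*kind)?)) // `?` exits the closure with None
        .collect()
}

fn main() {
    // With this commit the editor reports None, so only other kinds survive.
    // "OtherItem" is an illustrative label, not a real item kind.
    assert_eq!(serialized_kinds(&[Some("OtherItem"), None]), vec!["OtherItem"]);
}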
@@ -42,11 +42,16 @@ impl Connection {
     /// Attempts to open the database at uri. If it fails, a shared memory db will be opened
     /// instead.
     pub fn open_file(uri: &str) -> Self {
-        Self::open(uri, true).unwrap_or_else(|_| Self::open_memory(uri))
+        Self::open(uri, true).unwrap_or_else(|_| Self::open_memory(Some(uri)))
     }
 
-    pub fn open_memory(uri: &str) -> Self {
-        let in_memory_path = format!("file:{}?mode=memory&cache=shared", uri);
+    pub fn open_memory(uri: Option<&str>) -> Self {
+        let in_memory_path = if let Some(uri) = uri {
+            format!("file:{}?mode=memory&cache=shared", uri)
+        } else {
+            ":memory:".to_string()
+        };
+
         Self::open(&in_memory_path, false).expect("Could not create fallback in memory db")
     }
 
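Aside: a rough usage sketch of the new signature, not part of the diff. A Some(name) database uses SQLite's shared-cache URI, so separate connections opened with the same name see the same data (the blob_round_trips test below depends on this), while None is a private ":memory:" database.

// Sketch only, using the sqlez Connection API from the hunks above.
fn open_examples() {
    let _shared_a = Connection::open_memory(Some("scratch")); // named, shareable
    let _shared_b = Connection::open_memory(Some("scratch")); // same data as _shared_a via cache=shared
    let _private = Connection::open_memory(None); // isolated ":memory:" database
}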
@@ -110,7 +115,7 @@ mod test {
 
     #[test]
     fn string_round_trips() -> Result<()> {
-        let connection = Connection::open_memory("string_round_trips");
+        let connection = Connection::open_memory(Some("string_round_trips"));
         connection
             .exec(indoc! {"
                 CREATE TABLE text (

@@ -136,7 +141,7 @@ mod test {
 
     #[test]
     fn tuple_round_trips() {
-        let connection = Connection::open_memory("tuple_round_trips");
+        let connection = Connection::open_memory(Some("tuple_round_trips"));
         connection
             .exec(indoc! {"
                 CREATE TABLE test (

@@ -170,7 +175,7 @@ mod test {
 
     #[test]
     fn bool_round_trips() {
-        let connection = Connection::open_memory("bool_round_trips");
+        let connection = Connection::open_memory(Some("bool_round_trips"));
         connection
             .exec(indoc! {"
                 CREATE TABLE bools (

@@ -196,7 +201,7 @@ mod test {
 
     #[test]
     fn backup_works() {
-        let connection1 = Connection::open_memory("backup_works");
+        let connection1 = Connection::open_memory(Some("backup_works"));
         connection1
             .exec(indoc! {"
                 CREATE TABLE blobs (

@@ -211,7 +216,7 @@ mod test {
             .unwrap();
 
         // Backup connection1 to connection2
-        let connection2 = Connection::open_memory("backup_works_other");
+        let connection2 = Connection::open_memory(Some("backup_works_other"));
         connection1.backup_main(&connection2).unwrap();
 
         // Delete the added blob and verify its deleted on the other side

@@ -224,7 +229,7 @@ mod test {
 
     #[test]
     fn multi_step_statement_works() {
-        let connection = Connection::open_memory("multi_step_statement_works");
+        let connection = Connection::open_memory(Some("multi_step_statement_works"));
 
         connection
             .exec(indoc! {"
@@ -62,7 +62,7 @@ mod test {
 
     #[test]
     fn test_migrations_are_added_to_table() {
-        let connection = Connection::open_memory("migrations_are_added_to_table");
+        let connection = Connection::open_memory(Some("migrations_are_added_to_table"));
 
         // Create first migration with a single step and run it
         connection

@@ -131,7 +131,7 @@ mod test {
 
     #[test]
     fn test_migration_setup_works() {
-        let connection = Connection::open_memory("migration_setup_works");
+        let connection = Connection::open_memory(Some("migration_setup_works"));
 
         connection
             .exec(indoc! {"

@@ -163,7 +163,7 @@ mod test {
 
     #[test]
     fn migrations_dont_rerun() {
-        let connection = Connection::open_memory("migrations_dont_rerun");
+        let connection = Connection::open_memory(Some("migrations_dont_rerun"));
 
         // Create migration which clears a tabl
 

@@ -222,7 +222,7 @@ mod test {
 
     #[test]
     fn changed_migration_fails() {
-        let connection = Connection::open_memory("changed_migration_fails");
+        let connection = Connection::open_memory(Some("changed_migration_fails"));
 
         // Create a migration with two steps and run it
         connection
@@ -59,7 +59,7 @@ mod tests {
 
     #[test]
     fn test_nested_savepoints() -> Result<()> {
-        let connection = Connection::open_memory("nested_savepoints");
+        let connection = Connection::open_memory(Some("nested_savepoints"));
 
         connection
             .exec(indoc! {"
@@ -352,7 +352,7 @@ mod test {
 
     #[test]
     fn blob_round_trips() {
-        let connection1 = Connection::open_memory("blob_round_trips");
+        let connection1 = Connection::open_memory(Some("blob_round_trips"));
         connection1
             .exec(indoc! {"
                 CREATE TABLE blobs (

@@ -369,7 +369,7 @@ mod test {
         assert_eq!(write.step().unwrap(), StepResult::Done);
 
         // Read the blob from the
-        let connection2 = Connection::open_memory("blob_round_trips");
+        let connection2 = Connection::open_memory(Some("blob_round_trips"));
         let mut read = Statement::prepare(&connection2, "SELECT * FROM blobs").unwrap();
         assert_eq!(read.step().unwrap(), StepResult::Row);
         assert_eq!(read.column_blob(0).unwrap(), blob);

@@ -383,7 +383,7 @@ mod test {
 
     #[test]
     pub fn maybe_returns_options() {
-        let connection = Connection::open_memory("maybe_returns_options");
+        let connection = Connection::open_memory(Some("maybe_returns_options"));
         connection
             .exec(indoc! {"
                 CREATE TABLE texts (
@@ -9,7 +9,7 @@ use crate::{
 };
 
 pub struct ThreadSafeConnection<M: Migrator> {
-    uri: Arc<str>,
+    uri: Option<Arc<str>>,
     persistent: bool,
     initialize_query: Option<&'static str>,
     connection: Arc<ThreadLocal<Connection>>,

@@ -20,9 +20,13 @@ unsafe impl<T: Migrator> Send for ThreadSafeConnection<T> {}
 unsafe impl<T: Migrator> Sync for ThreadSafeConnection<T> {}
 
 impl<M: Migrator> ThreadSafeConnection<M> {
-    pub fn new(uri: &str, persistent: bool) -> Self {
+    pub fn new(uri: Option<&str>, persistent: bool) -> Self {
+        if persistent == true && uri == None {
+            // This panic is securing the unwrap in open_file(), don't remove it!
+            panic!("Cannot create a persistent connection without a URI")
+        }
         Self {
-            uri: Arc::from(uri),
+            uri: uri.map(|str| Arc::from(str)),
             persistent,
             initialize_query: None,
             connection: Default::default(),

@@ -41,13 +45,14 @@ impl<M: Migrator> ThreadSafeConnection<M> {
     /// called from the deref function.
     /// If opening fails, the connection falls back to a shared memory connection
     fn open_file(&self) -> Connection {
-        Connection::open_file(self.uri.as_ref())
+        // This unwrap is secured by a panic in the constructor. Be careful if you remove it!
+        Connection::open_file(self.uri.as_ref().unwrap())
     }
 
     /// Opens a shared memory connection using the file path as the identifier. This unwraps
     /// as we expect it always to succeed
     fn open_shared_memory(&self) -> Connection {
-        Connection::open_memory(self.uri.as_ref())
+        Connection::open_memory(self.uri.as_ref().map(|str| str.deref()))
     }
 
     // Open a new connection for the given domain, leaving this
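Aside: a rough sketch of the constructor invariant introduced above, not part of the diff. The panic in new() encodes "persistent implies a URI", which is what keeps the unwrap in open_file() from firing. MyDomain is a hypothetical Migrator implementor.

// Sketch only; MyDomain stands in for a real Migrator implementor.
fn constructor_examples() {
    // In-memory connections may omit the uri entirely.
    let _anonymous = ThreadSafeConnection::<MyDomain>::new(None, false);
    // Persistent connections must name a file...
    let _persistent = ThreadSafeConnection::<MyDomain>::new(Some("/tmp/db.sqlite"), true);
    // ...and this combination panics by design, guarding open_file()'s unwrap:
    // ThreadSafeConnection::<MyDomain>::new(None, true);
}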
@@ -33,6 +33,7 @@ settings = { path = "../settings" }
 sqlez = { path = "../sqlez" }
 theme = { path = "../theme" }
 util = { path = "../util" }
+async-recursion = "1.0.0"
 bincode = "1.2.1"
 anyhow = "1.0.38"
 futures = "0.3"
@@ -13,10 +13,14 @@ use theme::Theme;
 
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct PaneGroup {
-    root: Member,
+    pub(crate) root: Member,
 }
 
 impl PaneGroup {
+    pub(crate) fn with_root(root: Member) -> Self {
+        Self { root }
+    }
+
     pub fn new(pane: ViewHandle<Pane>) -> Self {
         Self {
             root: Member::Pane(pane),

@@ -85,7 +89,7 @@ impl PaneGroup {
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
-enum Member {
+pub(crate) enum Member {
     Axis(PaneAxis),
     Pane(ViewHandle<Pane>),
 }

@@ -276,9 +280,9 @@ impl Member {
 }
 
 #[derive(Clone, Debug, Eq, PartialEq)]
-struct PaneAxis {
-    axis: Axis,
-    members: Vec<Member>,
+pub(crate) struct PaneAxis {
+    pub axis: Axis,
+    pub members: Vec<Member>,
 }
 
 impl PaneAxis {
@@ -55,8 +55,8 @@ impl Domain for Workspace {
             CREATE TABLE panes(
                 pane_id INTEGER PRIMARY KEY,
                 workspace_id BLOB NOT NULL,
-                parent_group_id INTEGER, -- NULL, this is a dock pane
-                position INTEGER, -- NULL, this is a dock pane
+                parent_group_id INTEGER, -- NULL means that this is a dock pane
+                position INTEGER, -- NULL means that this is a dock pane
                 FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
                 ON DELETE CASCADE
                 ON UPDATE CASCADE,

@@ -164,7 +164,7 @@ impl WorkspaceDb {
             })
             .with_context(|| {
                 format!(
-                    "Update workspace with roots {:?}",
+                    "Update workspace with roots {:?} failed.",
                     workspace.workspace_id.paths()
                 )
             })
@@ -196,6 +196,17 @@ impl WorkspaceDb {
             .into_iter()
             .next()
             .context("No center pane group")
+            .map(|pane_group| {
+                // Rewrite the special case of the root being a leaf node
+                if let SerializedPaneGroup::Group { axis: Axis::Horizontal, ref children } = pane_group {
+                    if children.len() == 1 {
+                        if let Some(SerializedPaneGroup::Pane(pane)) = children.get(0) {
+                            return SerializedPaneGroup::Pane(pane.clone())
+                        }
+                    }
+                }
+                pane_group
+            })
     }
 
     fn get_pane_group_children<'a>(
@@ -242,9 +253,12 @@ impl WorkspaceDb {
         pane_group: &SerializedPaneGroup,
         parent: Option<(GroupId, usize)>,
     ) -> Result<()> {
-        if parent.is_none() && !matches!(pane_group, SerializedPaneGroup::Group { .. }) {
-            bail!("Pane groups must have a SerializedPaneGroup::Group at the root")
-        }
+        // Rewrite the root node to fit with the database
+        let pane_group = if parent.is_none() && matches!(pane_group, SerializedPaneGroup::Pane { .. }) {
+            SerializedPaneGroup::Group { axis: Axis::Horizontal, children: vec![pane_group.clone()] }
+        } else {
+            pane_group.clone()
+        };
 
         match pane_group {
             SerializedPaneGroup::Group { axis, children } => {
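Aside: this hunk and the get_center_pane_group hunk above are mirror images. Saving wraps a lone root pane in a single-child horizontal group so the database always stores a group at the root; loading unwraps a single-child horizontal group back into a bare pane. A self-contained toy model of that round trip, not part of the diff (simplified types, not the real SerializedPaneGroup):

// Toy model of the root rewrite; names and shapes are simplified stand-ins.
#[derive(Clone, Debug, PartialEq)]
enum Node {
    Group { children: Vec<Node> }, // axis omitted for brevity
    Pane(&'static str),
}

// Saving: a bare root pane is wrapped in a single-child group.
fn wrap_root(root: &Node) -> Node {
    match root {
        Node::Pane(_) => Node::Group { children: vec![root.clone()] },
        other => other.clone(),
    }
}

// Loading: a single-child group is unwrapped back into the pane.
fn unwrap_root(root: Node) -> Node {
    if let Node::Group { ref children } = root {
        if children.len() == 1 {
            if let Some(Node::Pane(pane)) = children.get(0) {
                return Node::Pane(*pane);
            }
        }
    }
    root
}

fn main() {
    let original = Node::Pane("single pane");
    assert_eq!(unwrap_root(wrap_root(&original)), original);
}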
@@ -254,7 +268,7 @@ impl WorkspaceDb {
                     INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis)
                     VALUES (?, ?, ?, ?)
                     RETURNING group_id"})?
-                ((workspace_id, parent_id, position, *axis))?
+                ((workspace_id, parent_id, position, axis))?
                 .ok_or_else(|| anyhow!("Couldn't retrieve group_id from inserted pane_group"))?;
 
                 for (position, group) in children.iter().enumerate() {

@@ -262,7 +276,9 @@ impl WorkspaceDb {
                 }
                 Ok(())
             }
-            SerializedPaneGroup::Pane(pane) => self.save_pane(workspace_id, pane, parent),
+            SerializedPaneGroup::Pane(pane) => {
+                self.save_pane(workspace_id, &pane, parent)
+            },
         }
     }
 
@@ -324,7 +340,7 @@ impl WorkspaceDb {
 
 #[cfg(test)]
 mod tests {
-    use db::open_memory_db;
+    use db::{open_memory_db, write_db_to};
     use settings::DockAnchor;
 
     use super::*;

@@ -333,7 +349,7 @@ mod tests {
     fn test_full_workspace_serialization() {
         env_logger::try_init().ok();
 
-        let db = WorkspaceDb(open_memory_db("test_full_workspace_serialization"));
+        let db = WorkspaceDb(open_memory_db(Some("test_full_workspace_serialization")));
 
         let dock_pane = crate::persistence::model::SerializedPane {
             children: vec![

@@ -407,7 +423,7 @@ mod tests {
     fn test_workspace_assignment() {
         env_logger::try_init().ok();
 
-        let db = WorkspaceDb(open_memory_db("test_basic_functionality"));
+        let db = WorkspaceDb(open_memory_db(Some("test_basic_functionality")));
 
         let workspace_1 = SerializedWorkspace {
             workspace_id: (["/tmp", "/tmp2"]).into(),

@@ -500,7 +516,7 @@ mod tests {
     fn test_basic_dock_pane() {
         env_logger::try_init().ok();
 
-        let db = WorkspaceDb(open_memory_db("basic_dock_pane"));
+        let db = WorkspaceDb(open_memory_db(Some("basic_dock_pane")));
 
         let dock_pane = crate::persistence::model::SerializedPane {
             children: vec![

@@ -514,7 +530,7 @@ mod tests {
         let workspace = default_workspace(&["/tmp"], dock_pane, &Default::default());
 
         db.save_workspace(None, &workspace);
 
+        write_db_to(&db, "dest.db").unwrap();
         let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();
 
         assert_eq!(workspace.dock_pane, new_workspace.dock_pane);

@@ -524,7 +540,7 @@ mod tests {
     fn test_simple_split() {
         // env_logger::try_init().ok();
 
-        let db = WorkspaceDb(open_memory_db("simple_split"));
+        let db = WorkspaceDb(open_memory_db(Some("simple_split")));
 
         // -----------------
         // | 1,2 | 5,6 |
@@ -5,15 +5,20 @@ use std::{
 
 use anyhow::Result;
 
-use gpui::Axis;
+use async_recursion::async_recursion;
+use gpui::{AsyncAppContext, Axis, ModelHandle, Task, ViewHandle};
 
+use project::Project;
 use settings::DockAnchor;
 use sqlez::{
     bindable::{Bind, Column},
     statement::Statement,
 };
 use util::ResultExt;
 
-use crate::dock::DockPosition;
+use crate::{
+    dock::DockPosition, item::ItemHandle, ItemDeserializers, Member, Pane, PaneAxis, Workspace,
+};
 
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct WorkspaceId(Arc<Vec<PathBuf>>);
@@ -69,9 +74,42 @@ pub enum SerializedPaneGroup {
 
 impl Default for SerializedPaneGroup {
     fn default() -> Self {
-        Self::Group {
-            axis: Axis::Horizontal,
-            children: vec![Self::Pane(Default::default())],
-        }
+        Self::Pane(SerializedPane {
+            children: Vec::new(),
+        })
     }
 }
+
+impl SerializedPaneGroup {
+    #[async_recursion(?Send)]
+    pub(crate) async fn deserialize(
+        &self,
+        project: &ModelHandle<Project>,
+        workspace_id: &WorkspaceId,
+        workspace: &ViewHandle<Workspace>,
+        cx: &mut AsyncAppContext,
+    ) -> Member {
+        match self {
+            SerializedPaneGroup::Group { axis, children } => {
+                let mut members = Vec::new();
+                for child in children {
+                    let new_member = child
+                        .deserialize(project, workspace_id, workspace, cx)
+                        .await;
+                    members.push(new_member);
+                }
+                Member::Axis(PaneAxis {
+                    axis: *axis,
+                    members,
+                })
+            }
+            SerializedPaneGroup::Pane(serialized_pane) => {
+                let pane = workspace.update(cx, |workspace, cx| workspace.add_pane(cx));
+                serialized_pane
+                    .deserialize_to(project, &pane, workspace_id, workspace, cx)
+                    .await;
+                Member::Pane(pane)
+            }
+        }
+    }
+}
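Aside: deserialize awaits itself recursively, which a plain async fn cannot do because its future type would be infinitely sized. The #[async_recursion(?Send)] attribute from the async-recursion crate (the dependency added to Cargo.lock and the workspace Cargo.toml above) boxes the returned future, and ?Send drops the Send bound so non-Send UI handles can be held across await points. A self-contained toy example of the same pattern, not part of the diff, assuming async-recursion and futures as dependencies:

use async_recursion::async_recursion;

// Toy stand-in for a pane tree: groups contain children, leaves carry a value.
enum Tree {
    Group(Vec<Tree>),
    Leaf(i64),
}

// Without the attribute, a recursive `async fn` is rejected because the
// compiler cannot size its future; the macro boxes the future instead.
#[async_recursion(?Send)]
async fn sum(tree: &Tree) -> i64 {
    match tree {
        Tree::Leaf(value) => *value,
        Tree::Group(children) => {
            let mut total = 0;
            for child in children {
                total += sum(child).await;
            }
            total
        }
    }
}

fn main() {
    let tree = Tree::Group(vec![Tree::Leaf(1), Tree::Group(vec![Tree::Leaf(2), Tree::Leaf(3)])]);
    assert_eq!(futures::executor::block_on(sum(&tree)), 6);
}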
@@ -85,6 +123,44 @@ impl SerializedPane {
     pub fn new(children: Vec<SerializedItem>) -> Self {
         SerializedPane { children }
     }
+
+    pub async fn deserialize_to(
+        &self,
+        project: &ModelHandle<Project>,
+        pane_handle: &ViewHandle<Pane>,
+        workspace_id: &WorkspaceId,
+        workspace: &ViewHandle<Workspace>,
+        cx: &mut AsyncAppContext,
+    ) {
+        for item in self.children.iter() {
+            let project = project.clone();
+            let workspace_id = workspace_id.clone();
+            let item_handle = pane_handle
+                .update(cx, |_, cx| {
+                    if let Some(deserializer) = cx.global::<ItemDeserializers>().get(&item.kind) {
+                        deserializer(
+                            project,
+                            workspace.downgrade(),
+                            workspace_id,
+                            item.item_id,
+                            cx,
+                        )
+                    } else {
+                        Task::ready(Err(anyhow::anyhow!(
+                            "Deserializer does not exist for item kind: {}",
+                            item.kind
+                        )))
+                    }
+                })
+                .await
+                .log_err();
+            if let Some(item_handle) = item_handle {
+                workspace.update(cx, |workspace, cx| {
+                    Pane::add_item(workspace, &pane_handle, item_handle, false, false, None, cx);
+                })
+            }
+        }
+    }
 }
 
 pub type GroupId = i64;

@@ -150,7 +226,7 @@ mod tests {
 
     #[test]
    fn test_workspace_round_trips() {
-        let db = Connection::open_memory("workspace_id_round_trips");
+        let db = Connection::open_memory(Some("workspace_id_round_trips"));
 
         db.exec(indoc::indoc! {"
             CREATE TABLE workspace_id_test(
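Aside: deserialize_to resolves each item's kind through the global ItemDeserializers registry and logs, rather than fails on, kinds with no registered constructor. A self-contained sketch of that registry-by-kind pattern, not from this commit, with plain functions and strings standing in for the real globals and handles:

use std::collections::HashMap;

// Simplified stand-in for ItemDeserializers: kind string -> rebuild function.
type Deserializer = fn(item_id: u64) -> Result<String, String>;

fn rebuild_items(
    registry: &HashMap<&'static str, Deserializer>,
    items: &[(&'static str, u64)],
) -> Vec<String> {
    let mut rebuilt = Vec::new();
    for (kind, item_id) in items {
        match registry.get(kind) {
            // Known kind: run its deserializer and keep the result.
            Some(deserialize) => match deserialize(*item_id) {
                Ok(item) => rebuilt.push(item),
                Err(error) => eprintln!("{error}"), // mirrors `.log_err()`
            },
            // Unknown kind: log and move on, as the real code does.
            None => eprintln!("Deserializer does not exist for item kind: {kind}"),
        }
    }
    rebuilt
}

fn main() {
    let mut registry: HashMap<&'static str, Deserializer> = HashMap::new();
    registry.insert("Editor", |item_id| Ok(format!("editor {item_id}")));

    let rebuilt = rebuild_items(&registry, &[("Editor", 1), ("Unknown", 2)]);
    assert_eq!(rebuilt, vec!["editor 1".to_string()]);
}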
@@ -58,7 +58,7 @@ use theme::{Theme, ThemeRegistry};
 pub use toolbar::{ToolbarItemLocation, ToolbarItemView};
 use util::ResultExt;
 
-use crate::persistence::model::{SerializedPane, SerializedWorkspace};
+use crate::persistence::model::{SerializedPane, SerializedPaneGroup, SerializedWorkspace};
 
 #[derive(Clone, PartialEq)]
 pub struct RemoveWorktreeFromProject(pub WorktreeId);
@@ -2264,27 +2264,62 @@ impl Workspace {
         .into()
     }
 
-    fn serialize_workspace(&self, old_id: Option<WorkspaceId>, cx: &mut MutableAppContext) {
-        let dock_pane = SerializedPane {
-            children: self
-                .dock
-                .pane()
-                .read(cx)
-                .items()
-                .filter_map(|item_handle| {
-                    Some(SerializedItem {
-                        kind: Arc::from(item_handle.serialized_item_kind()?),
-                        item_id: item_handle.id(),
-                    })
-                })
-                .collect::<Vec<_>>(),
-        };
+    fn remove_panes(&mut self, member: Member, cx: &mut ViewContext<Workspace>) {
+        match member {
+            Member::Axis(PaneAxis { members, .. }) => {
+                for child in members.iter() {
+                    self.remove_panes(child.clone(), cx)
+                }
+            }
+            Member::Pane(pane) => self.remove_pane(pane.clone(), cx),
+        }
+    }
+
+    fn serialize_workspace(&self, old_id: Option<WorkspaceId>, cx: &AppContext) {
+        fn serialize_pane_handle(
+            pane_handle: &ViewHandle<Pane>,
+            cx: &AppContext,
+        ) -> SerializedPane {
+            SerializedPane {
+                children: pane_handle
+                    .read(cx)
+                    .items()
+                    .filter_map(|item_handle| {
+                        Some(SerializedItem {
+                            kind: Arc::from(item_handle.serialized_item_kind()?),
+                            item_id: item_handle.id(),
+                        })
+                    })
+                    .collect::<Vec<_>>(),
+            }
+        }
+
+        let dock_pane = serialize_pane_handle(self.dock.pane(), cx);
+
+        fn build_serialized_pane_group(
+            pane_group: &Member,
+            cx: &AppContext,
+        ) -> SerializedPaneGroup {
+            match pane_group {
+                Member::Axis(PaneAxis { axis, members }) => SerializedPaneGroup::Group {
+                    axis: *axis,
+                    children: members
+                        .iter()
+                        .map(|member| build_serialized_pane_group(member, cx))
+                        .collect::<Vec<_>>(),
+                },
+                Member::Pane(pane_handle) => {
+                    SerializedPaneGroup::Pane(serialize_pane_handle(&pane_handle, cx))
+                }
+            }
+        }
+        let center_group = build_serialized_pane_group(&self.center.root, cx);
 
         let serialized_workspace = SerializedWorkspace {
             workspace_id: self.workspace_id(cx),
             dock_position: self.dock.position(),
             dock_pane,
-            center_group: Default::default(),
+            center_group,
         };
 
         cx.background()
@@ -2299,87 +2334,43 @@ impl Workspace {
         serialized_workspace: SerializedWorkspace,
         cx: &mut MutableAppContext,
     ) {
-        // fn process_splits(
-        //     pane_group: SerializedPaneGroup,
-        //     parent: Option<PaneGroup>,
-        //     workspace: ViewHandle<Workspace>,
-        //     cx: &mut AsyncAppContext,
-        // ) {
-        //     match pane_group {
-        //         SerializedPaneGroup::Group { axis, children } => {
-        //             process_splits(pane_group, parent)
-        //         }
-        //         SerializedPaneGroup::Pane(pane) => {
-        //             process_pane(pane)
-        //         },
-        //     }
-        // }
-
-        async fn deserialize_pane(
-            project: ModelHandle<Project>,
-            pane: SerializedPane,
-            pane_handle: ViewHandle<Pane>,
-            workspace_id: WorkspaceId,
-            workspace: &ViewHandle<Workspace>,
-            cx: &mut AsyncAppContext,
-        ) {
-            for item in pane.children {
-                let project = project.clone();
-                let workspace_id = workspace_id.clone();
-                let item_handle = pane_handle
-                    .update(cx, |_, cx| {
-                        if let Some(deserializer) = cx.global::<ItemDeserializers>().get(&item.kind)
-                        {
-                            deserializer(
-                                project,
-                                workspace.downgrade(),
-                                workspace_id,
-                                item.item_id,
-                                cx,
-                            )
-                        } else {
-                            Task::ready(Err(anyhow!(
-                                "Deserializer does not exist for item kind: {}",
-                                item.kind
-                            )))
-                        }
-                    })
-                    .await
-                    .log_err();
-
-                if let Some(item_handle) = item_handle {
-                    workspace.update(cx, |workspace, cx| {
-                        Pane::add_item(
-                            workspace,
-                            &pane_handle,
-                            item_handle,
-                            false,
-                            false,
-                            None,
-                            cx,
-                        );
-                    })
-                }
-            }
-        }
-
         cx.spawn(|mut cx| async move {
             if let Some(workspace) = workspace.upgrade(&cx) {
                 let (project, dock_pane_handle) = workspace.read_with(&cx, |workspace, _| {
                     (workspace.project().clone(), workspace.dock_pane().clone())
                 });
-                deserialize_pane(
-                    project,
-                    serialized_workspace.dock_pane,
-                    dock_pane_handle,
-                    serialized_workspace.workspace_id,
-                    &workspace,
-                    &mut cx,
-                )
-                .await;
+
+                serialized_workspace
+                    .dock_pane
+                    .deserialize_to(
+                        &project,
+                        &dock_pane_handle,
+                        &serialized_workspace.workspace_id,
+                        &workspace,
+                        &mut cx,
+                    )
+                    .await;
 
                 // Traverse the splits tree and add to things
-                // process_splits(serialized_workspace.center_group, None, workspace, &mut cx);
+                let root = serialized_workspace
+                    .center_group
+                    .deserialize(
+                        &project,
+                        &serialized_workspace.workspace_id,
+                        &workspace,
+                        &mut cx,
+                    )
+                    .await;
+
+                // Remove old panes from workspace panes list
+                workspace.update(&mut cx, |workspace, cx| {
+                    workspace.remove_panes(workspace.center.root.clone(), cx);
+
+                    // Swap workspace center group
+                    workspace.center = PaneGroup::with_root(root);
+                    cx.notify();
+                });
 
                 workspace.update(&mut cx, |workspace, cx| {
                     Dock::set_dock_position(workspace, serialized_workspace.dock_position, cx)