Dock persistence working!
Co-Authored-By: Mikayla Maki <mikayla@zed.dev>
This commit is contained in:
parent c1f7902309
commit d20d21c6a2

3 Cargo.lock (generated)
@@ -1738,6 +1738,7 @@ dependencies = [
"collections",
"context_menu",
"ctor",
"db",
"drag_and_drop",
"env_logger",
"futures 0.3.25",
@@ -1761,6 +1762,7 @@ dependencies = [
"smallvec",
"smol",
"snippet",
"sqlez",
"sum_tree",
"text",
"theme",
@@ -7629,6 +7631,7 @@ dependencies = [
"context_menu",
"db",
"drag_and_drop",
"env_logger",
"fs",
"futures 0.3.25",
"gpui",

@@ -52,7 +52,7 @@ use std::{
use theme::ThemeRegistry;
use unindent::Unindent as _;
use util::post_inc;
use workspace::{shared_screen::SharedScreen, item::Item, SplitDirection, ToggleFollow, Workspace};
use workspace::{item::Item, shared_screen::SharedScreen, SplitDirection, ToggleFollow, Workspace};

#[ctor::ctor]
fn init_logger() {
@@ -1,6 +1,6 @@
pub mod kvp;

use std::fs;
use std::fs::create_dir_all;
use std::path::Path;

#[cfg(any(test, feature = "test-support"))]
@@ -8,24 +8,29 @@ use anyhow::Result;
use indoc::indoc;
#[cfg(any(test, feature = "test-support"))]
use sqlez::connection::Connection;
use sqlez::domain::Domain;
use sqlez::domain::{Domain, Migrator};
use sqlez::thread_safe_connection::ThreadSafeConnection;
use util::channel::RELEASE_CHANNEL_NAME;
use util::paths::DB_DIR;

const INITIALIZE_QUERY: &'static str = indoc! {"
PRAGMA journal_mode=WAL;
PRAGMA synchronous=NORMAL;
PRAGMA busy_timeout=1;
PRAGMA foreign_keys=TRUE;
PRAGMA case_sensitive_like=TRUE;
"};

/// Open or create a database at the given directory path.
pub fn open_file_db<D: Domain>() -> ThreadSafeConnection<D> {
pub fn open_file_db<M: Migrator>() -> ThreadSafeConnection<M> {
// Use 0 for now. Will implement incrementing and clearing of old db files soon TM
let current_db_dir = (*util::paths::DB_DIR).join(Path::new(&format!(
"0-{}",
*util::channel::RELEASE_CHANNEL_NAME
)));
fs::create_dir_all(&current_db_dir).expect("Should be able to create the database directory");
let current_db_dir = (*DB_DIR).join(Path::new(&format!("0-{}", *RELEASE_CHANNEL_NAME)));

// if *RELEASE_CHANNEL == ReleaseChannel::Dev {
//     remove_dir_all(&current_db_dir).ok();
// }

create_dir_all(&current_db_dir).expect("Should be able to create the database directory");
let db_path = current_db_dir.join(Path::new("db.sqlite"));

ThreadSafeConnection::new(db_path.to_string_lossy().as_ref(), true)
@@ -44,3 +49,23 @@ pub fn write_db_to<D: Domain, P: AsRef<Path>>(
let destination = Connection::open_file(dest.as_ref().to_string_lossy().as_ref());
conn.backup_main(&destination)
}

/// Implements a basic DB wrapper for a given domain
#[macro_export]
macro_rules! connection {
($id:ident: $t:ident<$d:ty>) => {
pub struct $t(::sqlez::thread_safe_connection::ThreadSafeConnection<$d>);

impl ::std::ops::Deref for $t {
type Target = ::sqlez::thread_safe_connection::ThreadSafeConnection<$d>;

fn deref(&self) -> &Self::Target {
&self.0
}
}

lazy_static! {
pub static ref $id: $t = $t(::db::open_file_db());
}
};
}
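For orientation, here is a minimal sketch (not part of this commit) of how the new `connection!` macro and the `Domain` trait are intended to be used together; the `Bookmarks`/`BookmarksDb` names are hypothetical:

use db::connection;
use indoc::indoc;
use lazy_static::lazy_static;
use sqlez::domain::Domain;

// Hypothetical domain type, used only to illustrate the pattern.
pub struct Bookmarks;

impl Domain for Bookmarks {
    fn name() -> &'static str {
        "bookmarks"
    }

    fn migrations() -> &'static [&'static str] {
        &[indoc! {"
            CREATE TABLE bookmarks(
                id INTEGER PRIMARY KEY,
                path TEXT NOT NULL
            ) STRICT;
        "}]
    }
}

// Expands to a `BookmarksDb` newtype over ThreadSafeConnection<Bookmarks>,
// a Deref impl, and a lazy_static `BOOKMARKS_DB` handle backed by open_file_db().
connection!(BOOKMARKS_DB: BookmarksDb<Bookmarks>);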
@@ -1,22 +1,9 @@
use anyhow::Result;
use indoc::indoc;

use sqlez::{
connection::Connection, domain::Domain, migrations::Migration,
thread_safe_connection::ThreadSafeConnection,
};
use sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection};
use std::ops::Deref;

pub(crate) const KVP_MIGRATION: Migration = Migration::new(
"kvp",
&[indoc! {"
CREATE TABLE kv_store(
key TEXT PRIMARY KEY,
value TEXT NOT NULL
) STRICT;
"}],
);

lazy_static::lazy_static! {
pub static ref KEY_VALUE_STORE: KeyValueStore =
KeyValueStore(crate::open_file_db());
@@ -26,8 +13,17 @@ lazy_static::lazy_static! {
pub struct KeyValueStore(ThreadSafeConnection<KeyValueStore>);

impl Domain for KeyValueStore {
fn migrate(conn: &Connection) -> anyhow::Result<()> {
KVP_MIGRATION.run(conn)
fn name() -> &'static str {
"kvp"
}

fn migrations() -> &'static [&'static str] {
&[indoc! {"
CREATE TABLE kv_store(
key TEXT PRIMARY KEY,
value TEXT NOT NULL
) STRICT;
"}]
}
}
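A hedged sketch (not in this commit) of how the key-value store can be exercised through the bound-statement helpers that appear elsewhere in this diff; it assumes `KEY_VALUE_STORE` derefs to a `sqlez::connection::Connection` once its migrations have run:

fn kvp_roundtrip() -> anyhow::Result<()> {
    use crate::kvp::KEY_VALUE_STORE;

    // Write a value with exec_bound, the same helper the workspace DB uses.
    KEY_VALUE_STORE.exec_bound::<(&str, &str)>(
        "INSERT OR REPLACE INTO kv_store(key, value) VALUES (?, ?)",
    )?(("dock_anchor", "Bottom"))?;

    // Read it back with select_row_bound.
    let value = KEY_VALUE_STORE.select_row_bound::<&str, String>(
        "SELECT value FROM kv_store WHERE key = ?",
    )?("dock_anchor")?;

    assert_eq!(value.as_deref(), Some("Bottom"));
    Ok(())
}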
@@ -31,7 +31,7 @@ use std::{
use util::TryFutureExt;
use workspace::{
item::{Item, ItemEvent, ItemHandle},
ItemNavHistory, Workspace,
ItemNavHistory, Pane, Workspace,
};

actions!(diagnostics, [Deploy]);
@@ -613,6 +613,20 @@ impl Item for ProjectDiagnosticsEditor {
fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
self.editor.update(cx, |editor, cx| editor.deactivated(cx));
}

fn serialized_item_kind() -> Option<&'static str> {
Some("diagnostics")
}

fn deserialize(
project: ModelHandle<Project>,
workspace: WeakViewHandle<Workspace>,
_workspace_id: workspace::WorkspaceId,
_item_id: workspace::ItemId,
cx: &mut ViewContext<Pane>,
) -> Task<Result<ViewHandle<Self>>> {
Task::ready(Ok(cx.add_view(|cx| Self::new(project, workspace, cx))))
}
}

fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {

@@ -23,6 +23,7 @@ test-support = [
drag_and_drop = { path = "../drag_and_drop" }
text = { path = "../text" }
clock = { path = "../clock" }
db = { path = "../db" }
collections = { path = "../collections" }
context_menu = { path = "../context_menu" }
fuzzy = { path = "../fuzzy" }
@@ -37,6 +38,7 @@ snippet = { path = "../snippet" }
sum_tree = { path = "../sum_tree" }
theme = { path = "../theme" }
util = { path = "../util" }
sqlez = { path = "../sqlez" }
workspace = { path = "../workspace" }
aho-corasick = "0.7"
anyhow = "1.0"

@@ -9,6 +9,7 @@ mod link_go_to_definition;
mod mouse_context_menu;
pub mod movement;
mod multi_buffer;
mod persistence;
pub mod selections_collection;

#[cfg(test)]
@@ -7,7 +7,7 @@ use anyhow::{anyhow, Result};
use futures::FutureExt;
use gpui::{
elements::*, geometry::vector::vec2f, AppContext, Entity, ModelHandle, MutableAppContext,
RenderContext, Subscription, Task, View, ViewContext, ViewHandle,
RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
};
use language::{Bias, Buffer, File as _, OffsetRangeExt, Point, SelectionGoal};
use project::{File, FormatTrigger, Project, ProjectEntryId, ProjectPath};
@@ -26,7 +26,7 @@ use util::TryFutureExt;
use workspace::{
item::{FollowableItem, Item, ItemEvent, ItemHandle, ProjectItem},
searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle},
ItemNavHistory, StatusItemView, ToolbarItemLocation,
ItemId, ItemNavHistory, Pane, StatusItemView, ToolbarItemLocation, Workspace, WorkspaceId,
};

pub const MAX_TAB_TITLE_LEN: usize = 24;
@@ -552,6 +552,21 @@ impl Item for Editor {
}));
Some(breadcrumbs)
}

fn serialized_item_kind() -> Option<&'static str> {
Some("Editor")
}

fn deserialize(
_project: ModelHandle<Project>,
_workspace: WeakViewHandle<Workspace>,
_workspace_id: WorkspaceId,
_item_id: ItemId,
_cx: &mut ViewContext<Pane>,
) -> Task<Result<ViewHandle<Self>>> {
// Look up the path with this key associated, create a self with that path
unimplemented!()
}
}

impl ProjectItem for Editor {
30 crates/editor/src/persistence.rs (new file)
@@ -0,0 +1,30 @@
use std::path::PathBuf;

use db::connection;
use indoc::indoc;
use lazy_static::lazy_static;
use project::WorktreeId;
use sqlez::domain::Domain;
use workspace::{ItemId, Workspace};

use crate::Editor;

connection!(DB: EditorDb<(Workspace, Editor)>);

impl Domain for Editor {
fn name() -> &'static str {
"editor"
}

fn migrations() -> &'static [&'static str] {
&[indoc! {"

"}]
}
}

impl EditorDb {
fn get_path(_item_id: ItemId, _workspace_id: WorktreeId) -> PathBuf {
unimplemented!();
}
}
@@ -804,6 +804,7 @@ impl Project {
&self.collaborators
}

/// Collect all worktrees, including ones that don't appear in the project panel
pub fn worktrees<'a>(
&'a self,
cx: &'a AppContext,
@@ -813,6 +814,7 @@
.filter_map(move |worktree| worktree.upgrade(cx))
}

/// Collect all user-visible worktrees, the ones that appear in the project panel
pub fn visible_worktrees<'a>(
&'a self,
cx: &'a AppContext,

@@ -353,6 +353,20 @@ impl Item for ProjectSearchView {
fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
self.results_editor.breadcrumbs(theme, cx)
}

fn serialized_item_kind() -> Option<&'static str> {
None
}

fn deserialize(
_project: ModelHandle<Project>,
_workspace: WeakViewHandle<Workspace>,
_workspace_id: workspace::WorkspaceId,
_item_id: workspace::ItemId,
_cx: &mut ViewContext<Pane>,
) -> Task<anyhow::Result<ViewHandle<Self>>> {
unimplemented!()
}
}

impl ProjectSearchView {
@@ -2,6 +2,7 @@ use std::{
ffi::OsStr,
os::unix::prelude::OsStrExt,
path::{Path, PathBuf},
sync::Arc,
};

use anyhow::Result;
@@ -118,6 +119,13 @@ impl Bind for &str {
}
}

impl Bind for Arc<str> {
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
statement.bind_text(start_index, self.as_ref())?;
Ok(start_index + 1)
}
}

impl Bind for String {
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
statement.bind_text(start_index, self)?;
@@ -125,6 +133,13 @@ impl Bind for String {
}
}

impl Column for Arc<str> {
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
let result = statement.column_text(start_index)?;
Ok((Arc::from(result), start_index + 1))
}
}

impl Column for String {
fn column<'a>(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
let result = statement.column_text(start_index)?;
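The `Arc<str>` impls above follow the delegation style the rest of the commit depends on. As a hedged illustration (not in the diff, and the module paths for `Bind`/`Column` are assumptions), a custom newtype can plug into the same machinery by delegating to an existing impl:

use anyhow::Result;
use sqlez::bindable::{Bind, Column};
use sqlez::statement::Statement;

// Hypothetical wrapper type, shown only to illustrate the pattern.
struct ItemKind(String);

impl Bind for ItemKind {
    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
        // Delegate to the String impl; it returns the next free bind index.
        self.0.bind(statement, start_index)
    }
}

impl Column for ItemKind {
    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
        let (text, next_index) = String::column(statement, start_index)?;
        Ok((ItemKind(text), next_index))
    }
}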
@@ -54,10 +54,6 @@ impl Connection {
self.persistent
}

pub(crate) fn last_insert_id(&self) -> i64 {
unsafe { sqlite3_last_insert_rowid(self.sqlite3) }
}

pub fn backup_main(&self, destination: &Connection) -> Result<()> {
unsafe {
let backup = sqlite3_backup_init(
@@ -126,7 +122,7 @@ mod test {
let text = "Some test text";

connection
.insert_bound("INSERT INTO text (text) VALUES (?);")
.exec_bound("INSERT INTO text (text) VALUES (?);")
.unwrap()(text)
.unwrap();

@@ -155,7 +151,7 @@ mod test {
let tuple2 = ("test2".to_string(), 32, vec![64, 32, 16, 8, 4, 2, 1, 0]);

let mut insert = connection
.insert_bound::<(String, usize, Vec<u8>)>(
.exec_bound::<(String, usize, Vec<u8>)>(
"INSERT INTO test (text, integer, blob) VALUES (?, ?, ?)",
)
.unwrap();
@@ -185,7 +181,7 @@ mod test {
.unwrap();

connection
.insert_bound("INSERT INTO bools(t, f) VALUES (?, ?);")
.exec_bound("INSERT INTO bools(t, f) VALUES (?, ?)")
.unwrap()((true, false))
.unwrap();

@@ -210,7 +206,7 @@ mod test {
.unwrap();
let blob = vec![0, 1, 2, 4, 8, 16, 32, 64];
connection1
.insert_bound::<Vec<u8>>("INSERT INTO blobs (data) VALUES (?);")
.exec_bound::<Vec<u8>>("INSERT INTO blobs (data) VALUES (?);")
.unwrap()(blob.clone())
.unwrap();
@@ -1,39 +1,50 @@
use crate::connection::Connection;

pub trait Domain {
fn migrate(conn: &Connection) -> anyhow::Result<()>;
fn name() -> &'static str;
fn migrations() -> &'static [&'static str];
}

impl<D1: Domain, D2: Domain> Domain for (D1, D2) {
fn migrate(conn: &Connection) -> anyhow::Result<()> {
D1::migrate(conn)?;
D2::migrate(conn)
pub trait Migrator {
fn migrate(connection: &Connection) -> anyhow::Result<()>;
}

impl<D: Domain> Migrator for D {
fn migrate(connection: &Connection) -> anyhow::Result<()> {
connection.migrate(Self::name(), Self::migrations())
}
}

impl<D1: Domain, D2: Domain, D3: Domain> Domain for (D1, D2, D3) {
fn migrate(conn: &Connection) -> anyhow::Result<()> {
D1::migrate(conn)?;
D2::migrate(conn)?;
D3::migrate(conn)
impl<D1: Domain, D2: Domain> Migrator for (D1, D2) {
fn migrate(connection: &Connection) -> anyhow::Result<()> {
D1::migrate(connection)?;
D2::migrate(connection)
}
}

impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain> Domain for (D1, D2, D3, D4) {
fn migrate(conn: &Connection) -> anyhow::Result<()> {
D1::migrate(conn)?;
D2::migrate(conn)?;
D3::migrate(conn)?;
D4::migrate(conn)
impl<D1: Domain, D2: Domain, D3: Domain> Migrator for (D1, D2, D3) {
fn migrate(connection: &Connection) -> anyhow::Result<()> {
D1::migrate(connection)?;
D2::migrate(connection)?;
D3::migrate(connection)
}
}

impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain, D5: Domain> Domain for (D1, D2, D3, D4, D5) {
fn migrate(conn: &Connection) -> anyhow::Result<()> {
D1::migrate(conn)?;
D2::migrate(conn)?;
D3::migrate(conn)?;
D4::migrate(conn)?;
D5::migrate(conn)
impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain> Migrator for (D1, D2, D3, D4) {
fn migrate(connection: &Connection) -> anyhow::Result<()> {
D1::migrate(connection)?;
D2::migrate(connection)?;
D3::migrate(connection)?;
D4::migrate(connection)
}
}

impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain, D5: Domain> Migrator for (D1, D2, D3, D4, D5) {
fn migrate(connection: &Connection) -> anyhow::Result<()> {
D1::migrate(connection)?;
D2::migrate(connection)?;
D3::migrate(connection)?;
D4::migrate(connection)?;
D5::migrate(connection)
}
}
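To see how the Domain/Migrator split composes, here is a small self-contained sketch (not from this commit; the domain and table names are made up). A connection typed over a tuple of domains migrates each domain in order, which is what lets the editor declare `EditorDb<(Workspace, Editor)>` elsewhere in this diff:

use sqlez::connection::Connection;
use sqlez::domain::{Domain, Migrator};

struct A;
struct B;

impl Domain for A {
    fn name() -> &'static str {
        "a"
    }
    fn migrations() -> &'static [&'static str] {
        &["CREATE TABLE IF NOT EXISTS a_table(x INTEGER)"]
    }
}

impl Domain for B {
    fn name() -> &'static str {
        "b"
    }
    fn migrations() -> &'static [&'static str] {
        &["CREATE TABLE IF NOT EXISTS b_table(y INTEGER)"]
    }
}

fn main() -> anyhow::Result<()> {
    let connection = Connection::open_memory("migrator_tuple_demo");
    // The (A, B) tuple impl runs A's migrations, then B's, each recorded
    // under its own domain name in the migrations table.
    <(A, B)>::migrate(&connection)
}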
@@ -9,53 +9,27 @@ use indoc::{formatdoc, indoc};

use crate::connection::Connection;

const MIGRATIONS_MIGRATION: Migration = Migration::new(
"migrations",
// The migrations migration must be infallable because it runs to completion
// with every call to migration run and is run unchecked.
&[indoc! {"
CREATE TABLE IF NOT EXISTS migrations (
domain TEXT,
step INTEGER,
migration TEXT
)
"}],
);

#[derive(Debug)]
pub struct Migration {
domain: &'static str,
migrations: &'static [&'static str],
}

impl Migration {
pub const fn new(domain: &'static str, migrations: &'static [&'static str]) -> Self {
Self { domain, migrations }
}

fn run_unchecked(&self, connection: &Connection) -> Result<()> {
for migration in self.migrations {
connection.exec(migration)?()?;
}

Ok(())
}

pub fn run(&self, connection: &Connection) -> Result<()> {
impl Connection {
pub fn migrate(&self, domain: &'static str, migrations: &[&'static str]) -> Result<()> {
// Setup the migrations table unconditionally
MIGRATIONS_MIGRATION.run_unchecked(connection)?;
self.exec(indoc! {"
CREATE TABLE IF NOT EXISTS migrations (
domain TEXT,
step INTEGER,
migration TEXT
)"})?()?;

let completed_migrations =
connection.select_bound::<&str, (String, usize, String)>(indoc! {"
self.select_bound::<&str, (String, usize, String)>(indoc! {"
SELECT domain, step, migration FROM migrations
WHERE domain = ?
ORDER BY step
"})?(self.domain)?;
"})?(domain)?;

let mut store_completed_migration = connection
.insert_bound("INSERT INTO migrations (domain, step, migration) VALUES (?, ?, ?)")?;
let mut store_completed_migration =
self.exec_bound("INSERT INTO migrations (domain, step, migration) VALUES (?, ?, ?)")?;

for (index, migration) in self.migrations.iter().enumerate() {
for (index, migration) in migrations.iter().enumerate() {
if let Some((_, _, completed_migration)) = completed_migrations.get(index) {
if completed_migration != migration {
return Err(anyhow!(formatdoc! {"
@@ -65,15 +39,15 @@ impl Migration {
{}

Proposed migration:
{}", self.domain, index, completed_migration, migration}));
{}", domain, index, completed_migration, migration}));
} else {
// Migration already run. Continue
continue;
}
}

connection.exec(migration)?()?;
store_completed_migration((self.domain, index, *migration))?;
self.exec(migration)?()?;
store_completed_migration((domain, index, *migration))?;
}

Ok(())
@@ -84,22 +58,23 @@ impl Migration {
mod test {
use indoc::indoc;

use crate::{connection::Connection, migrations::Migration};
use crate::connection::Connection;

#[test]
fn test_migrations_are_added_to_table() {
let connection = Connection::open_memory("migrations_are_added_to_table");

// Create first migration with a single step and run it
let mut migration = Migration::new(
"test",
&[indoc! {"
CREATE TABLE test1 (
a TEXT,
b TEXT
)"}],
);
migration.run(&connection).unwrap();
connection
.migrate(
"test",
&[indoc! {"
CREATE TABLE test1 (
a TEXT,
b TEXT
)"}],
)
.unwrap();

// Verify it got added to the migrations table
assert_eq!(
@@ -107,23 +82,31 @@ mod test {
.select::<String>("SELECT (migration) FROM migrations")
.unwrap()()
.unwrap()[..],
migration.migrations
);

// Add another step to the migration and run it again
migration.migrations = &[
indoc! {"
&[indoc! {"
CREATE TABLE test1 (
a TEXT,
b TEXT
)"},
indoc! {"
CREATE TABLE test2 (
c TEXT,
d TEXT
)"},
];
migration.run(&connection).unwrap();
)"}],
);

// Add another step to the migration and run it again
connection
.migrate(
"test",
&[
indoc! {"
CREATE TABLE test1 (
a TEXT,
b TEXT
)"},
indoc! {"
CREATE TABLE test2 (
c TEXT,
d TEXT
)"},
],
)
.unwrap();

// Verify it is also added to the migrations table
assert_eq!(
@@ -131,7 +114,18 @@ mod test {
.select::<String>("SELECT (migration) FROM migrations")
.unwrap()()
.unwrap()[..],
migration.migrations
&[
indoc! {"
CREATE TABLE test1 (
a TEXT,
b TEXT
)"},
indoc! {"
CREATE TABLE test2 (
c TEXT,
d TEXT
)"},
],
);
}

@@ -150,7 +144,7 @@ mod test {
.unwrap();

let mut store_completed_migration = connection
.insert_bound::<(&str, usize, String)>(indoc! {"
.exec_bound::<(&str, usize, String)>(indoc! {"
INSERT INTO migrations (domain, step, migration)
VALUES (?, ?, ?)"})
.unwrap();
@@ -171,8 +165,7 @@ mod test {
fn migrations_dont_rerun() {
let connection = Connection::open_memory("migrations_dont_rerun");

// Create migration which clears a table
let migration = Migration::new("test", &["DELETE FROM test_table"]);
// Create migration which clears a tabl

// Manually create the table for that migration with a row
connection
@@ -197,7 +190,9 @@ mod test {
);

// Run the migration verifying that the row got dropped
migration.run(&connection).unwrap();
connection
.migrate("test", &["DELETE FROM test_table"])
.unwrap();
assert_eq!(
connection
.select_row::<usize>("SELECT * FROM test_table")
@@ -213,7 +208,9 @@ mod test {
.unwrap();

// Run the same migration again and verify that the table was left unchanged
migration.run(&connection).unwrap();
connection
.migrate("test", &["DELETE FROM test_table"])
.unwrap();
assert_eq!(
connection
.select_row::<usize>("SELECT * FROM test_table")
@@ -228,22 +225,22 @@ mod test {
let connection = Connection::open_memory("changed_migration_fails");

// Create a migration with two steps and run it
Migration::new(
"test migration",
&[
indoc! {"
connection
.migrate(
"test migration",
&[
indoc! {"
CREATE TABLE test (
col INTEGER
)"},
indoc! {"
INSERT INTO test (col) VALUES (1)"},
],
)
.run(&connection)
.unwrap();
indoc! {"
INSERT INTO test (col) VALUES (1)"},
],
)
.unwrap();

// Create another migration with the same domain but different steps
let second_migration_result = Migration::new(
let second_migration_result = connection.migrate(
"test migration",
&[
indoc! {"
@@ -253,8 +250,7 @@ mod test {
indoc! {"
INSERT INTO test (color) VALUES (1)"},
],
)
.run(&connection);
);

// Verify new migration returns error when run
assert!(second_migration_result.is_err())
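For reference, a minimal sketch (not from this commit; the domain and table names are invented) of the new `Connection::migrate` API that replaces the standalone `Migration` struct:

use indoc::indoc;
use sqlez::connection::Connection;

fn run_example_migration() -> anyhow::Result<()> {
    let connection = Connection::open_memory("migrate_api_example");

    // Each step is recorded in the migrations table under its domain name.
    // Re-running with identical steps is a no-op; altering an already applied
    // step makes migrate return an error instead of silently diverging.
    connection.migrate(
        "example",
        &[indoc! {"
            CREATE TABLE example_items(
                id INTEGER PRIMARY KEY,
                name TEXT NOT NULL
            ) STRICT;
        "}],
    )
}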
@@ -256,11 +256,6 @@ impl<'a> Statement<'a> {
}
}

pub fn insert(&mut self) -> Result<i64> {
self.exec()?;
Ok(self.connection.last_insert_id())
}

pub fn exec(&mut self) -> Result<()> {
fn logic(this: &mut Statement) -> Result<()> {
while this.step()? == StepResult::Row {}

@@ -3,20 +3,23 @@ use std::{marker::PhantomData, ops::Deref, sync::Arc};
use connection::Connection;
use thread_local::ThreadLocal;

use crate::{connection, domain::Domain};
use crate::{
connection,
domain::{Domain, Migrator},
};

pub struct ThreadSafeConnection<D: Domain> {
pub struct ThreadSafeConnection<M: Migrator> {
uri: Arc<str>,
persistent: bool,
initialize_query: Option<&'static str>,
connection: Arc<ThreadLocal<Connection>>,
_pd: PhantomData<D>,
_pd: PhantomData<M>,
}

unsafe impl<T: Domain> Send for ThreadSafeConnection<T> {}
unsafe impl<T: Domain> Sync for ThreadSafeConnection<T> {}
unsafe impl<T: Migrator> Send for ThreadSafeConnection<T> {}
unsafe impl<T: Migrator> Sync for ThreadSafeConnection<T> {}

impl<D: Domain> ThreadSafeConnection<D> {
impl<M: Migrator> ThreadSafeConnection<M> {
pub fn new(uri: &str, persistent: bool) -> Self {
Self {
uri: Arc::from(uri),
@@ -72,7 +75,11 @@ impl<D: Domain> Clone for ThreadSafeConnection<D> {
}
}

impl<D: Domain> Deref for ThreadSafeConnection<D> {
// TODO:
//   1. When migration or initialization fails, move the corrupted db to a holding place and create a new one
//   2. If the new db also fails, downgrade to a shared in memory db
//   3. In either case notify the user about what went wrong
impl<M: Migrator> Deref for ThreadSafeConnection<M> {
type Target = Connection;

fn deref(&self) -> &Self::Target {
@@ -91,7 +98,7 @@ impl<D: Domain> Deref for ThreadSafeConnection<D> {
.unwrap();
}

D::migrate(&connection).expect("Migrations failed");
M::migrate(&connection).expect("Migrations failed");

connection
})

@@ -20,19 +20,6 @@ impl Connection {
Ok(move |bindings| statement.with_bindings(bindings)?.exec())
}

pub fn insert<'a>(&'a self, query: &str) -> Result<impl 'a + FnMut() -> Result<i64>> {
let mut statement = Statement::prepare(&self, query)?;
Ok(move || statement.insert())
}

pub fn insert_bound<'a, B: Bind>(
&'a self,
query: &str,
) -> Result<impl 'a + FnMut(B) -> Result<i64>> {
let mut statement = Statement::prepare(&self, query)?;
Ok(move |bindings| statement.with_bindings(bindings)?.insert())
}

pub fn select<'a, C: Column>(
&'a self,
query: &str,
@@ -34,7 +34,9 @@ use mappings::mouse::{

use procinfo::LocalProcessInfo;
use settings::{AlternateScroll, Settings, Shell, TerminalBlink};
use terminal_container_view::TerminalContainer;
use util::ResultExt;
use workspace::register_deserializable_item;

use std::{
cmp::min,
@@ -67,6 +69,8 @@ use lazy_static::lazy_static;
pub fn init(cx: &mut MutableAppContext) {
terminal_view::init(cx);
terminal_container_view::init(cx);

register_deserializable_item::<TerminalContainer>(cx);
}

///Scrolling is unbearably sluggish by default. Alacritty supports a configurable

@@ -5,7 +5,7 @@ use alacritty_terminal::index::Point;
use dirs::home_dir;
use gpui::{
actions, elements::*, AnyViewHandle, AppContext, Entity, ModelHandle, MutableAppContext, Task,
View, ViewContext, ViewHandle,
View, ViewContext, ViewHandle, WeakViewHandle,
};
use util::truncate_and_trailoff;
use workspace::searchable::{SearchEvent, SearchOptions, SearchableItem, SearchableItemHandle};
@@ -13,6 +13,7 @@ use workspace::{
item::{Item, ItemEvent},
ToolbarItemLocation, Workspace,
};
use workspace::{register_deserializable_item, Pane};

use project::{LocalWorktree, Project, ProjectPath};
use settings::{AlternateScroll, Settings, WorkingDirectory};
@@ -26,6 +27,8 @@ actions!(terminal, [DeployModal]);

pub fn init(cx: &mut MutableAppContext) {
cx.add_action(TerminalContainer::deploy);

register_deserializable_item::<TerminalContainer>(cx);
}

//Make terminal view an enum, that can give you views for the error and non-error states
@@ -127,7 +130,7 @@ impl TerminalContainer {
TerminalContainerContent::Error(view)
}
};
cx.focus(content.handle());
// cx.focus(content.handle());

TerminalContainer {
content,
@@ -375,6 +378,22 @@ impl Item for TerminalContainer {
)
.boxed()])
}

fn serialized_item_kind() -> Option<&'static str> {
Some("Terminal")
}

fn deserialize(
_project: ModelHandle<Project>,
_workspace: WeakViewHandle<Workspace>,
_workspace_id: workspace::WorkspaceId,
_item_id: workspace::ItemId,
cx: &mut ViewContext<Pane>,
) -> Task<anyhow::Result<ViewHandle<Self>>> {
// TODO: Pull the current working directory out of the DB.

Task::ready(Ok(cx.add_view(|cx| TerminalContainer::new(None, false, cx))))
}
}

impl SearchableItem for TerminalContainer {
@@ -6,7 +6,8 @@ use gpui::{
Padding, ParentElement,
},
fonts::TextStyle,
Border, Element, Entity, MutableAppContext, Quad, RenderContext, View, ViewContext,
Border, Element, Entity, ModelHandle, MutableAppContext, Quad, RenderContext, Task, View,
ViewContext, ViewHandle, WeakViewHandle,
};
use project::{Project, ProjectEntryId, ProjectPath};
use settings::Settings;
@@ -14,13 +15,15 @@ use smallvec::SmallVec;
use theme::{ColorScheme, Layer, Style, StyleSet};
use workspace::{
item::{Item, ItemEvent},
Workspace,
register_deserializable_item, Pane, Workspace,
};

actions!(theme, [DeployThemeTestbench]);

pub fn init(cx: &mut MutableAppContext) {
cx.add_action(ThemeTestbench::deploy);

register_deserializable_item::<ThemeTestbench>(cx)
}

pub struct ThemeTestbench {}
@@ -357,4 +360,18 @@ impl Item for ThemeTestbench {
fn to_item_events(_: &Self::Event) -> Vec<ItemEvent> {
Vec::new()
}

fn serialized_item_kind() -> Option<&'static str> {
Some("ThemeTestBench")
}

fn deserialize(
_project: ModelHandle<Project>,
_workspace: WeakViewHandle<Workspace>,
_workspace_id: workspace::WorkspaceId,
_item_id: workspace::ItemId,
cx: &mut ViewContext<Pane>,
) -> Task<gpui::anyhow::Result<ViewHandle<Self>>> {
Task::ready(Ok(cx.add_view(|_| Self {})))
}
}

@@ -37,6 +37,7 @@ bincode = "1.2.1"
anyhow = "1.0.38"
futures = "0.3"
lazy_static = "1.4"
env_logger = "0.9.1"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
parking_lot = "0.11.1"
postage = { version = "0.4.1", features = ["futures-traits"] }
@@ -137,13 +137,8 @@ pub struct Dock {
}

impl Dock {
pub fn new(
default_item_factory: DefaultItemFactory,
position: Option<DockPosition>,
cx: &mut ViewContext<Workspace>,
) -> Self {
let position = position
.unwrap_or_else(|| DockPosition::Hidden(cx.global::<Settings>().default_dock_anchor));
pub fn new(default_item_factory: DefaultItemFactory, cx: &mut ViewContext<Workspace>) -> Self {
let position = DockPosition::Hidden(cx.global::<Settings>().default_dock_anchor);

let pane = cx.add_view(|cx| Pane::new(Some(position.anchor()), cx));
pane.update(cx, |pane, cx| {
@@ -175,7 +170,7 @@ impl Dock {
self.position.is_visible() && self.position.anchor() == anchor
}

fn set_dock_position(
pub(crate) fn set_dock_position(
workspace: &mut Workspace,
new_position: DockPosition,
cx: &mut ViewContext<Workspace>,
@@ -211,6 +206,7 @@ impl Dock {
cx.focus(last_active_center_pane);
}
cx.emit(crate::Event::DockAnchorChanged);
workspace.serialize_workspace(None, cx);
cx.notify();
}

@@ -347,6 +343,10 @@ impl Dock {
}
})
}

pub fn position(&self) -> DockPosition {
self.position
}
}

pub struct ToggleDockButton {
@@ -117,15 +117,18 @@ pub trait Item: View {
fn breadcrumb_location(&self) -> ToolbarItemLocation {
ToolbarItemLocation::Hidden
}

fn breadcrumbs(&self, _theme: &Theme, _cx: &AppContext) -> Option<Vec<ElementBox>> {
None
}
fn serialized_item_kind() -> Option<&'static str>;
fn deserialize(
project: ModelHandle<Project>,
workspace: WeakViewHandle<Workspace>,
workspace_id: WorkspaceId,
item_id: ItemId,
cx: &mut ViewContext<Self>,
) -> Result<Self>;
cx: &mut ViewContext<Pane>,
) -> Task<Result<ViewHandle<Self>>>;
}

pub trait ItemHandle: 'static + fmt::Debug {
@@ -181,6 +184,7 @@ pub trait ItemHandle: 'static + fmt::Debug {
fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>>;
fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation;
fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option<Vec<ElementBox>>;
fn serialized_item_kind(&self) -> Option<&'static str>;
}

pub trait WeakItemHandle {
@@ -515,6 +519,10 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
self.read(cx).breadcrumbs(theme, cx)
}

fn serialized_item_kind(&self) -> Option<&'static str> {
T::serialized_item_kind()
}
}

impl From<Box<dyn ItemHandle>> for AnyViewHandle {
@@ -645,15 +653,14 @@ impl<T: FollowableItem> FollowableItemHandle for ViewHandle<T> {
pub(crate) mod test {
use std::{any::Any, borrow::Cow, cell::Cell};

use anyhow::anyhow;
use gpui::{
elements::Empty, AppContext, Element, ElementBox, Entity, ModelHandle, RenderContext, Task,
View, ViewContext,
View, ViewContext, ViewHandle, WeakViewHandle,
};
use project::{Project, ProjectEntryId, ProjectPath};
use smallvec::SmallVec;

use crate::{sidebar::SidebarItem, ItemNavHistory};
use crate::{sidebar::SidebarItem, ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};

use super::{Item, ItemEvent};

@@ -864,11 +871,13 @@ pub(crate) mod test {
}

fn deserialize(
workspace_id: crate::persistence::model::WorkspaceId,
item_id: crate::persistence::model::ItemId,
cx: &mut ViewContext<Self>,
) -> anyhow::Result<Self> {
Err(anyhow!("Cannot deserialize test item"))
_project: ModelHandle<Project>,
_workspace: WeakViewHandle<Workspace>,
_workspace_id: WorkspaceId,
_item_id: ItemId,
_cx: &mut ViewContext<Pane>,
) -> Task<anyhow::Result<ViewHandle<Self>>> {
unreachable!("Cannot deserialize test item")
}
}
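Taken together, an item opts into persistence by naming a serialized kind, providing a deserializer, and registering itself at init time. A schematic sketch of that pattern follows (hypothetical `MyView` type, unrelated Item methods elided, so this is illustrative rather than compilable on its own):

impl Item for MyView {
    // ... the other required Item methods ...

    fn serialized_item_kind() -> Option<&'static str> {
        Some("MyView")
    }

    fn deserialize(
        _project: ModelHandle<Project>,
        _workspace: WeakViewHandle<Workspace>,
        _workspace_id: WorkspaceId,
        _item_id: ItemId,
        cx: &mut ViewContext<Pane>,
    ) -> Task<Result<ViewHandle<Self>>> {
        // Rebuild the view from whatever was persisted for (workspace_id, item_id).
        Task::ready(Ok(cx.add_view(|_| MyView::default())))
    }
}

pub fn init(cx: &mut MutableAppContext) {
    // Makes the "MyView" kind resolvable when a serialized workspace is restored.
    register_deserializable_item::<MyView>(cx);
}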
@@ -2,93 +2,81 @@

pub mod model;

use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::sync::Arc;

use anyhow::{bail, Context, Result};
use db::open_file_db;
use anyhow::{anyhow, bail, Result, Context};
use db::connection;
use gpui::Axis;
use indoc::indoc;
use lazy_static::lazy_static;

use sqlez::thread_safe_connection::ThreadSafeConnection;
use sqlez::{connection::Connection, domain::Domain, migrations::Migration};

use sqlez::domain::Domain;
use util::{iife, unzip_option, ResultExt};

use crate::dock::DockPosition;

use super::Workspace;

use model::{
GroupId, PaneId, SerializedItem, SerializedItemKind, SerializedPane, SerializedPaneGroup,
GroupId, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup,
SerializedWorkspace, WorkspaceId,
};

lazy_static! {
pub static ref DB: WorkspaceDb = WorkspaceDb(open_file_db());
}

pub struct WorkspaceDb(ThreadSafeConnection<Workspace>);

impl Deref for WorkspaceDb {
type Target = ThreadSafeConnection<Workspace>;

fn deref(&self) -> &Self::Target {
&self.0
}
}

pub(crate) const WORKSPACES_MIGRATION: Migration = Migration::new(
"workspace",
&[indoc! {"
CREATE TABLE workspaces(
workspace_id BLOB PRIMARY KEY,
dock_anchor TEXT, -- Enum: 'Bottom' / 'Right' / 'Expanded'
dock_visible INTEGER, -- Boolean
timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL
) STRICT;

CREATE TABLE pane_groups(
group_id INTEGER PRIMARY KEY,
workspace_id BLOB NOT NULL,
parent_group_id INTEGER, -- NULL indicates that this is a root node
position INTEGER, -- NULL indicates that this is a root node
axis TEXT NOT NULL, -- Enum: 'Vertical' / 'Horizontal'
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
ON UPDATE CASCADE,
FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
) STRICT;

CREATE TABLE panes(
pane_id INTEGER PRIMARY KEY,
workspace_id BLOB NOT NULL,
parent_group_id INTEGER, -- NULL, this is a dock pane
position INTEGER, -- NULL, this is a dock pane
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
ON UPDATE CASCADE,
FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
) STRICT;

CREATE TABLE items(
item_id INTEGER NOT NULL, -- This is the item's view id, so this is not unique
workspace_id BLOB NOT NULL,
pane_id INTEGER NOT NULL,
kind TEXT NOT NULL,
position INTEGER NOT NULL,
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
ON UPDATE CASCADE,
FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
ON DELETE CASCADE,
PRIMARY KEY(item_id, workspace_id)
) STRICT;
"}],
);
connection!(DB: WorkspaceDb<Workspace>);

impl Domain for Workspace {
fn migrate(conn: &Connection) -> anyhow::Result<()> {
WORKSPACES_MIGRATION.run(&conn)
fn name() -> &'static str {
"workspace"
}

fn migrations() -> &'static [&'static str] {
&[indoc! {"
CREATE TABLE workspaces(
workspace_id BLOB PRIMARY KEY,
dock_visible INTEGER, -- Boolean
dock_anchor TEXT, -- Enum: 'Bottom' / 'Right' / 'Expanded'
timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL
) STRICT;

CREATE TABLE pane_groups(
group_id INTEGER PRIMARY KEY,
workspace_id BLOB NOT NULL,
parent_group_id INTEGER, -- NULL indicates that this is a root node
position INTEGER, -- NULL indicates that this is a root node
axis TEXT NOT NULL, -- Enum: 'Vertical' / 'Horizontal'
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
ON UPDATE CASCADE,
FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
) STRICT;

CREATE TABLE panes(
pane_id INTEGER PRIMARY KEY,
workspace_id BLOB NOT NULL,
parent_group_id INTEGER, -- NULL, this is a dock pane
position INTEGER, -- NULL, this is a dock pane
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
ON UPDATE CASCADE,
FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
) STRICT;

CREATE TABLE items(
item_id INTEGER NOT NULL, -- This is the item's view id, so this is not unique
workspace_id BLOB NOT NULL,
pane_id INTEGER NOT NULL,
kind TEXT NOT NULL,
position INTEGER NOT NULL,
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
ON UPDATE CASCADE,
FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
ON DELETE CASCADE,
PRIMARY KEY(item_id, workspace_id)
) STRICT;
"}]
}
}
@@ -104,7 +92,7 @@ impl WorkspaceDb {

// Note that we re-assign the workspace_id here in case it's empty
// and we've grabbed the most recent workspace
let (workspace_id, dock_position) = iife!({
let (workspace_id, dock_position): (WorkspaceId, DockPosition) = iife!({
if worktree_roots.len() == 0 {
self.select_row(indoc! {"
SELECT workspace_id, dock_visible, dock_anchor
@@ -122,6 +110,7 @@ impl WorkspaceDb {
.flatten()?;

Some(SerializedWorkspace {
workspace_id: workspace_id.clone(),
dock_pane: self
.get_dock_pane(&workspace_id)
.context("Getting dock pane")
@@ -136,43 +125,47 @@ impl WorkspaceDb {

/// Saves a workspace using the worktree roots. Will garbage collect any workspaces
/// that used this workspace previously
pub fn save_workspace<P: AsRef<Path>>(
pub fn save_workspace(
&self,
worktree_roots: &[P],
old_roots: Option<&[P]>,
old_id: Option<WorkspaceId>,
workspace: &SerializedWorkspace,
) {
let workspace_id: WorkspaceId = worktree_roots.into();

self.with_savepoint("update_worktrees", || {
if let Some(old_roots) = old_roots {
let old_id: WorkspaceId = old_roots.into();

self.exec_bound("DELETE FROM WORKSPACES WHERE workspace_id = ?")?(&old_id)?;
if let Some(old_id) = old_id {
self.exec_bound(indoc! {"
DELETE FROM pane_groups WHERE workspace_id = ?"})?(&old_id)?;

// If collision, delete

self.exec_bound(indoc! {"
UPDATE OR REPLACE workspaces
SET workspace_id = ?,
dock_visible = ?,
dock_anchor = ?,
timestamp = CURRENT_TIMESTAMP
WHERE workspace_id = ?"})?((
&workspace.workspace_id,
workspace.dock_position,
&old_id,
))?;
} else {
self.exec_bound(indoc! {"
DELETE FROM pane_groups WHERE workspace_id = ?"})?(&workspace.workspace_id)?;
self.exec_bound(
"INSERT OR REPLACE INTO workspaces(workspace_id, dock_visible, dock_anchor) VALUES (?, ?, ?)",
)?((&workspace.workspace_id, workspace.dock_position))?;
}

// Delete any previous workspaces with the same roots. This cascades to all
// other tables that are based on the same roots set.
// Insert new workspace into workspaces table if none were found
self.exec_bound("DELETE FROM workspaces WHERE workspace_id = ?;")?(&workspace_id)?;

self.exec_bound(
"INSERT INTO workspaces(workspace_id, dock_visible, dock_anchor) VALUES (?, ?, ?)",
)?((&workspace_id, workspace.dock_position))?;

// Save center pane group and dock pane
self.save_pane_group(&workspace_id, &workspace.center_group, None)?;
self.save_pane(&workspace_id, &workspace.dock_pane, None)?;
self.save_pane_group(&workspace.workspace_id, &workspace.center_group, None)?;
self.save_pane(&workspace.workspace_id, &workspace.dock_pane, None)?;

Ok(())
})
.with_context(|| {
format!(
"Update workspace with roots {:?}",
worktree_roots
.iter()
.map(|p| p.as_ref())
.collect::<Vec<_>>()
workspace.workspace_id.paths()
)
})
.log_err();
@@ -253,15 +246,19 @@ impl WorkspaceDb {
bail!("Pane groups must have a SerializedPaneGroup::Group at the root")
}

let (parent_id, position) = unzip_option(parent);

match pane_group {
SerializedPaneGroup::Group { axis, children } => {
let parent_id = self.insert_bound("INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis) VALUES (?, ?, ?, ?)")?
((workspace_id, parent_id, position, *axis))?;
let (parent_id, position) = unzip_option(parent);

let group_id = self.select_row_bound::<_, i64>(indoc!{"
INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis)
VALUES (?, ?, ?, ?)
RETURNING group_id"})?
((workspace_id, parent_id, position, *axis))?
.ok_or_else(|| anyhow!("Couldn't retrieve group_id from inserted pane_group"))?;

for (position, group) in children.iter().enumerate() {
self.save_pane_group(workspace_id, group, Some((parent_id, position)))?
self.save_pane_group(workspace_id, group, Some((group_id, position)))?
}
Ok(())
}
@@ -289,10 +286,13 @@ impl WorkspaceDb {
parent: Option<(GroupId, usize)>,
) -> Result<()> {
let (parent_id, order) = unzip_option(parent);

let pane_id = self.insert_bound(
"INSERT INTO panes(workspace_id, parent_group_id, position) VALUES (?, ?, ?)",
)?((workspace_id, parent_id, order))?;

let pane_id = self.select_row_bound::<_, i64>(indoc!{"
INSERT INTO panes(workspace_id, parent_group_id, position)
VALUES (?, ?, ?)
RETURNING pane_id"},
)?((workspace_id, parent_id, order))?
.ok_or_else(|| anyhow!("Could not retrieve inserted pane_id"))?;

self.save_items(workspace_id, pane_id, &pane.children)
.context("Saving items")
@@ -300,15 +300,9 @@ impl WorkspaceDb {

pub(crate) fn get_items(&self, pane_id: PaneId) -> Result<Vec<SerializedItem>> {
Ok(self.select_bound(indoc! {"
SELECT item_id, kind FROM items
SELECT kind, item_id FROM items
WHERE pane_id = ?
ORDER BY position"})?(pane_id)?
.into_iter()
.map(|(item_id, kind)| match kind {
SerializedItemKind::Terminal => SerializedItem::Terminal { item_id },
_ => unimplemented!(),
})
.collect())
ORDER BY position"})?(pane_id)?)
}

pub(crate) fn save_items(
@@ -317,15 +311,11 @@ impl WorkspaceDb {
pane_id: PaneId,
items: &[SerializedItem],
) -> Result<()> {
let mut delete_old = self
.exec_bound("DELETE FROM items WHERE workspace_id = ? AND pane_id = ? AND item_id = ?")
.context("Preparing deletion")?;
let mut insert_new = self.exec_bound(
"INSERT INTO items(item_id, workspace_id, pane_id, kind, position) VALUES (?, ?, ?, ?, ?)",
let mut insert = self.exec_bound(
"INSERT INTO items(workspace_id, pane_id, position, kind, item_id) VALUES (?, ?, ?, ?, ?)",
).context("Preparing insertion")?;
for (position, item) in items.iter().enumerate() {
delete_old((workspace_id, pane_id, item.item_id()))?;
insert_new((item.item_id(), workspace_id, pane_id, item.kind(), position))?;
insert((workspace_id, pane_id, position, item))?;
}

Ok(())
@@ -339,34 +329,102 @@ mod tests {

use super::*;

#[test]
fn test_full_workspace_serialization() {
env_logger::try_init().ok();

let db = WorkspaceDb(open_memory_db("test_full_workspace_serialization"));

let dock_pane = crate::persistence::model::SerializedPane {
children: vec![
SerializedItem::new("Terminal", 1),
SerializedItem::new("Terminal", 2),
SerializedItem::new("Terminal", 3),
SerializedItem::new("Terminal", 4),
],
};

// -----------------
// | 1,2   | 5,6   |
// | - - - |       |
// | 3,4   |       |
// -----------------
let center_group = SerializedPaneGroup::Group {
axis: gpui::Axis::Horizontal,
children: vec![
SerializedPaneGroup::Group {
axis: gpui::Axis::Vertical,
children: vec![
SerializedPaneGroup::Pane(SerializedPane {
children: vec![
SerializedItem::new("Terminal", 5),
SerializedItem::new("Terminal", 6),
],
}),
SerializedPaneGroup::Pane(SerializedPane {
children: vec![
SerializedItem::new("Terminal", 7),
SerializedItem::new("Terminal", 8),
],
}),
],
},
SerializedPaneGroup::Pane(SerializedPane {
children: vec![
SerializedItem::new("Terminal", 9),
SerializedItem::new("Terminal", 10),
],
}),
],
};

let workspace = SerializedWorkspace {
workspace_id: (["/tmp", "/tmp2"]).into(),
dock_position: DockPosition::Shown(DockAnchor::Bottom),
center_group,
dock_pane,
};

db.save_workspace(None, &workspace);
let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);

assert_eq!(workspace, round_trip_workspace.unwrap());

// Test guaranteed duplicate IDs
db.save_workspace(None, &workspace);
db.save_workspace(None, &workspace);

let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]);
assert_eq!(workspace, round_trip_workspace.unwrap());
}

#[test]
fn test_workspace_assignment() {
// env_logger::try_init().ok();
env_logger::try_init().ok();

let db = WorkspaceDb(open_memory_db("test_basic_functionality"));

let workspace_1 = SerializedWorkspace {
workspace_id: (["/tmp", "/tmp2"]).into(),
dock_position: crate::dock::DockPosition::Shown(DockAnchor::Bottom),
center_group: Default::default(),
dock_pane: Default::default(),
};

let workspace_2 = SerializedWorkspace {
let mut workspace_2 = SerializedWorkspace {
workspace_id: (["/tmp"]).into(),
dock_position: crate::dock::DockPosition::Hidden(DockAnchor::Expanded),
center_group: Default::default(),
dock_pane: Default::default(),
};

let workspace_3 = SerializedWorkspace {
dock_position: crate::dock::DockPosition::Shown(DockAnchor::Right),
center_group: Default::default(),
dock_pane: Default::default(),
};

db.save_workspace(&["/tmp", "/tmp2"], None, &workspace_1);
db.save_workspace(&["/tmp"], None, &workspace_2);

db::write_db_to(&db, "test.db").unwrap();
db.save_workspace(None, &workspace_1);
db.save_workspace(None, &workspace_2);

// Test that paths are treated as a set
assert_eq!(
@@ -383,23 +441,32 @@
assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None);

// Test 'mutate' case of updating a pre-existing id
db.save_workspace(&["/tmp", "/tmp2"], Some(&["/tmp", "/tmp2"]), &workspace_2);
workspace_2.workspace_id = (["/tmp", "/tmp2"]).into();
db.save_workspace(Some((&["/tmp"]).into()), &workspace_2);
assert_eq!(
db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
workspace_2
);

// Test other mechanism for mutating
db.save_workspace(&["/tmp", "/tmp2"], None, &workspace_3);
let mut workspace_3 = SerializedWorkspace {
workspace_id: (&["/tmp", "/tmp2"]).into(),
dock_position: DockPosition::Shown(DockAnchor::Right),
center_group: Default::default(),
dock_pane: Default::default(),
};

db.save_workspace(None, &workspace_3);
assert_eq!(
db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
workspace_3
);

// Make sure that updating paths differently also works
workspace_3.workspace_id = (["/tmp3", "/tmp4", "/tmp2"]).into();
db.save_workspace(
&["/tmp3", "/tmp4", "/tmp2"],
Some(&["/tmp", "/tmp2"]),
Some((&["/tmp", "/tmp2"]).into()),
&workspace_3,
);
assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None);
@@ -408,16 +475,21 @@
.unwrap(),
workspace_3
);
}

use crate::dock::DockPosition;
use crate::persistence::model::SerializedWorkspace;
use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup};

fn default_workspace(
fn default_workspace<P: AsRef<Path>>(
workspace_id: &[P],
dock_pane: SerializedPane,
center_group: &SerializedPaneGroup,
) -> SerializedWorkspace {
SerializedWorkspace {
workspace_id: workspace_id.into(),
dock_position: crate::dock::DockPosition::Hidden(DockAnchor::Right),
center_group: center_group.clone(),
dock_pane,
|
||||
|
||||
#[test]
|
||||
fn test_basic_dock_pane() {
|
||||
// env_logger::try_init().ok();
|
||||
env_logger::try_init().ok();
|
||||
|
||||
let db = WorkspaceDb(open_memory_db("basic_dock_pane"));
|
||||
|
||||
let dock_pane = crate::persistence::model::SerializedPane {
|
||||
children: vec![
|
||||
SerializedItem::Terminal { item_id: 1 },
|
||||
SerializedItem::Terminal { item_id: 4 },
|
||||
SerializedItem::Terminal { item_id: 2 },
|
||||
SerializedItem::Terminal { item_id: 3 },
|
||||
SerializedItem::new("Terminal", 1),
|
||||
SerializedItem::new("Terminal", 4),
|
||||
SerializedItem::new("Terminal", 2),
|
||||
SerializedItem::new("Terminal", 3),
|
||||
],
|
||||
};
|
||||
|
||||
let workspace = default_workspace(dock_pane, &Default::default());
|
||||
|
||||
db.save_workspace(&["/tmp"], None, &workspace);
|
||||
let workspace = default_workspace(&["/tmp"], dock_pane, &Default::default());
|
||||
|
||||
db.save_workspace(None, &workspace);
|
||||
|
||||
let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();
|
||||
|
||||
assert_eq!(workspace.dock_pane, new_workspace.dock_pane);
|
||||
@@ -467,30 +539,30 @@ mod tests {
children: vec![
SerializedPaneGroup::Pane(SerializedPane {
children: vec![
SerializedItem::Terminal { item_id: 1 },
SerializedItem::Terminal { item_id: 2 },
SerializedItem::new("Terminal", 1),
SerializedItem::new("Terminal", 2),
],
}),
SerializedPaneGroup::Pane(SerializedPane {
children: vec![
SerializedItem::Terminal { item_id: 4 },
SerializedItem::Terminal { item_id: 3 },
SerializedItem::new("Terminal", 4),
SerializedItem::new("Terminal", 3),
],
}),
],
},
SerializedPaneGroup::Pane(SerializedPane {
children: vec![
SerializedItem::Terminal { item_id: 5 },
SerializedItem::Terminal { item_id: 6 },
SerializedItem::new("Terminal", 5),
SerializedItem::new("Terminal", 6),
],
}),
],
};

let workspace = default_workspace(Default::default(), &center_pane);
let workspace = default_workspace(&["/tmp"], Default::default(), &center_pane);

db.save_workspace(&["/tmp"], None, &workspace);
db.save_workspace(None, &workspace);

assert_eq!(workspace.center_group, center_pane);
}
@ -3,7 +3,7 @@ use std::{
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use anyhow::{bail, Result};
|
||||
use anyhow::Result;
|
||||
|
||||
use gpui::Axis;
|
||||
|
||||
@ -16,10 +16,10 @@ use sqlez::{
|
||||
use crate::dock::DockPosition;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct WorkspaceId(Arc<Vec<PathBuf>>);
|
||||
pub struct WorkspaceId(Arc<Vec<PathBuf>>);
|
||||
|
||||
impl WorkspaceId {
|
||||
pub fn paths(self) -> Arc<Vec<PathBuf>> {
|
||||
pub fn paths(&self) -> Arc<Vec<PathBuf>> {
|
||||
self.0.clone()
|
||||
}
|
||||
}
|
||||
@ -52,6 +52,7 @@ impl Column for WorkspaceId {
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct SerializedWorkspace {
|
||||
pub workspace_id: WorkspaceId,
|
||||
pub dock_position: DockPosition,
|
||||
pub center_group: SerializedPaneGroup,
|
||||
pub dock_pane: SerializedPane,
|
||||
@ -90,67 +91,33 @@ pub type GroupId = i64;
pub type PaneId = i64;
pub type ItemId = usize;

pub(crate) enum SerializedItemKind {
    Editor,
    Diagnostics,
    ProjectSearch,
    Terminal,
}

impl Bind for SerializedItemKind {
    fn bind(&self, statement: &Statement, start_index: i32) -> anyhow::Result<i32> {
        match self {
            SerializedItemKind::Editor => "Editor",
            SerializedItemKind::Diagnostics => "Diagnostics",
            SerializedItemKind::ProjectSearch => "ProjectSearch",
            SerializedItemKind::Terminal => "Terminal",
        }
        .bind(statement, start_index)
    }
}

impl Column for SerializedItemKind {
    fn column(statement: &mut Statement, start_index: i32) -> anyhow::Result<(Self, i32)> {
        String::column(statement, start_index).and_then(|(kind_text, next_index)| {
            Ok((
                match kind_text.as_ref() {
                    "Editor" => SerializedItemKind::Editor,
                    "Diagnostics" => SerializedItemKind::Diagnostics,
                    "ProjectSearch" => SerializedItemKind::ProjectSearch,
                    "Terminal" => SerializedItemKind::Terminal,
                    _ => bail!("Stored serialized item kind is incorrect"),
                },
                next_index,
            ))
        })
    }
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum SerializedItem {
    Editor { item_id: usize, path: Arc<Path> },
    Diagnostics { item_id: usize },
    ProjectSearch { item_id: usize, query: String },
    Terminal { item_id: usize },
pub struct SerializedItem {
    pub kind: Arc<str>,
    pub item_id: ItemId,
}

impl SerializedItem {
    pub fn item_id(&self) -> usize {
        match self {
            SerializedItem::Editor { item_id, .. } => *item_id,
            SerializedItem::Diagnostics { item_id } => *item_id,
            SerializedItem::ProjectSearch { item_id, .. } => *item_id,
            SerializedItem::Terminal { item_id } => *item_id,
    pub fn new(kind: impl AsRef<str>, item_id: ItemId) -> Self {
        Self {
            kind: Arc::from(kind.as_ref()),
            item_id,
        }
    }
}

    pub(crate) fn kind(&self) -> SerializedItemKind {
        match self {
            SerializedItem::Editor { .. } => SerializedItemKind::Editor,
            SerializedItem::Diagnostics { .. } => SerializedItemKind::Diagnostics,
            SerializedItem::ProjectSearch { .. } => SerializedItemKind::ProjectSearch,
            SerializedItem::Terminal { .. } => SerializedItemKind::Terminal,
        }
impl Bind for &SerializedItem {
    fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
        let next_index = statement.bind(self.kind.clone(), start_index)?;
        statement.bind(self.item_id, next_index)
    }
}

impl Column for SerializedItem {
    fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
        let (kind, next_index) = Arc::<str>::column(statement, start_index)?;
        let (item_id, next_index) = ItemId::column(statement, next_index)?;
        Ok((SerializedItem { kind, item_id }, next_index))
    }
}

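The flattened SerializedItem above replaces the old SerializedItemKind enum: the kind now travels as data, bound as a text column immediately followed by the item id, and Column reads the two back in the same order. A minimal, self-contained sketch of that ordering contract, using a fake statement type rather than sqlez's real Statement (all names below are illustrative):

    use std::collections::BTreeMap;
    use std::sync::Arc;

    // Stand-in for a prepared statement's parameter slots.
    #[derive(Default)]
    struct FakeStatement {
        slots: BTreeMap<i32, String>,
    }

    impl FakeStatement {
        fn bind_text(&mut self, index: i32, value: &str) -> i32 {
            self.slots.insert(index, value.to_string());
            index + 1 // return the next free parameter index
        }
        fn bind_int(&mut self, index: i32, value: i64) -> i32 {
            self.slots.insert(index, value.to_string());
            index + 1
        }
    }

    fn main() {
        let kind: Arc<str> = Arc::from("Terminal");
        let item_id: usize = 3;
        let mut statement = FakeStatement::default();

        // Mirrors `impl Bind for &SerializedItem`: kind first, item_id right after.
        let next_index = statement.bind_text(1, &kind);
        statement.bind_int(next_index, item_id as i64);

        assert_eq!(statement.slots[&1], "Terminal");
        assert_eq!(statement.slots[&2], "3");
    }
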
@ -187,8 +154,8 @@ mod tests {

        db.exec(indoc::indoc! {"
            CREATE TABLE workspace_id_test(
                workspace_id BLOB,
                dock_anchor TEXT
                workspace_id BLOB,
                dock_anchor TEXT
            );"})
        .unwrap()()
        .unwrap();
@ -1,7 +1,7 @@
use crate::{
    item::ItemEvent,
    persistence::model::{ItemId, WorkspaceId},
    Item, ItemNavHistory,
    Item, ItemNavHistory, Pane, Workspace,
};
use anyhow::{anyhow, Result};
use call::participant::{Frame, RemoteVideoTrack};
@ -10,8 +10,10 @@ use futures::StreamExt;
use gpui::{
    elements::*,
    geometry::{rect::RectF, vector::vec2f},
    Entity, ModelHandle, MouseButton, RenderContext, Task, View, ViewContext,
    Entity, ModelHandle, MouseButton, RenderContext, Task, View, ViewContext, ViewHandle,
    WeakViewHandle,
};
use project::Project;
use settings::Settings;
use smallvec::SmallVec;
use std::{
@ -191,10 +193,12 @@ impl Item for SharedScreen {
    }

    fn deserialize(
        workspace_id: WorkspaceId,
        item_id: ItemId,
        cx: &mut ViewContext<Self>,
    ) -> Result<Self> {
        Err(anyhow!("SharedScreen can not be deserialized"))
        _project: ModelHandle<Project>,
        _workspace: WeakViewHandle<Workspace>,
        _workspace_id: WorkspaceId,
        _item_id: ItemId,
        _cx: &mut ViewContext<Pane>,
    ) -> Task<Result<ViewHandle<Self>>> {
        unreachable!("Shared screen can not be deserialized")
    }
}
@ -44,7 +44,8 @@ use language::LanguageRegistry;
use log::{error, warn};
pub use pane::*;
pub use pane_group::*;
use persistence::model::{ItemId, WorkspaceId};
use persistence::model::SerializedItem;
pub use persistence::model::{ItemId, WorkspaceId};
use postage::prelude::Stream;
use project::{Project, ProjectEntryId, ProjectPath, ProjectStore, Worktree, WorktreeId};
use serde::Deserialize;
@ -57,7 +58,7 @@ use theme::{Theme, ThemeRegistry};
pub use toolbar::{ToolbarItemLocation, ToolbarItemView};
use util::ResultExt;

use crate::persistence::model::SerializedWorkspace;
use crate::persistence::model::{SerializedPane, SerializedWorkspace};

#[derive(Clone, PartialEq)]
pub struct RemoveWorktreeFromProject(pub WorktreeId);
@ -337,22 +338,27 @@ pub fn register_followable_item<I: FollowableItem>(cx: &mut MutableAppContext) {
    });
}

type SerializableItemBuilders = HashMap<
    &'static str,
    fn(WorkspaceId, ItemId, &mut ViewContext<Pane>) -> Option<Box<dyn ItemHandle>>,
type ItemDeserializers = HashMap<
    Arc<str>,
    fn(
        ModelHandle<Project>,
        WeakViewHandle<Workspace>,
        WorkspaceId,
        ItemId,
        &mut ViewContext<Pane>,
    ) -> Task<Result<Box<dyn ItemHandle>>>,
>;
pub fn register_deserializable_item<I: Item>(cx: &mut MutableAppContext) {
    cx.update_default_global(|deserializers: &mut SerializableItemBuilders, _| {
    cx.update_default_global(|deserializers: &mut ItemDeserializers, _cx| {
        if let Some(serialized_item_kind) = I::serialized_item_kind() {
            deserializers.insert(serialized_item_kind, |workspace_id, item_id, cx| {
                if let Some(v) =
                    cx.add_option_view(|cx| I::deserialize(workspace_id, item_id, cx).log_err())
                {
                    Some(Box::new(v))
                } else {
                    None
                }
            });
            deserializers.insert(
                Arc::from(serialized_item_kind),
                |project, workspace, workspace_id, item_id, cx| {
                    let task = I::deserialize(project, workspace, workspace_id, item_id, cx);
                    cx.foreground()
                        .spawn(async { Ok(Box::new(task.await?) as Box<_>) })
                },
            );
        }
    });
}
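The registration above keys a global ItemDeserializers map by the item's kind string and stores a function that forwards to that item's async deserialize. A minimal, self-contained sketch of the register-then-lookup pattern that deserialize_pane relies on further down (plain types stand in for gpui's Task and Box<dyn ItemHandle>; the names are illustrative):

    use std::collections::HashMap;
    use std::sync::Arc;

    type ItemId = usize;

    // Stand-in for a restored `Box<dyn ItemHandle>`.
    #[derive(Debug)]
    struct RestoredItem {
        kind: &'static str,
        item_id: ItemId,
    }

    type Deserializer = fn(ItemId) -> Result<RestoredItem, String>;

    fn deserialize_terminal(item_id: ItemId) -> Result<RestoredItem, String> {
        Ok(RestoredItem { kind: "Terminal", item_id })
    }

    fn main() {
        // What register_deserializable_item effectively does for one item type.
        let mut deserializers: HashMap<Arc<str>, Deserializer> = HashMap::new();
        deserializers.insert(Arc::from("Terminal"), deserialize_terminal);

        // What deserialize_pane does with each stored item: look the kind up,
        // or fail with a descriptive error instead of panicking.
        let stored_kind: Arc<str> = Arc::from("Terminal");
        let restored = match deserializers.get(&stored_kind) {
            Some(deserialize) => deserialize(42),
            None => Err(format!("Deserializer does not exist for item kind: {}", stored_kind)),
        };
        println!("{:?}", restored);
    }
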
@ -549,6 +555,8 @@ impl Workspace {
            }
            project::Event::WorktreeRemoved(_) | project::Event::WorktreeAdded => {
                this.update_window_title(cx);
                // TODO: Cache workspace_id on workspace and read from it here
                this.serialize_workspace(None, cx);
            }
            project::Event::DisconnectedFromHost => {
                this.update_window_edited(cx);
@ -568,21 +576,9 @@ impl Workspace {
        .detach();
        cx.focus(&center_pane);
        cx.emit(Event::PaneAdded(center_pane.clone()));
        let dock = Dock::new(
            dock_default_factory,
            serialized_workspace
                .as_ref()
                .map(|ws| ws.dock_position)
                .clone(),
            cx,
        );
        let dock = Dock::new(dock_default_factory, cx);
        let dock_pane = dock.pane().clone();

        if let Some(serialized_workspace) = serialized_workspace {

            // Fill them in?
        }

        let fs = project.read(cx).fs().clone();
        let user_store = project.read(cx).user_store();
        let client = project.read(cx).client();
@ -636,13 +632,13 @@ impl Workspace {

        let mut this = Workspace {
            modal: None,
            weak_self: weak_handle,
            weak_self: weak_handle.clone(),
            center: PaneGroup::new(center_pane.clone()),
            dock,
            // When removing an item, the last element remaining in this array
            // is used to find where focus should fallback to. As such, the order
            // of these two variables is important.
            panes: vec![dock_pane, center_pane.clone()],
            panes: vec![dock_pane.clone(), center_pane.clone()],
            panes_by_item: Default::default(),
            active_pane: center_pane.clone(),
            last_active_center_pane: Some(center_pane.downgrade()),
@ -655,7 +651,7 @@ impl Workspace {
            fs,
            left_sidebar,
            right_sidebar,
            project,
            project: project.clone(),
            leader_state: Default::default(),
            follower_states_by_leader: Default::default(),
            last_leaders_by_pane: Default::default(),
@ -663,9 +659,15 @@ impl Workspace {
            active_call,
            _observe_current_user,
        };
        this.project_remote_id_changed(this.project.read(cx).remote_id(), cx);
        this.project_remote_id_changed(project.read(cx).remote_id(), cx);
        cx.defer(|this, cx| this.update_window_title(cx));

        if let Some(serialized_workspace) = serialized_workspace {
            cx.defer(move |_, cx| {
                Self::load_from_serialized_workspace(weak_handle, serialized_workspace, cx)
            });
        }

        this
    }

@ -1315,6 +1317,7 @@ impl Workspace {
    pub fn add_item(&mut self, item: Box<dyn ItemHandle>, cx: &mut ViewContext<Self>) {
        let active_pane = self.active_pane().clone();
        Pane::add_item(self, &active_pane, item, true, true, None, cx);
        self.serialize_workspace(None, cx);
    }

    pub fn open_path(
@ -1519,6 +1522,7 @@ impl Workspace {
                    entry.remove();
                }
            }
            self.serialize_workspace(None, cx);
        }
        _ => {}
    }
@ -2250,6 +2254,140 @@ impl Workspace {
            _ => {}
        }
    }

    fn workspace_id(&self, cx: &AppContext) -> WorkspaceId {
        self.project()
            .read(cx)
            .visible_worktrees(cx)
            .map(|worktree| worktree.read(cx).abs_path())
            .collect::<Vec<_>>()
            .into()
    }

    fn serialize_workspace(&self, old_id: Option<WorkspaceId>, cx: &mut MutableAppContext) {
        let dock_pane = SerializedPane {
            children: self
                .dock
                .pane()
                .read(cx)
                .items()
                .filter_map(|item_handle| {
                    Some(SerializedItem {
                        kind: Arc::from(item_handle.serialized_item_kind()?),
                        item_id: item_handle.id(),
                    })
                })
                .collect::<Vec<_>>(),
        };

        let serialized_workspace = SerializedWorkspace {
            workspace_id: self.workspace_id(cx),
            dock_position: self.dock.position(),
            dock_pane,
            center_group: Default::default(),
        };

        cx.background()
            .spawn(async move {
                persistence::DB.save_workspace(old_id, &serialized_workspace);
            })
            .detach();
    }

    fn load_from_serialized_workspace(
        workspace: WeakViewHandle<Workspace>,
        serialized_workspace: SerializedWorkspace,
        cx: &mut MutableAppContext,
    ) {
        // fn process_splits(
        //     pane_group: SerializedPaneGroup,
        //     parent: Option<PaneGroup>,
        //     workspace: ViewHandle<Workspace>,
        //     cx: &mut AsyncAppContext,
        // ) {
        //     match pane_group {
        //         SerializedPaneGroup::Group { axis, children } => {
        //             process_splits(pane_group, parent)
        //         }
        //         SerializedPaneGroup::Pane(pane) => {
        //             process_pane(pane)
        //         },
        //     }
        // }

        async fn deserialize_pane(
            project: ModelHandle<Project>,
            pane: SerializedPane,
            pane_handle: ViewHandle<Pane>,
            workspace_id: WorkspaceId,
            workspace: &ViewHandle<Workspace>,
            cx: &mut AsyncAppContext,
        ) {
            for item in pane.children {
                let project = project.clone();
                let workspace_id = workspace_id.clone();
                let item_handle = pane_handle
                    .update(cx, |_, cx| {
                        if let Some(deserializer) = cx.global::<ItemDeserializers>().get(&item.kind)
                        {
                            deserializer(
                                project,
                                workspace.downgrade(),
                                workspace_id,
                                item.item_id,
                                cx,
                            )
                        } else {
                            Task::ready(Err(anyhow!(
                                "Deserializer does not exist for item kind: {}",
                                item.kind
                            )))
                        }
                    })
                    .await
                    .log_err();

                if let Some(item_handle) = item_handle {
                    workspace.update(cx, |workspace, cx| {
                        Pane::add_item(
                            workspace,
                            &pane_handle,
                            item_handle,
                            false,
                            false,
                            None,
                            cx,
                        );
                    })
                }
            }
        }

        cx.spawn(|mut cx| async move {
            if let Some(workspace) = workspace.upgrade(&cx) {
                let (project, dock_pane_handle) = workspace.read_with(&cx, |workspace, _| {
                    (workspace.project().clone(), workspace.dock_pane().clone())
                });
                deserialize_pane(
                    project,
                    serialized_workspace.dock_pane,
                    dock_pane_handle,
                    serialized_workspace.workspace_id,
                    &workspace,
                    &mut cx,
                )
                .await;

                // Traverse the splits tree and add to things
                // process_splits(serialized_workspace.center_group, None, workspace, &mut cx);

                workspace.update(&mut cx, |workspace, cx| {
                    Dock::set_dock_position(workspace, serialized_workspace.dock_position, cx)
                });
            }
        })
        .detach();
    }
}

impl Entity for Workspace {