Dock persistence working!
Co-Authored-By: Mikayla Maki <mikayla@zed.dev>
Parent: c1f7902309
Commit: d20d21c6a2
Cargo.lock (generated): 3 changes
@@ -1738,6 +1738,7 @@ dependencies = [
 "collections",
 "context_menu",
 "ctor",
+"db",
 "drag_and_drop",
 "env_logger",
 "futures 0.3.25",
@@ -1761,6 +1762,7 @@ dependencies = [
 "smallvec",
 "smol",
 "snippet",
+"sqlez",
 "sum_tree",
 "text",
 "theme",
@@ -7629,6 +7631,7 @@ dependencies = [
 "context_menu",
 "db",
 "drag_and_drop",
+"env_logger",
 "fs",
 "futures 0.3.25",
 "gpui",
@@ -52,7 +52,7 @@ use std::{
 use theme::ThemeRegistry;
 use unindent::Unindent as _;
 use util::post_inc;
-use workspace::{shared_screen::SharedScreen, item::Item, SplitDirection, ToggleFollow, Workspace};
+use workspace::{item::Item, shared_screen::SharedScreen, SplitDirection, ToggleFollow, Workspace};

 #[ctor::ctor]
 fn init_logger() {
@@ -1,6 +1,6 @@
 pub mod kvp;

-use std::fs;
+use std::fs::create_dir_all;
 use std::path::Path;

 #[cfg(any(test, feature = "test-support"))]
@@ -8,24 +8,29 @@ use anyhow::Result;
 use indoc::indoc;
 #[cfg(any(test, feature = "test-support"))]
 use sqlez::connection::Connection;
-use sqlez::domain::Domain;
+use sqlez::domain::{Domain, Migrator};
 use sqlez::thread_safe_connection::ThreadSafeConnection;
+use util::channel::RELEASE_CHANNEL_NAME;
+use util::paths::DB_DIR;

 const INITIALIZE_QUERY: &'static str = indoc! {"
 PRAGMA journal_mode=WAL;
 PRAGMA synchronous=NORMAL;
+PRAGMA busy_timeout=1;
 PRAGMA foreign_keys=TRUE;
 PRAGMA case_sensitive_like=TRUE;
 "};

 /// Open or create a database at the given directory path.
-pub fn open_file_db<D: Domain>() -> ThreadSafeConnection<D> {
+pub fn open_file_db<M: Migrator>() -> ThreadSafeConnection<M> {
 // Use 0 for now. Will implement incrementing and clearing of old db files soon TM
-let current_db_dir = (*util::paths::DB_DIR).join(Path::new(&format!(
-"0-{}",
-*util::channel::RELEASE_CHANNEL_NAME
-)));
-fs::create_dir_all(&current_db_dir).expect("Should be able to create the database directory");
+let current_db_dir = (*DB_DIR).join(Path::new(&format!("0-{}", *RELEASE_CHANNEL_NAME)));
+
+// if *RELEASE_CHANNEL == ReleaseChannel::Dev {
+// remove_dir_all(&current_db_dir).ok();
+// }
+
+create_dir_all(&current_db_dir).expect("Should be able to create the database directory");
 let db_path = current_db_dir.join(Path::new("db.sqlite"));

 ThreadSafeConnection::new(db_path.to_string_lossy().as_ref(), true)
@@ -44,3 +49,23 @@ pub fn write_db_to<D: Domain, P: AsRef<Path>>(
 let destination = Connection::open_file(dest.as_ref().to_string_lossy().as_ref());
 conn.backup_main(&destination)
 }
+
+/// Implements a basic DB wrapper for a given domain
+#[macro_export]
+macro_rules! connection {
+($id:ident: $t:ident<$d:ty>) => {
+pub struct $t(::sqlez::thread_safe_connection::ThreadSafeConnection<$d>);
+
+impl ::std::ops::Deref for $t {
+type Target = ::sqlez::thread_safe_connection::ThreadSafeConnection<$d>;
+
+fn deref(&self) -> &Self::Target {
+&self.0
+}
+}
+
+lazy_static! {
+pub static ref $id: $t = $t(::db::open_file_db());
+}
+};
+}
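The `connection!` macro added above is what later files in this diff (the editor and workspace persistence modules) use to declare their database globals. A minimal sketch of the intended usage, with placeholder names (`MyDomain`, `MY_DB`, `MyDb`) that are not part of this commit:

```rust
// Sketch only. Assumes the Domain trait shape introduced later in this diff
// (fn name() / fn migrations()) and the db / indoc / lazy_static crates in scope.
use db::connection;
use indoc::indoc;
use lazy_static::lazy_static;
use sqlez::domain::Domain;

pub struct MyDomain;

impl Domain for MyDomain {
    fn name() -> &'static str {
        "my_domain"
    }

    fn migrations() -> &'static [&'static str] {
        &[indoc! {"
            CREATE TABLE my_table(
                value TEXT
            ) STRICT;
        "}]
    }
}

// Expands to `pub struct MyDb(ThreadSafeConnection<MyDomain>)`, a Deref impl,
// and a lazily initialized global `MY_DB` backed by `::db::open_file_db()`.
connection!(MY_DB: MyDb<MyDomain>);
```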
@@ -1,22 +1,9 @@
 use anyhow::Result;
 use indoc::indoc;

-use sqlez::{
-connection::Connection, domain::Domain, migrations::Migration,
-thread_safe_connection::ThreadSafeConnection,
-};
+use sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection};
 use std::ops::Deref;

-pub(crate) const KVP_MIGRATION: Migration = Migration::new(
-"kvp",
-&[indoc! {"
-CREATE TABLE kv_store(
-key TEXT PRIMARY KEY,
-value TEXT NOT NULL
-) STRICT;
-"}],
-);
-
 lazy_static::lazy_static! {
 pub static ref KEY_VALUE_STORE: KeyValueStore =
 KeyValueStore(crate::open_file_db());
@@ -26,8 +13,17 @@ lazy_static::lazy_static! {
 pub struct KeyValueStore(ThreadSafeConnection<KeyValueStore>);

 impl Domain for KeyValueStore {
-fn migrate(conn: &Connection) -> anyhow::Result<()> {
-KVP_MIGRATION.run(conn)
+fn name() -> &'static str {
+"kvp"
+}
+
+fn migrations() -> &'static [&'static str] {
+&[indoc! {"
+CREATE TABLE kv_store(
+key TEXT PRIMARY KEY,
+value TEXT NOT NULL
+) STRICT;
+"}]
 }
 }

@@ -31,7 +31,7 @@ use std::{
 use util::TryFutureExt;
 use workspace::{
 item::{Item, ItemEvent, ItemHandle},
-ItemNavHistory, Workspace,
+ItemNavHistory, Pane, Workspace,
 };

 actions!(diagnostics, [Deploy]);
@@ -613,6 +613,20 @@ impl Item for ProjectDiagnosticsEditor {
 fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
 self.editor.update(cx, |editor, cx| editor.deactivated(cx));
 }
+
+fn serialized_item_kind() -> Option<&'static str> {
+Some("diagnostics")
+}
+
+fn deserialize(
+project: ModelHandle<Project>,
+workspace: WeakViewHandle<Workspace>,
+_workspace_id: workspace::WorkspaceId,
+_item_id: workspace::ItemId,
+cx: &mut ViewContext<Pane>,
+) -> Task<Result<ViewHandle<Self>>> {
+Task::ready(Ok(cx.add_view(|cx| Self::new(project, workspace, cx))))
+}
 }

 fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
@@ -23,6 +23,7 @@ test-support = [
 drag_and_drop = { path = "../drag_and_drop" }
 text = { path = "../text" }
 clock = { path = "../clock" }
+db = { path = "../db" }
 collections = { path = "../collections" }
 context_menu = { path = "../context_menu" }
 fuzzy = { path = "../fuzzy" }
@@ -37,6 +38,7 @@ snippet = { path = "../snippet" }
 sum_tree = { path = "../sum_tree" }
 theme = { path = "../theme" }
 util = { path = "../util" }
+sqlez = { path = "../sqlez" }
 workspace = { path = "../workspace" }
 aho-corasick = "0.7"
 anyhow = "1.0"
@@ -9,6 +9,7 @@ mod link_go_to_definition;
 mod mouse_context_menu;
 pub mod movement;
 mod multi_buffer;
+mod persistence;
 pub mod selections_collection;

 #[cfg(test)]
@@ -7,7 +7,7 @@ use anyhow::{anyhow, Result};
 use futures::FutureExt;
 use gpui::{
 elements::*, geometry::vector::vec2f, AppContext, Entity, ModelHandle, MutableAppContext,
-RenderContext, Subscription, Task, View, ViewContext, ViewHandle,
+RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
 };
 use language::{Bias, Buffer, File as _, OffsetRangeExt, Point, SelectionGoal};
 use project::{File, FormatTrigger, Project, ProjectEntryId, ProjectPath};
@@ -26,7 +26,7 @@ use util::TryFutureExt;
 use workspace::{
 item::{FollowableItem, Item, ItemEvent, ItemHandle, ProjectItem},
 searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle},
-ItemNavHistory, StatusItemView, ToolbarItemLocation,
+ItemId, ItemNavHistory, Pane, StatusItemView, ToolbarItemLocation, Workspace, WorkspaceId,
 };

 pub const MAX_TAB_TITLE_LEN: usize = 24;
@@ -552,6 +552,21 @@ impl Item for Editor {
 }));
 Some(breadcrumbs)
 }
+
+fn serialized_item_kind() -> Option<&'static str> {
+Some("Editor")
+}
+
+fn deserialize(
+_project: ModelHandle<Project>,
+_workspace: WeakViewHandle<Workspace>,
+_workspace_id: WorkspaceId,
+_item_id: ItemId,
+_cx: &mut ViewContext<Pane>,
+) -> Task<Result<ViewHandle<Self>>> {
+// Look up the path with this key associated, create a self with that path
+unimplemented!()
+}
 }

 impl ProjectItem for Editor {
crates/editor/src/persistence.rs (new file, 30 lines)
@@ -0,0 +1,30 @@
+use std::path::PathBuf;
+
+use db::connection;
+use indoc::indoc;
+use lazy_static::lazy_static;
+use project::WorktreeId;
+use sqlez::domain::Domain;
+use workspace::{ItemId, Workspace};
+
+use crate::Editor;
+
+connection!(DB: EditorDb<(Workspace, Editor)>);
+
+impl Domain for Editor {
+fn name() -> &'static str {
+"editor"
+}
+
+fn migrations() -> &'static [&'static str] {
+&[indoc! {"
+
+"}]
+}
+}
+
+impl EditorDb {
+fn get_path(_item_id: ItemId, _workspace_id: WorktreeId) -> PathBuf {
+unimplemented!();
+}
+}
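The editor migrations above are still empty and `get_path` is stubbed out. Purely as an illustration of where this is headed, a hypothetical implementation might look like the sketch below; the `editor_paths` table and the bindability of `ItemId`/`WorktreeId` are assumptions, not part of this commit:

```rust
// Sketch only: assumes a hypothetical editor_paths(item_id, workspace_id, path) table
// and that ItemId / WorktreeId implement the sqlez Bind trait.
impl EditorDb {
    fn get_path(item_id: ItemId, workspace_id: WorktreeId) -> anyhow::Result<PathBuf> {
        let path: Option<String> = DB.select_row_bound(
            "SELECT path FROM editor_paths WHERE item_id = ? AND workspace_id = ?",
        )?((item_id, workspace_id))?;
        path.map(PathBuf::from)
            .ok_or_else(|| anyhow::anyhow!("no path stored for this editor item"))
    }
}
```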
@@ -804,6 +804,7 @@ impl Project {
 &self.collaborators
 }

+/// Collect all worktrees, including ones that don't appear in the project panel
 pub fn worktrees<'a>(
 &'a self,
 cx: &'a AppContext,
@@ -813,6 +814,7 @@ impl Project {
 .filter_map(move |worktree| worktree.upgrade(cx))
 }

+/// Collect all user-visible worktrees, the ones that appear in the project panel
 pub fn visible_worktrees<'a>(
 &'a self,
 cx: &'a AppContext,
@@ -353,6 +353,20 @@ impl Item for ProjectSearchView {
 fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
 self.results_editor.breadcrumbs(theme, cx)
 }
+
+fn serialized_item_kind() -> Option<&'static str> {
+None
+}
+
+fn deserialize(
+_project: ModelHandle<Project>,
+_workspace: WeakViewHandle<Workspace>,
+_workspace_id: workspace::WorkspaceId,
+_item_id: workspace::ItemId,
+_cx: &mut ViewContext<Pane>,
+) -> Task<anyhow::Result<ViewHandle<Self>>> {
+unimplemented!()
+}
 }

 impl ProjectSearchView {
@@ -2,6 +2,7 @@ use std::{
 ffi::OsStr,
 os::unix::prelude::OsStrExt,
 path::{Path, PathBuf},
+sync::Arc,
 };

 use anyhow::Result;
@@ -118,6 +119,13 @@ impl Bind for &str {
 }
 }

+impl Bind for Arc<str> {
+fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+statement.bind_text(start_index, self.as_ref())?;
+Ok(start_index + 1)
+}
+}
+
 impl Bind for String {
 fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
 statement.bind_text(start_index, self)?;
@@ -125,6 +133,13 @@ impl Bind for String {
 }
 }

+impl Column for Arc<str> {
+fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+let result = statement.column_text(start_index)?;
+Ok((Arc::from(result), start_index + 1))
+}
+}
+
 impl Column for String {
 fn column<'a>(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
 let result = statement.column_text(start_index)?;
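With the two impls just added, `Arc<str>` can be bound and read back like any other text value. A small sketch (not part of the commit), using the `exec`/`exec_bound`/`select_row` helpers that the tests in this diff already rely on:

```rust
use std::sync::Arc;

use anyhow::Result;
use sqlez::connection::Connection;

// Sketch only: round-trips an Arc<str> through a bound statement.
fn roundtrip(connection: &Connection) -> Result<()> {
    connection.exec("CREATE TABLE IF NOT EXISTS texts(text TEXT)")?()?;

    let text: Arc<str> = Arc::from("Some test text");
    connection.exec_bound("INSERT INTO texts(text) VALUES (?)")?(text.clone())?;

    // Assumes select_row yields an Option of the requested column type.
    let read_back: Option<Arc<str>> =
        connection.select_row::<Arc<str>>("SELECT text FROM texts")?()?;
    assert_eq!(read_back.as_deref(), Some(text.as_ref()));
    Ok(())
}
```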
@@ -54,10 +54,6 @@ impl Connection {
 self.persistent
 }

-pub(crate) fn last_insert_id(&self) -> i64 {
-unsafe { sqlite3_last_insert_rowid(self.sqlite3) }
-}
-
 pub fn backup_main(&self, destination: &Connection) -> Result<()> {
 unsafe {
 let backup = sqlite3_backup_init(
@@ -126,7 +122,7 @@ mod test {
 let text = "Some test text";

 connection
-.insert_bound("INSERT INTO text (text) VALUES (?);")
+.exec_bound("INSERT INTO text (text) VALUES (?);")
 .unwrap()(text)
 .unwrap();

@@ -155,7 +151,7 @@ mod test {
 let tuple2 = ("test2".to_string(), 32, vec![64, 32, 16, 8, 4, 2, 1, 0]);

 let mut insert = connection
-.insert_bound::<(String, usize, Vec<u8>)>(
+.exec_bound::<(String, usize, Vec<u8>)>(
 "INSERT INTO test (text, integer, blob) VALUES (?, ?, ?)",
 )
 .unwrap();
@@ -185,7 +181,7 @@ mod test {
 .unwrap();

 connection
-.insert_bound("INSERT INTO bools(t, f) VALUES (?, ?);")
+.exec_bound("INSERT INTO bools(t, f) VALUES (?, ?)")
 .unwrap()((true, false))
 .unwrap();

@@ -210,7 +206,7 @@ mod test {
 .unwrap();
 let blob = vec![0, 1, 2, 4, 8, 16, 32, 64];
 connection1
-.insert_bound::<Vec<u8>>("INSERT INTO blobs (data) VALUES (?);")
+.exec_bound::<Vec<u8>>("INSERT INTO blobs (data) VALUES (?);")
 .unwrap()(blob.clone())
 .unwrap();

@@ -1,39 +1,50 @@
 use crate::connection::Connection;

 pub trait Domain {
-fn migrate(conn: &Connection) -> anyhow::Result<()>;
+fn name() -> &'static str;
+fn migrations() -> &'static [&'static str];
 }

-impl<D1: Domain, D2: Domain> Domain for (D1, D2) {
-fn migrate(conn: &Connection) -> anyhow::Result<()> {
-D1::migrate(conn)?;
-D2::migrate(conn)
+pub trait Migrator {
+fn migrate(connection: &Connection) -> anyhow::Result<()>;
+}
+
+impl<D: Domain> Migrator for D {
+fn migrate(connection: &Connection) -> anyhow::Result<()> {
+connection.migrate(Self::name(), Self::migrations())
 }
 }

-impl<D1: Domain, D2: Domain, D3: Domain> Domain for (D1, D2, D3) {
-fn migrate(conn: &Connection) -> anyhow::Result<()> {
-D1::migrate(conn)?;
-D2::migrate(conn)?;
-D3::migrate(conn)
+impl<D1: Domain, D2: Domain> Migrator for (D1, D2) {
+fn migrate(connection: &Connection) -> anyhow::Result<()> {
+D1::migrate(connection)?;
+D2::migrate(connection)
 }
 }

-impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain> Domain for (D1, D2, D3, D4) {
-fn migrate(conn: &Connection) -> anyhow::Result<()> {
-D1::migrate(conn)?;
-D2::migrate(conn)?;
-D3::migrate(conn)?;
-D4::migrate(conn)
+impl<D1: Domain, D2: Domain, D3: Domain> Migrator for (D1, D2, D3) {
+fn migrate(connection: &Connection) -> anyhow::Result<()> {
+D1::migrate(connection)?;
+D2::migrate(connection)?;
+D3::migrate(connection)
 }
 }

-impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain, D5: Domain> Domain for (D1, D2, D3, D4, D5) {
-fn migrate(conn: &Connection) -> anyhow::Result<()> {
-D1::migrate(conn)?;
-D2::migrate(conn)?;
-D3::migrate(conn)?;
-D4::migrate(conn)?;
-D5::migrate(conn)
+impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain> Migrator for (D1, D2, D3, D4) {
+fn migrate(connection: &Connection) -> anyhow::Result<()> {
+D1::migrate(connection)?;
+D2::migrate(connection)?;
+D3::migrate(connection)?;
+D4::migrate(connection)
+}
+}
+
+impl<D1: Domain, D2: Domain, D3: Domain, D4: Domain, D5: Domain> Migrator for (D1, D2, D3, D4, D5) {
+fn migrate(connection: &Connection) -> anyhow::Result<()> {
+D1::migrate(connection)?;
+D2::migrate(connection)?;
+D3::migrate(connection)?;
+D4::migrate(connection)?;
+D5::migrate(connection)
 }
 }
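The split above is the core of the change: `Domain` now just names a domain and lists its migration steps, while `Migrator` (blanket-implemented for every `Domain` and for tuples of up to five domains) is what actually drives `connection.migrate`. A sketch with two placeholder domains, not part of the commit:

```rust
use anyhow::Result;
use sqlez::connection::Connection;
use sqlez::domain::{Domain, Migrator};

struct DomainA;
struct DomainB;

impl Domain for DomainA {
    fn name() -> &'static str {
        "domain_a"
    }
    fn migrations() -> &'static [&'static str] {
        &["CREATE TABLE a(value TEXT) STRICT;"]
    }
}

impl Domain for DomainB {
    fn name() -> &'static str {
        "domain_b"
    }
    fn migrations() -> &'static [&'static str] {
        &["CREATE TABLE b(value TEXT) STRICT;"]
    }
}

// Runs connection.migrate("domain_a", ...) and then connection.migrate("domain_b", ...),
// via the Migrator impl for two-element tuples shown above.
fn migrate_both(connection: &Connection) -> Result<()> {
    <(DomainA, DomainB)>::migrate(connection)
}
```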
@@ -9,53 +9,27 @@ use indoc::{formatdoc, indoc};

 use crate::connection::Connection;

-const MIGRATIONS_MIGRATION: Migration = Migration::new(
-"migrations",
-// The migrations migration must be infallable because it runs to completion
-// with every call to migration run and is run unchecked.
-&[indoc! {"
-CREATE TABLE IF NOT EXISTS migrations (
-domain TEXT,
-step INTEGER,
-migration TEXT
-)
-"}],
-);
-
-#[derive(Debug)]
-pub struct Migration {
-domain: &'static str,
-migrations: &'static [&'static str],
-}
-
-impl Migration {
-pub const fn new(domain: &'static str, migrations: &'static [&'static str]) -> Self {
-Self { domain, migrations }
-}
-
-fn run_unchecked(&self, connection: &Connection) -> Result<()> {
-for migration in self.migrations {
-connection.exec(migration)?()?;
-}
-
-Ok(())
-}
-
-pub fn run(&self, connection: &Connection) -> Result<()> {
+impl Connection {
+pub fn migrate(&self, domain: &'static str, migrations: &[&'static str]) -> Result<()> {
 // Setup the migrations table unconditionally
-MIGRATIONS_MIGRATION.run_unchecked(connection)?;
+self.exec(indoc! {"
+CREATE TABLE IF NOT EXISTS migrations (
+domain TEXT,
+step INTEGER,
+migration TEXT
+)"})?()?;

 let completed_migrations =
-connection.select_bound::<&str, (String, usize, String)>(indoc! {"
+self.select_bound::<&str, (String, usize, String)>(indoc! {"
 SELECT domain, step, migration FROM migrations
 WHERE domain = ?
 ORDER BY step
-"})?(self.domain)?;
+"})?(domain)?;

-let mut store_completed_migration = connection
-.insert_bound("INSERT INTO migrations (domain, step, migration) VALUES (?, ?, ?)")?;
+let mut store_completed_migration =
+self.exec_bound("INSERT INTO migrations (domain, step, migration) VALUES (?, ?, ?)")?;

-for (index, migration) in self.migrations.iter().enumerate() {
+for (index, migration) in migrations.iter().enumerate() {
 if let Some((_, _, completed_migration)) = completed_migrations.get(index) {
 if completed_migration != migration {
 return Err(anyhow!(formatdoc! {"
@@ -65,15 +39,15 @@ impl Migration {
 {}

 Proposed migration:
-{}", self.domain, index, completed_migration, migration}));
+{}", domain, index, completed_migration, migration}));
 } else {
 // Migration already run. Continue
 continue;
 }
 }

-connection.exec(migration)?()?;
-store_completed_migration((self.domain, index, *migration))?;
+self.exec(migration)?()?;
+store_completed_migration((domain, index, *migration))?;
 }

 Ok(())
@@ -84,22 +58,23 @@ impl Migration {
 mod test {
 use indoc::indoc;

-use crate::{connection::Connection, migrations::Migration};
+use crate::connection::Connection;

 #[test]
 fn test_migrations_are_added_to_table() {
 let connection = Connection::open_memory("migrations_are_added_to_table");

 // Create first migration with a single step and run it
-let mut migration = Migration::new(
-"test",
-&[indoc! {"
-CREATE TABLE test1 (
-a TEXT,
-b TEXT
-)"}],
-);
-migration.run(&connection).unwrap();
+connection
+.migrate(
+"test",
+&[indoc! {"
+CREATE TABLE test1 (
+a TEXT,
+b TEXT
+)"}],
+)
+.unwrap();

 // Verify it got added to the migrations table
 assert_eq!(
@@ -107,23 +82,31 @@ mod test {
 .select::<String>("SELECT (migration) FROM migrations")
 .unwrap()()
 .unwrap()[..],
-migration.migrations
-);
+&[indoc! {"

-// Add another step to the migration and run it again
-migration.migrations = &[
-indoc! {"
 CREATE TABLE test1 (
 a TEXT,
 b TEXT
-)"},
-indoc! {"
-CREATE TABLE test2 (
-c TEXT,
-d TEXT
-)"},
-];
-migration.run(&connection).unwrap();
+)"}],
+);
+
+// Add another step to the migration and run it again
+connection
+.migrate(
+"test",
+&[
+indoc! {"
+CREATE TABLE test1 (
+a TEXT,
+b TEXT
+)"},
+indoc! {"
+CREATE TABLE test2 (
+c TEXT,
+d TEXT
+)"},
+],
+)
+.unwrap();

 // Verify it is also added to the migrations table
 assert_eq!(
@@ -131,7 +114,18 @@ mod test {
 .select::<String>("SELECT (migration) FROM migrations")
 .unwrap()()
 .unwrap()[..],
-migration.migrations
+&[
+indoc! {"
+CREATE TABLE test1 (
+a TEXT,
+b TEXT
+)"},
+indoc! {"
+CREATE TABLE test2 (
+c TEXT,
+d TEXT
+)"},
+],
 );
 }

@@ -150,7 +144,7 @@ mod test {
 .unwrap();

 let mut store_completed_migration = connection
-.insert_bound::<(&str, usize, String)>(indoc! {"
+.exec_bound::<(&str, usize, String)>(indoc! {"
 INSERT INTO migrations (domain, step, migration)
 VALUES (?, ?, ?)"})
 .unwrap();
@@ -171,8 +165,7 @@ mod test {
 fn migrations_dont_rerun() {
 let connection = Connection::open_memory("migrations_dont_rerun");

 // Create migration which clears a table
-let migration = Migration::new("test", &["DELETE FROM test_table"]);

 // Manually create the table for that migration with a row
 connection
@@ -197,7 +190,9 @@ mod test {
 );

 // Run the migration verifying that the row got dropped
-migration.run(&connection).unwrap();
+connection
+.migrate("test", &["DELETE FROM test_table"])
+.unwrap();
 assert_eq!(
 connection
 .select_row::<usize>("SELECT * FROM test_table")
@@ -213,7 +208,9 @@ mod test {
 .unwrap();

 // Run the same migration again and verify that the table was left unchanged
-migration.run(&connection).unwrap();
+connection
+.migrate("test", &["DELETE FROM test_table"])
+.unwrap();
 assert_eq!(
 connection
 .select_row::<usize>("SELECT * FROM test_table")
@@ -228,22 +225,22 @@ mod test {
 let connection = Connection::open_memory("changed_migration_fails");

 // Create a migration with two steps and run it
-Migration::new(
-"test migration",
-&[
-indoc! {"
+connection
+.migrate(
+"test migration",
+&[
+indoc! {"
 CREATE TABLE test (
 col INTEGER
 )"},
 indoc! {"
 INSERT INTO test (col) VALUES (1)"},
 ],
 )
-.run(&connection)
-.unwrap();
+.unwrap();

 // Create another migration with the same domain but different steps
-let second_migration_result = Migration::new(
+let second_migration_result = connection.migrate(
 "test migration",
 &[
 indoc! {"
@@ -253,8 +250,7 @@ mod test {
 indoc! {"
 INSERT INTO test (color) VALUES (1)"},
 ],
-)
-.run(&connection);
+);

 // Verify new migration returns error when run
 assert!(second_migration_result.is_err())
@@ -256,11 +256,6 @@ impl<'a> Statement<'a> {
 }
 }

-pub fn insert(&mut self) -> Result<i64> {
-self.exec()?;
-Ok(self.connection.last_insert_id())
-}
-
 pub fn exec(&mut self) -> Result<()> {
 fn logic(this: &mut Statement) -> Result<()> {
 while this.step()? == StepResult::Row {}
@@ -3,20 +3,23 @@ use std::{marker::PhantomData, ops::Deref, sync::Arc};
 use connection::Connection;
 use thread_local::ThreadLocal;

-use crate::{connection, domain::Domain};
+use crate::{
+connection,
+domain::{Domain, Migrator},
+};

-pub struct ThreadSafeConnection<D: Domain> {
+pub struct ThreadSafeConnection<M: Migrator> {
 uri: Arc<str>,
 persistent: bool,
 initialize_query: Option<&'static str>,
 connection: Arc<ThreadLocal<Connection>>,
-_pd: PhantomData<D>,
+_pd: PhantomData<M>,
 }

-unsafe impl<T: Domain> Send for ThreadSafeConnection<T> {}
-unsafe impl<T: Domain> Sync for ThreadSafeConnection<T> {}
+unsafe impl<T: Migrator> Send for ThreadSafeConnection<T> {}
+unsafe impl<T: Migrator> Sync for ThreadSafeConnection<T> {}

-impl<D: Domain> ThreadSafeConnection<D> {
+impl<M: Migrator> ThreadSafeConnection<M> {
 pub fn new(uri: &str, persistent: bool) -> Self {
 Self {
 uri: Arc::from(uri),
@@ -72,7 +75,11 @@ impl<D: Domain> Clone for ThreadSafeConnection<D> {
 }
 }

-impl<D: Domain> Deref for ThreadSafeConnection<D> {
+// TODO:
+// 1. When migration or initialization fails, move the corrupted db to a holding place and create a new one
+// 2. If the new db also fails, downgrade to a shared in memory db
+// 3. In either case notify the user about what went wrong
+impl<M: Migrator> Deref for ThreadSafeConnection<M> {
 type Target = Connection;

 fn deref(&self) -> &Self::Target {
@@ -91,7 +98,7 @@ impl<D: Domain> Deref for ThreadSafeConnection<D> {
 .unwrap();
 }

-D::migrate(&connection).expect("Migrations failed");
+M::migrate(&connection).expect("Migrations failed");

 connection
 })
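Because the generic parameter is now any `Migrator`, a `ThreadSafeConnection` can be opened over a tuple of domains, which is what `connection!(DB: EditorDb<(Workspace, Editor)>)` earlier in this diff relies on. A minimal sketch, not part of the commit:

```rust
use editor::Editor;
use sqlez::thread_safe_connection::ThreadSafeConnection;
use workspace::Workspace;

// Sketch only: both the "workspace" and "editor" domains are migrated lazily, on the
// first thread-local deref of the connection, via the Migrator impl for (D1, D2).
fn open_example_db() -> ThreadSafeConnection<(Workspace, Editor)> {
    ThreadSafeConnection::new("db.sqlite", true)
}
```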
@@ -20,19 +20,6 @@ impl Connection {
 Ok(move |bindings| statement.with_bindings(bindings)?.exec())
 }

-pub fn insert<'a>(&'a self, query: &str) -> Result<impl 'a + FnMut() -> Result<i64>> {
-let mut statement = Statement::prepare(&self, query)?;
-Ok(move || statement.insert())
-}
-
-pub fn insert_bound<'a, B: Bind>(
-&'a self,
-query: &str,
-) -> Result<impl 'a + FnMut(B) -> Result<i64>> {
-let mut statement = Statement::prepare(&self, query)?;
-Ok(move |bindings| statement.with_bindings(bindings)?.insert())
-}
-
 pub fn select<'a, C: Column>(
 &'a self,
 query: &str,
@@ -34,7 +34,9 @@ use mappings::mouse::{

 use procinfo::LocalProcessInfo;
 use settings::{AlternateScroll, Settings, Shell, TerminalBlink};
+use terminal_container_view::TerminalContainer;
 use util::ResultExt;
+use workspace::register_deserializable_item;

 use std::{
 cmp::min,
@@ -67,6 +69,8 @@ use lazy_static::lazy_static;
 pub fn init(cx: &mut MutableAppContext) {
 terminal_view::init(cx);
 terminal_container_view::init(cx);
+
+register_deserializable_item::<TerminalContainer>(cx);
 }

 ///Scrolling is unbearably sluggish by default. Alacritty supports a configurable
@@ -5,7 +5,7 @@ use alacritty_terminal::index::Point;
 use dirs::home_dir;
 use gpui::{
 actions, elements::*, AnyViewHandle, AppContext, Entity, ModelHandle, MutableAppContext, Task,
-View, ViewContext, ViewHandle,
+View, ViewContext, ViewHandle, WeakViewHandle,
 };
 use util::truncate_and_trailoff;
 use workspace::searchable::{SearchEvent, SearchOptions, SearchableItem, SearchableItemHandle};
@@ -13,6 +13,7 @@ use workspace::{
 item::{Item, ItemEvent},
 ToolbarItemLocation, Workspace,
 };
+use workspace::{register_deserializable_item, Pane};

 use project::{LocalWorktree, Project, ProjectPath};
 use settings::{AlternateScroll, Settings, WorkingDirectory};
@@ -26,6 +27,8 @@ actions!(terminal, [DeployModal]);

 pub fn init(cx: &mut MutableAppContext) {
 cx.add_action(TerminalContainer::deploy);
+
+register_deserializable_item::<TerminalContainer>(cx);
 }

 //Make terminal view an enum, that can give you views for the error and non-error states
@@ -127,7 +130,7 @@ impl TerminalContainer {
 TerminalContainerContent::Error(view)
 }
 };
-cx.focus(content.handle());
+// cx.focus(content.handle());

 TerminalContainer {
 content,
@@ -375,6 +378,22 @@ impl Item for TerminalContainer {
 )
 .boxed()])
 }
+
+fn serialized_item_kind() -> Option<&'static str> {
+Some("Terminal")
+}
+
+fn deserialize(
+_project: ModelHandle<Project>,
+_workspace: WeakViewHandle<Workspace>,
+_workspace_id: workspace::WorkspaceId,
+_item_id: workspace::ItemId,
+cx: &mut ViewContext<Pane>,
+) -> Task<anyhow::Result<ViewHandle<Self>>> {
+// TODO: Pull the current working directory out of the DB.
+
+Task::ready(Ok(cx.add_view(|cx| TerminalContainer::new(None, false, cx))))
+}
 }

 impl SearchableItem for TerminalContainer {
@@ -6,7 +6,8 @@ use gpui::{
 Padding, ParentElement,
 },
 fonts::TextStyle,
-Border, Element, Entity, MutableAppContext, Quad, RenderContext, View, ViewContext,
+Border, Element, Entity, ModelHandle, MutableAppContext, Quad, RenderContext, Task, View,
+ViewContext, ViewHandle, WeakViewHandle,
 };
 use project::{Project, ProjectEntryId, ProjectPath};
 use settings::Settings;
@@ -14,13 +15,15 @@ use smallvec::SmallVec;
 use theme::{ColorScheme, Layer, Style, StyleSet};
 use workspace::{
 item::{Item, ItemEvent},
-Workspace,
+register_deserializable_item, Pane, Workspace,
 };

 actions!(theme, [DeployThemeTestbench]);

 pub fn init(cx: &mut MutableAppContext) {
 cx.add_action(ThemeTestbench::deploy);
+
+register_deserializable_item::<ThemeTestbench>(cx)
 }

 pub struct ThemeTestbench {}
@@ -357,4 +360,18 @@ impl Item for ThemeTestbench {
 fn to_item_events(_: &Self::Event) -> Vec<ItemEvent> {
 Vec::new()
 }
+
+fn serialized_item_kind() -> Option<&'static str> {
+Some("ThemeTestBench")
+}
+
+fn deserialize(
+_project: ModelHandle<Project>,
+_workspace: WeakViewHandle<Workspace>,
+_workspace_id: workspace::WorkspaceId,
+_item_id: workspace::ItemId,
+cx: &mut ViewContext<Pane>,
+) -> Task<gpui::anyhow::Result<ViewHandle<Self>>> {
+Task::ready(Ok(cx.add_view(|_| Self {})))
+}
 }
@@ -37,6 +37,7 @@ bincode = "1.2.1"
 anyhow = "1.0.38"
 futures = "0.3"
 lazy_static = "1.4"
+env_logger = "0.9.1"
 log = { version = "0.4.16", features = ["kv_unstable_serde"] }
 parking_lot = "0.11.1"
 postage = { version = "0.4.1", features = ["futures-traits"] }
@@ -137,13 +137,8 @@ pub struct Dock {
 }

 impl Dock {
-pub fn new(
-default_item_factory: DefaultItemFactory,
-position: Option<DockPosition>,
-cx: &mut ViewContext<Workspace>,
-) -> Self {
-let position = position
-.unwrap_or_else(|| DockPosition::Hidden(cx.global::<Settings>().default_dock_anchor));
+pub fn new(default_item_factory: DefaultItemFactory, cx: &mut ViewContext<Workspace>) -> Self {
+let position = DockPosition::Hidden(cx.global::<Settings>().default_dock_anchor);

 let pane = cx.add_view(|cx| Pane::new(Some(position.anchor()), cx));
 pane.update(cx, |pane, cx| {
@@ -175,7 +170,7 @@ impl Dock {
 self.position.is_visible() && self.position.anchor() == anchor
 }

-fn set_dock_position(
+pub(crate) fn set_dock_position(
 workspace: &mut Workspace,
 new_position: DockPosition,
 cx: &mut ViewContext<Workspace>,
@@ -211,6 +206,7 @@ impl Dock {
 cx.focus(last_active_center_pane);
 }
 cx.emit(crate::Event::DockAnchorChanged);
+workspace.serialize_workspace(None, cx);
 cx.notify();
 }

@@ -347,6 +343,10 @@ impl Dock {
 }
 })
 }
+
+pub fn position(&self) -> DockPosition {
+self.position
+}
 }

 pub struct ToggleDockButton {
@@ -117,15 +117,18 @@ pub trait Item: View {
 fn breadcrumb_location(&self) -> ToolbarItemLocation {
 ToolbarItemLocation::Hidden
 }

 fn breadcrumbs(&self, _theme: &Theme, _cx: &AppContext) -> Option<Vec<ElementBox>> {
 None
 }
 fn serialized_item_kind() -> Option<&'static str>;
 fn deserialize(
+project: ModelHandle<Project>,
+workspace: WeakViewHandle<Workspace>,
 workspace_id: WorkspaceId,
 item_id: ItemId,
-cx: &mut ViewContext<Self>,
-) -> Result<Self>;
+cx: &mut ViewContext<Pane>,
+) -> Task<Result<ViewHandle<Self>>>;
 }

 pub trait ItemHandle: 'static + fmt::Debug {
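Every `Item` implementation touched by this commit follows the same pattern for the two persistence hooks above: return a stable kind string and rebuild (or decline to rebuild) the view in `deserialize`. A condensed sketch with a placeholder `MyView` type; the other required `Item` methods are omitted:

```rust
// Sketch only: MyView and MyView::new are placeholders, not part of this commit.
impl Item for MyView {
    fn serialized_item_kind() -> Option<&'static str> {
        Some("MyView")
    }

    fn deserialize(
        project: ModelHandle<Project>,
        _workspace: WeakViewHandle<Workspace>,
        _workspace_id: WorkspaceId,
        _item_id: ItemId,
        cx: &mut ViewContext<Pane>,
    ) -> Task<Result<ViewHandle<Self>>> {
        Task::ready(Ok(cx.add_view(|cx| MyView::new(project, cx))))
    }

    // ...remaining Item methods elided...
}
```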
@@ -181,6 +184,7 @@ pub trait ItemHandle: 'static + fmt::Debug {
 fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>>;
 fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation;
 fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option<Vec<ElementBox>>;
+fn serialized_item_kind(&self) -> Option<&'static str>;
 }

 pub trait WeakItemHandle {
@@ -515,6 +519,10 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
 fn breadcrumbs(&self, theme: &Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
 self.read(cx).breadcrumbs(theme, cx)
 }
+
+fn serialized_item_kind(&self) -> Option<&'static str> {
+T::serialized_item_kind()
+}
 }

 impl From<Box<dyn ItemHandle>> for AnyViewHandle {
@@ -645,15 +653,14 @@ impl<T: FollowableItem> FollowableItemHandle for ViewHandle<T> {
 pub(crate) mod test {
 use std::{any::Any, borrow::Cow, cell::Cell};

-use anyhow::anyhow;
 use gpui::{
 elements::Empty, AppContext, Element, ElementBox, Entity, ModelHandle, RenderContext, Task,
-View, ViewContext,
+View, ViewContext, ViewHandle, WeakViewHandle,
 };
 use project::{Project, ProjectEntryId, ProjectPath};
 use smallvec::SmallVec;

-use crate::{sidebar::SidebarItem, ItemNavHistory};
+use crate::{sidebar::SidebarItem, ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};

 use super::{Item, ItemEvent};

@@ -864,11 +871,13 @@ pub(crate) mod test {
 }

 fn deserialize(
-workspace_id: crate::persistence::model::WorkspaceId,
-item_id: crate::persistence::model::ItemId,
-cx: &mut ViewContext<Self>,
-) -> anyhow::Result<Self> {
-Err(anyhow!("Cannot deserialize test item"))
+_project: ModelHandle<Project>,
+_workspace: WeakViewHandle<Workspace>,
+_workspace_id: WorkspaceId,
+_item_id: ItemId,
+_cx: &mut ViewContext<Pane>,
+) -> Task<anyhow::Result<ViewHandle<Self>>> {
+unreachable!("Cannot deserialize test item")
 }
 }

@@ -2,93 +2,81 @@

 pub mod model;

-use std::ops::Deref;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;

-use anyhow::{bail, Context, Result};
-use db::open_file_db;
+use anyhow::{anyhow, bail, Result, Context};
+use db::connection;
 use gpui::Axis;
 use indoc::indoc;
 use lazy_static::lazy_static;

-use sqlez::thread_safe_connection::ThreadSafeConnection;
-use sqlez::{connection::Connection, domain::Domain, migrations::Migration};
+use sqlez::domain::Domain;
 use util::{iife, unzip_option, ResultExt};

+use crate::dock::DockPosition;
+
 use super::Workspace;

 use model::{
-GroupId, PaneId, SerializedItem, SerializedItemKind, SerializedPane, SerializedPaneGroup,
+GroupId, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup,
 SerializedWorkspace, WorkspaceId,
 };

-lazy_static! {
-pub static ref DB: WorkspaceDb = WorkspaceDb(open_file_db());
-}
-
-pub struct WorkspaceDb(ThreadSafeConnection<Workspace>);
-
-impl Deref for WorkspaceDb {
-type Target = ThreadSafeConnection<Workspace>;
-
-fn deref(&self) -> &Self::Target {
-&self.0
-}
-}
-
-pub(crate) const WORKSPACES_MIGRATION: Migration = Migration::new(
-"workspace",
-&[indoc! {"
-CREATE TABLE workspaces(
-workspace_id BLOB PRIMARY KEY,
-dock_anchor TEXT, -- Enum: 'Bottom' / 'Right' / 'Expanded'
-dock_visible INTEGER, -- Boolean
-timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL
-) STRICT;
-
-CREATE TABLE pane_groups(
-group_id INTEGER PRIMARY KEY,
-workspace_id BLOB NOT NULL,
-parent_group_id INTEGER, -- NULL indicates that this is a root node
-position INTEGER, -- NULL indicates that this is a root node
-axis TEXT NOT NULL, -- Enum: 'Vertical' / 'Horizontal'
-FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
-ON DELETE CASCADE
-ON UPDATE CASCADE,
-FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
-) STRICT;
-
-CREATE TABLE panes(
-pane_id INTEGER PRIMARY KEY,
-workspace_id BLOB NOT NULL,
-parent_group_id INTEGER, -- NULL, this is a dock pane
-position INTEGER, -- NULL, this is a dock pane
-FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
-ON DELETE CASCADE
-ON UPDATE CASCADE,
-FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
-) STRICT;
-
-CREATE TABLE items(
-item_id INTEGER NOT NULL, -- This is the item's view id, so this is not unique
-workspace_id BLOB NOT NULL,
-pane_id INTEGER NOT NULL,
-kind TEXT NOT NULL,
-position INTEGER NOT NULL,
-FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
-ON DELETE CASCADE
-ON UPDATE CASCADE,
-FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
-ON DELETE CASCADE,
-PRIMARY KEY(item_id, workspace_id)
-) STRICT;
-"}],
-);
+connection!(DB: WorkspaceDb<Workspace>);

 impl Domain for Workspace {
-fn migrate(conn: &Connection) -> anyhow::Result<()> {
-WORKSPACES_MIGRATION.run(&conn)
+fn name() -> &'static str {
+"workspace"
+}
+
+fn migrations() -> &'static [&'static str] {
+&[indoc! {"
+CREATE TABLE workspaces(
+workspace_id BLOB PRIMARY KEY,
+dock_visible INTEGER, -- Boolean
+dock_anchor TEXT, -- Enum: 'Bottom' / 'Right' / 'Expanded'
+timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL
+) STRICT;
+
+CREATE TABLE pane_groups(
+group_id INTEGER PRIMARY KEY,
+workspace_id BLOB NOT NULL,
+parent_group_id INTEGER, -- NULL indicates that this is a root node
+position INTEGER, -- NULL indicates that this is a root node
+axis TEXT NOT NULL, -- Enum: 'Vertical' / 'Horizontal'
+FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+ON DELETE CASCADE
+ON UPDATE CASCADE,
+FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
+) STRICT;
+
+CREATE TABLE panes(
+pane_id INTEGER PRIMARY KEY,
+workspace_id BLOB NOT NULL,
+parent_group_id INTEGER, -- NULL, this is a dock pane
+position INTEGER, -- NULL, this is a dock pane
+FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+ON DELETE CASCADE
+ON UPDATE CASCADE,
+FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE
+) STRICT;
+
+CREATE TABLE items(
+item_id INTEGER NOT NULL, -- This is the item's view id, so this is not unique
+workspace_id BLOB NOT NULL,
+pane_id INTEGER NOT NULL,
+kind TEXT NOT NULL,
+position INTEGER NOT NULL,
+FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+ON DELETE CASCADE
+ON UPDATE CASCADE,
+FOREIGN KEY(pane_id) REFERENCES panes(pane_id)
+ON DELETE CASCADE,
+PRIMARY KEY(item_id, workspace_id)
+) STRICT;
+"}]
 }
 }

@@ -104,7 +92,7 @@ impl WorkspaceDb {

        // Note that we re-assign the workspace_id here in case it's empty
        // and we've grabbed the most recent workspace
-       let (workspace_id, dock_position) = iife!({
+       let (workspace_id, dock_position): (WorkspaceId, DockPosition) = iife!({
            if worktree_roots.len() == 0 {
                self.select_row(indoc! {"
                    SELECT workspace_id, dock_visible, dock_anchor
@@ -122,6 +110,7 @@ impl WorkspaceDb {
        .flatten()?;

        Some(SerializedWorkspace {
+           workspace_id: workspace_id.clone(),
            dock_pane: self
                .get_dock_pane(&workspace_id)
                .context("Getting dock pane")
@@ -136,43 +125,47 @@ impl WorkspaceDb {

    /// Saves a workspace using the worktree roots. Will garbage collect any workspaces
    /// that used this workspace previously
-   pub fn save_workspace<P: AsRef<Path>>(
+   pub fn save_workspace(
        &self,
-       worktree_roots: &[P],
-       old_roots: Option<&[P]>,
+       old_id: Option<WorkspaceId>,
        workspace: &SerializedWorkspace,
    ) {
-       let workspace_id: WorkspaceId = worktree_roots.into();
-
        self.with_savepoint("update_worktrees", || {
-           if let Some(old_roots) = old_roots {
-               let old_id: WorkspaceId = old_roots.into();
-               self.exec_bound("DELETE FROM WORKSPACES WHERE workspace_id = ?")?(&old_id)?;
+           if let Some(old_id) = old_id {
+               self.exec_bound(indoc! {"
+                   DELETE FROM pane_groups WHERE workspace_id = ?"})?(&old_id)?;
+
+               // If collision, delete
+
+               self.exec_bound(indoc! {"
+                   UPDATE OR REPLACE workspaces
+                   SET workspace_id = ?,
+                       dock_visible = ?,
+                       dock_anchor = ?,
+                       timestamp = CURRENT_TIMESTAMP
+                   WHERE workspace_id = ?"})?((
+                   &workspace.workspace_id,
+                   workspace.dock_position,
+                   &old_id,
+               ))?;
+           } else {
+               self.exec_bound(indoc! {"
+                   DELETE FROM pane_groups WHERE workspace_id = ?"})?(&workspace.workspace_id)?;
+               self.exec_bound(
+                   "INSERT OR REPLACE INTO workspaces(workspace_id, dock_visible, dock_anchor) VALUES (?, ?, ?)",
+               )?((&workspace.workspace_id, workspace.dock_position))?;
            }

-           // Delete any previous workspaces with the same roots. This cascades to all
-           // other tables that are based on the same roots set.
-           // Insert new workspace into workspaces table if none were found
-           self.exec_bound("DELETE FROM workspaces WHERE workspace_id = ?;")?(&workspace_id)?;
-
-           self.exec_bound(
-               "INSERT INTO workspaces(workspace_id, dock_visible, dock_anchor) VALUES (?, ?, ?)",
-           )?((&workspace_id, workspace.dock_position))?;
-
            // Save center pane group and dock pane
-           self.save_pane_group(&workspace_id, &workspace.center_group, None)?;
-           self.save_pane(&workspace_id, &workspace.dock_pane, None)?;
+           self.save_pane_group(&workspace.workspace_id, &workspace.center_group, None)?;
+           self.save_pane(&workspace.workspace_id, &workspace.dock_pane, None)?;

            Ok(())
        })
        .with_context(|| {
            format!(
                "Update workspace with roots {:?}",
-               worktree_roots
-                   .iter()
-                   .map(|p| p.as_ref())
-                   .collect::<Vec<_>>()
+               workspace.workspace_id.paths()
            )
        })
        .log_err();
@@ -253,15 +246,19 @@ impl WorkspaceDb {
            bail!("Pane groups must have a SerializedPaneGroup::Group at the root")
        }

-       let (parent_id, position) = unzip_option(parent);
-
        match pane_group {
            SerializedPaneGroup::Group { axis, children } => {
-               let parent_id = self.insert_bound("INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis) VALUES (?, ?, ?, ?)")?
-               ((workspace_id, parent_id, position, *axis))?;
+               let (parent_id, position) = unzip_option(parent);
+
+               let group_id = self.select_row_bound::<_, i64>(indoc!{"
+                       INSERT INTO pane_groups(workspace_id, parent_group_id, position, axis)
+                       VALUES (?, ?, ?, ?)
+                       RETURNING group_id"})?
+                   ((workspace_id, parent_id, position, *axis))?
+                   .ok_or_else(|| anyhow!("Couldn't retrieve group_id from inserted pane_group"))?;

                for (position, group) in children.iter().enumerate() {
-                   self.save_pane_group(workspace_id, group, Some((parent_id, position)))?
+                   self.save_pane_group(workspace_id, group, Some((group_id, position)))?
                }
                Ok(())
            }
@@ -289,10 +286,13 @@ impl WorkspaceDb {
        parent: Option<(GroupId, usize)>,
    ) -> Result<()> {
        let (parent_id, order) = unzip_option(parent);

-       let pane_id = self.insert_bound(
-           "INSERT INTO panes(workspace_id, parent_group_id, position) VALUES (?, ?, ?)",
-       )?((workspace_id, parent_id, order))?;
+       let pane_id = self.select_row_bound::<_, i64>(indoc!{"
+               INSERT INTO panes(workspace_id, parent_group_id, position)
+               VALUES (?, ?, ?)
+               RETURNING pane_id"},
+       )?((workspace_id, parent_id, order))?
+       .ok_or_else(|| anyhow!("Could not retrieve inserted pane_id"))?;

        self.save_items(workspace_id, pane_id, &pane.children)
            .context("Saving items")
@@ -300,15 +300,9 @@ impl WorkspaceDb {

    pub(crate) fn get_items(&self, pane_id: PaneId) -> Result<Vec<SerializedItem>> {
        Ok(self.select_bound(indoc! {"
-           SELECT item_id, kind FROM items
+           SELECT kind, item_id FROM items
            WHERE pane_id = ?
-           ORDER BY position"})?(pane_id)?
-           .into_iter()
-           .map(|(item_id, kind)| match kind {
-               SerializedItemKind::Terminal => SerializedItem::Terminal { item_id },
-               _ => unimplemented!(),
-           })
-           .collect())
+           ORDER BY position"})?(pane_id)?)
    }

    pub(crate) fn save_items(
@@ -317,15 +311,11 @@ impl WorkspaceDb {
        pane_id: PaneId,
        items: &[SerializedItem],
    ) -> Result<()> {
-       let mut delete_old = self
-           .exec_bound("DELETE FROM items WHERE workspace_id = ? AND pane_id = ? AND item_id = ?")
-           .context("Preparing deletion")?;
-       let mut insert_new = self.exec_bound(
-           "INSERT INTO items(item_id, workspace_id, pane_id, kind, position) VALUES (?, ?, ?, ?, ?)",
+       let mut insert = self.exec_bound(
+           "INSERT INTO items(workspace_id, pane_id, position, kind, item_id) VALUES (?, ?, ?, ?, ?)",
        ).context("Preparing insertion")?;
        for (position, item) in items.iter().enumerate() {
-           delete_old((workspace_id, pane_id, item.item_id()))?;
-           insert_new((item.item_id(), workspace_id, pane_id, item.kind(), position))?;
+           insert((workspace_id, pane_id, position, item))?;
        }

        Ok(())
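Note: save_workspace no longer receives worktree roots; the id now lives on SerializedWorkspace itself, and the optional first argument is the workspace's previous id. Passing Some(old_id) takes the UPDATE OR REPLACE branch above, which rewrites the existing row (the ON DELETE/UPDATE CASCADE foreign keys keep the dependent pane and item rows consistent); passing None simply replaces whatever row shares the new id. A hedged caller-side sketch, assuming a WorkspaceDb handle named db as in the tests below:

    // First save: no prior id to migrate from.
    db.save_workspace(None, &workspace);

    // The id is derived from the worktree root paths and matched as a set,
    // so lookup order does not matter.
    let restored = db.workspace_for_roots(&["/tmp2", "/tmp"]);

    // Later, if the workspace's roots change, save under the new id while
    // naming the old one so the stale row is rewritten rather than orphaned.
    db.save_workspace(Some((&["/tmp"]).into()), &workspace);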
@@ -339,34 +329,102 @@ mod tests {

    use super::*;

+   #[test]
+   fn test_full_workspace_serialization() {
+       env_logger::try_init().ok();
+
+       let db = WorkspaceDb(open_memory_db("test_full_workspace_serialization"));
+
+       let dock_pane = crate::persistence::model::SerializedPane {
+           children: vec![
+               SerializedItem::new("Terminal", 1),
+               SerializedItem::new("Terminal", 2),
+               SerializedItem::new("Terminal", 3),
+               SerializedItem::new("Terminal", 4),
+           ],
+       };
+
+       // -----------------
+       // | 1,2   | 5,6   |
+       // | - - - |       |
+       // | 3,4   |       |
+       // -----------------
+       let center_group = SerializedPaneGroup::Group {
+           axis: gpui::Axis::Horizontal,
+           children: vec![
+               SerializedPaneGroup::Group {
+                   axis: gpui::Axis::Vertical,
+                   children: vec![
+                       SerializedPaneGroup::Pane(SerializedPane {
+                           children: vec![
+                               SerializedItem::new("Terminal", 5),
+                               SerializedItem::new("Terminal", 6),
+                           ],
+                       }),
+                       SerializedPaneGroup::Pane(SerializedPane {
+                           children: vec![
+                               SerializedItem::new("Terminal", 7),
+                               SerializedItem::new("Terminal", 8),
+                           ],
+                       }),
+                   ],
+               },
+               SerializedPaneGroup::Pane(SerializedPane {
+                   children: vec![
+                       SerializedItem::new("Terminal", 9),
+                       SerializedItem::new("Terminal", 10),
+                   ],
+               }),
+           ],
+       };
+
+       let workspace = SerializedWorkspace {
+           workspace_id: (["/tmp", "/tmp2"]).into(),
+           dock_position: DockPosition::Shown(DockAnchor::Bottom),
+           center_group,
+           dock_pane,
+       };
+
+       db.save_workspace(None, &workspace);
+       let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);
+
+       assert_eq!(workspace, round_trip_workspace.unwrap());
+
+       // Test guaranteed duplicate IDs
+       db.save_workspace(None, &workspace);
+       db.save_workspace(None, &workspace);
+
+       let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]);
+       assert_eq!(workspace, round_trip_workspace.unwrap());
+   }
+
    #[test]
    fn test_workspace_assignment() {
-       // env_logger::try_init().ok();
+       env_logger::try_init().ok();

        let db = WorkspaceDb(open_memory_db("test_basic_functionality"));

        let workspace_1 = SerializedWorkspace {
+           workspace_id: (["/tmp", "/tmp2"]).into(),
            dock_position: crate::dock::DockPosition::Shown(DockAnchor::Bottom),
            center_group: Default::default(),
            dock_pane: Default::default(),
        };

-       let workspace_2 = SerializedWorkspace {
+       let mut workspace_2 = SerializedWorkspace {
+           workspace_id: (["/tmp"]).into(),
            dock_position: crate::dock::DockPosition::Hidden(DockAnchor::Expanded),
            center_group: Default::default(),
            dock_pane: Default::default(),
        };

-       let workspace_3 = SerializedWorkspace {
-           dock_position: crate::dock::DockPosition::Shown(DockAnchor::Right),
-           center_group: Default::default(),
-           dock_pane: Default::default(),
-       };
-
-       db.save_workspace(&["/tmp", "/tmp2"], None, &workspace_1);
-       db.save_workspace(&["/tmp"], None, &workspace_2);
-
-       db::write_db_to(&db, "test.db").unwrap();
+       db.save_workspace(None, &workspace_1);
+       db.save_workspace(None, &workspace_2);

        // Test that paths are treated as a set
        assert_eq!(
@@ -383,23 +441,32 @@ mod tests {
        assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None);

        // Test 'mutate' case of updating a pre-existing id
-       db.save_workspace(&["/tmp", "/tmp2"], Some(&["/tmp", "/tmp2"]), &workspace_2);
+       workspace_2.workspace_id = (["/tmp", "/tmp2"]).into();
+       db.save_workspace(Some((&["/tmp"]).into()), &workspace_2);
        assert_eq!(
            db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
            workspace_2
        );

        // Test other mechanism for mutating
-       db.save_workspace(&["/tmp", "/tmp2"], None, &workspace_3);
+       let mut workspace_3 = SerializedWorkspace {
+           workspace_id: (&["/tmp", "/tmp2"]).into(),
+           dock_position: DockPosition::Shown(DockAnchor::Right),
+           center_group: Default::default(),
+           dock_pane: Default::default(),
+       };
+
+       db.save_workspace(None, &workspace_3);
        assert_eq!(
            db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
            workspace_3
        );

        // Make sure that updating paths differently also works
+       workspace_3.workspace_id = (["/tmp3", "/tmp4", "/tmp2"]).into();
        db.save_workspace(
-           &["/tmp3", "/tmp4", "/tmp2"],
-           Some(&["/tmp", "/tmp2"]),
+           Some((&["/tmp", "/tmp2"]).into()),
            &workspace_3,
        );
        assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None);
@@ -408,16 +475,21 @@ mod tests {
            .unwrap(),
            workspace_3
        );
    }

+   use crate::dock::DockPosition;
    use crate::persistence::model::SerializedWorkspace;
    use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup};

-   fn default_workspace(
+   fn default_workspace<P: AsRef<Path>>(
+       workspace_id: &[P],
        dock_pane: SerializedPane,
        center_group: &SerializedPaneGroup,
    ) -> SerializedWorkspace {
        SerializedWorkspace {
+           workspace_id: workspace_id.into(),
            dock_position: crate::dock::DockPosition::Hidden(DockAnchor::Right),
            center_group: center_group.clone(),
            dock_pane,
@@ -426,23 +498,23 @@ mod tests {

    #[test]
    fn test_basic_dock_pane() {
-       // env_logger::try_init().ok();
+       env_logger::try_init().ok();

        let db = WorkspaceDb(open_memory_db("basic_dock_pane"));

        let dock_pane = crate::persistence::model::SerializedPane {
            children: vec![
-               SerializedItem::Terminal { item_id: 1 },
-               SerializedItem::Terminal { item_id: 4 },
-               SerializedItem::Terminal { item_id: 2 },
-               SerializedItem::Terminal { item_id: 3 },
+               SerializedItem::new("Terminal", 1),
+               SerializedItem::new("Terminal", 4),
+               SerializedItem::new("Terminal", 2),
+               SerializedItem::new("Terminal", 3),
            ],
        };

-       let workspace = default_workspace(dock_pane, &Default::default());
+       let workspace = default_workspace(&["/tmp"], dock_pane, &Default::default());

-       db.save_workspace(&["/tmp"], None, &workspace);
+       db.save_workspace(None, &workspace);

        let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();

        assert_eq!(workspace.dock_pane, new_workspace.dock_pane);
@@ -467,30 +539,30 @@ mod tests {
                children: vec![
                    SerializedPaneGroup::Pane(SerializedPane {
                        children: vec![
-                           SerializedItem::Terminal { item_id: 1 },
-                           SerializedItem::Terminal { item_id: 2 },
+                           SerializedItem::new("Terminal", 1),
+                           SerializedItem::new("Terminal", 2),
                        ],
                    }),
                    SerializedPaneGroup::Pane(SerializedPane {
                        children: vec![
-                           SerializedItem::Terminal { item_id: 4 },
-                           SerializedItem::Terminal { item_id: 3 },
+                           SerializedItem::new("Terminal", 4),
+                           SerializedItem::new("Terminal", 3),
                        ],
                    }),
                ],
            },
            SerializedPaneGroup::Pane(SerializedPane {
                children: vec![
-                   SerializedItem::Terminal { item_id: 5 },
-                   SerializedItem::Terminal { item_id: 6 },
+                   SerializedItem::new("Terminal", 5),
+                   SerializedItem::new("Terminal", 6),
                ],
            }),
        ],
    };

-   let workspace = default_workspace(Default::default(), &center_pane);
+   let workspace = default_workspace(&["/tmp"], Default::default(), &center_pane);

-   db.save_workspace(&["/tmp"], None, &workspace);
+   db.save_workspace(None, &workspace);

    assert_eq!(workspace.center_group, center_pane);
    }
@@ -3,7 +3,7 @@ use std::{
    sync::Arc,
};

-use anyhow::{bail, Result};
+use anyhow::Result;

use gpui::Axis;

@@ -16,10 +16,10 @@ use sqlez::{
use crate::dock::DockPosition;

#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct WorkspaceId(Arc<Vec<PathBuf>>);
+pub struct WorkspaceId(Arc<Vec<PathBuf>>);

impl WorkspaceId {
-   pub fn paths(self) -> Arc<Vec<PathBuf>> {
+   pub fn paths(&self) -> Arc<Vec<PathBuf>> {
        self.0.clone()
    }
}
@@ -52,6 +52,7 @@ impl Column for WorkspaceId {

#[derive(Debug, PartialEq, Eq)]
pub struct SerializedWorkspace {
+   pub workspace_id: WorkspaceId,
    pub dock_position: DockPosition,
    pub center_group: SerializedPaneGroup,
    pub dock_pane: SerializedPane,
@@ -90,67 +91,33 @@ pub type GroupId = i64;
pub type PaneId = i64;
pub type ItemId = usize;

-pub(crate) enum SerializedItemKind {
-   Editor,
-   Diagnostics,
-   ProjectSearch,
-   Terminal,
-}
-
-impl Bind for SerializedItemKind {
-   fn bind(&self, statement: &Statement, start_index: i32) -> anyhow::Result<i32> {
-       match self {
-           SerializedItemKind::Editor => "Editor",
-           SerializedItemKind::Diagnostics => "Diagnostics",
-           SerializedItemKind::ProjectSearch => "ProjectSearch",
-           SerializedItemKind::Terminal => "Terminal",
-       }
-       .bind(statement, start_index)
-   }
-}
-
-impl Column for SerializedItemKind {
-   fn column(statement: &mut Statement, start_index: i32) -> anyhow::Result<(Self, i32)> {
-       String::column(statement, start_index).and_then(|(kind_text, next_index)| {
-           Ok((
-               match kind_text.as_ref() {
-                   "Editor" => SerializedItemKind::Editor,
-                   "Diagnostics" => SerializedItemKind::Diagnostics,
-                   "ProjectSearch" => SerializedItemKind::ProjectSearch,
-                   "Terminal" => SerializedItemKind::Terminal,
-                   _ => bail!("Stored serialized item kind is incorrect"),
-               },
-               next_index,
-           ))
-       })
-   }
-}
-
#[derive(Debug, PartialEq, Eq, Clone)]
-pub enum SerializedItem {
-   Editor { item_id: usize, path: Arc<Path> },
-   Diagnostics { item_id: usize },
-   ProjectSearch { item_id: usize, query: String },
-   Terminal { item_id: usize },
+pub struct SerializedItem {
+   pub kind: Arc<str>,
+   pub item_id: ItemId,
}

impl SerializedItem {
-   pub fn item_id(&self) -> usize {
-       match self {
-           SerializedItem::Editor { item_id, .. } => *item_id,
-           SerializedItem::Diagnostics { item_id } => *item_id,
-           SerializedItem::ProjectSearch { item_id, .. } => *item_id,
-           SerializedItem::Terminal { item_id } => *item_id,
-       }
-   }
-
-   pub(crate) fn kind(&self) -> SerializedItemKind {
-       match self {
-           SerializedItem::Editor { .. } => SerializedItemKind::Editor,
-           SerializedItem::Diagnostics { .. } => SerializedItemKind::Diagnostics,
-           SerializedItem::ProjectSearch { .. } => SerializedItemKind::ProjectSearch,
-           SerializedItem::Terminal { .. } => SerializedItemKind::Terminal,
-       }
+   pub fn new(kind: impl AsRef<str>, item_id: ItemId) -> Self {
+       Self {
+           kind: Arc::from(kind.as_ref()),
+           item_id,
+       }
+   }
+}
+
+impl Bind for &SerializedItem {
+   fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
+       let next_index = statement.bind(self.kind.clone(), start_index)?;
+       statement.bind(self.item_id, next_index)
+   }
+}
+
+impl Column for SerializedItem {
+   fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
+       let (kind, next_index) = Arc::<str>::column(statement, start_index)?;
+       let (item_id, next_index) = ItemId::column(statement, next_index)?;
+       Ok((SerializedItem { kind, item_id }, next_index))
    }
}

@@ -187,8 +154,8 @@ mod tests {

        db.exec(indoc::indoc! {"
            CREATE TABLE workspace_id_test(
                workspace_id BLOB,
                dock_anchor TEXT
            );"})
        .unwrap()()
        .unwrap();
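Note: SerializedItem is now a plain struct of an open-ended kind string plus an item id, with Bind/Column implementations that write and read the two fields positionally (kind first, then item_id). That is why get_items above selects kind, item_id in exactly that order. A small usage sketch (the "Editor" kind and the db/pane_id bindings are illustrative; any kind string with a registered deserializer works the same way):

    // Constructing an item to persist:
    let item = SerializedItem::new("Editor", 42);

    // Reading a pane's items back; decoding follows the SELECT's column order.
    let items: Vec<SerializedItem> = db.get_items(pane_id)?;
    for item in &items {
        println!("{} -> item {}", item.kind, item.item_id);
    }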
@@ -1,7 +1,7 @@
use crate::{
    item::ItemEvent,
    persistence::model::{ItemId, WorkspaceId},
-   Item, ItemNavHistory,
+   Item, ItemNavHistory, Pane, Workspace,
};
use anyhow::{anyhow, Result};
use call::participant::{Frame, RemoteVideoTrack};
@@ -10,8 +10,10 @@ use futures::StreamExt;
use gpui::{
    elements::*,
    geometry::{rect::RectF, vector::vec2f},
-   Entity, ModelHandle, MouseButton, RenderContext, Task, View, ViewContext,
+   Entity, ModelHandle, MouseButton, RenderContext, Task, View, ViewContext, ViewHandle,
+   WeakViewHandle,
};
+use project::Project;
use settings::Settings;
use smallvec::SmallVec;
use std::{
@@ -191,10 +193,12 @@ impl Item for SharedScreen {
    }

    fn deserialize(
-       workspace_id: WorkspaceId,
-       item_id: ItemId,
-       cx: &mut ViewContext<Self>,
-   ) -> Result<Self> {
-       Err(anyhow!("SharedScreen can not be deserialized"))
+       _project: ModelHandle<Project>,
+       _workspace: WeakViewHandle<Workspace>,
+       _workspace_id: WorkspaceId,
+       _item_id: ItemId,
+       _cx: &mut ViewContext<Pane>,
+   ) -> Task<Result<ViewHandle<Self>>> {
+       unreachable!("Shared screen can not be deserialized")
    }
}
@@ -44,7 +44,8 @@ use language::LanguageRegistry;
use log::{error, warn};
pub use pane::*;
pub use pane_group::*;
-use persistence::model::{ItemId, WorkspaceId};
+use persistence::model::SerializedItem;
+pub use persistence::model::{ItemId, WorkspaceId};
use postage::prelude::Stream;
use project::{Project, ProjectEntryId, ProjectPath, ProjectStore, Worktree, WorktreeId};
use serde::Deserialize;
@@ -57,7 +58,7 @@ use theme::{Theme, ThemeRegistry};
pub use toolbar::{ToolbarItemLocation, ToolbarItemView};
use util::ResultExt;

-use crate::persistence::model::SerializedWorkspace;
+use crate::persistence::model::{SerializedPane, SerializedWorkspace};

#[derive(Clone, PartialEq)]
pub struct RemoveWorktreeFromProject(pub WorktreeId);
@@ -337,22 +338,27 @@ pub fn register_followable_item<I: FollowableItem>(cx: &mut MutableAppContext) {
    });
}

-type SerializableItemBuilders = HashMap<
-   &'static str,
-   fn(WorkspaceId, ItemId, &mut ViewContext<Pane>) -> Option<Box<dyn ItemHandle>>,
+type ItemDeserializers = HashMap<
+   Arc<str>,
+   fn(
+       ModelHandle<Project>,
+       WeakViewHandle<Workspace>,
+       WorkspaceId,
+       ItemId,
+       &mut ViewContext<Pane>,
+   ) -> Task<Result<Box<dyn ItemHandle>>>,
>;
pub fn register_deserializable_item<I: Item>(cx: &mut MutableAppContext) {
-   cx.update_default_global(|deserializers: &mut SerializableItemBuilders, _| {
+   cx.update_default_global(|deserializers: &mut ItemDeserializers, _cx| {
        if let Some(serialized_item_kind) = I::serialized_item_kind() {
-           deserializers.insert(serialized_item_kind, |workspace_id, item_id, cx| {
-               if let Some(v) =
-                   cx.add_option_view(|cx| I::deserialize(workspace_id, item_id, cx).log_err())
-               {
-                   Some(Box::new(v))
-               } else {
-                   None
-               }
-           });
+           deserializers.insert(
+               Arc::from(serialized_item_kind),
+               |project, workspace, workspace_id, item_id, cx| {
+                   let task = I::deserialize(project, workspace, workspace_id, item_id, cx);
+                   cx.foreground()
+                       .spawn(async { Ok(Box::new(task.await?) as Box<_>) })
+               },
+           );
        }
    });
}
@@ -549,6 +555,8 @@ impl Workspace {
            }
            project::Event::WorktreeRemoved(_) | project::Event::WorktreeAdded => {
                this.update_window_title(cx);
+               // TODO: Cache workspace_id on workspace and read from it here
+               this.serialize_workspace(None, cx);
            }
            project::Event::DisconnectedFromHost => {
                this.update_window_edited(cx);
@@ -568,21 +576,9 @@ impl Workspace {
        .detach();
        cx.focus(&center_pane);
        cx.emit(Event::PaneAdded(center_pane.clone()));
-       let dock = Dock::new(
-           dock_default_factory,
-           serialized_workspace
-               .as_ref()
-               .map(|ws| ws.dock_position)
-               .clone(),
-           cx,
-       );
+       let dock = Dock::new(dock_default_factory, cx);
        let dock_pane = dock.pane().clone();

-       if let Some(serialized_workspace) = serialized_workspace {
-
-           // Fill them in?
-       }
-
        let fs = project.read(cx).fs().clone();
        let user_store = project.read(cx).user_store();
        let client = project.read(cx).client();
@@ -636,13 +632,13 @@ impl Workspace {

        let mut this = Workspace {
            modal: None,
-           weak_self: weak_handle,
+           weak_self: weak_handle.clone(),
            center: PaneGroup::new(center_pane.clone()),
            dock,
            // When removing an item, the last element remaining in this array
            // is used to find where focus should fallback to. As such, the order
            // of these two variables is important.
-           panes: vec![dock_pane, center_pane.clone()],
+           panes: vec![dock_pane.clone(), center_pane.clone()],
            panes_by_item: Default::default(),
            active_pane: center_pane.clone(),
            last_active_center_pane: Some(center_pane.downgrade()),
@@ -655,7 +651,7 @@ impl Workspace {
            fs,
            left_sidebar,
            right_sidebar,
-           project,
+           project: project.clone(),
            leader_state: Default::default(),
            follower_states_by_leader: Default::default(),
            last_leaders_by_pane: Default::default(),
@@ -663,9 +659,15 @@ impl Workspace {
            active_call,
            _observe_current_user,
        };
-       this.project_remote_id_changed(this.project.read(cx).remote_id(), cx);
+       this.project_remote_id_changed(project.read(cx).remote_id(), cx);
        cx.defer(|this, cx| this.update_window_title(cx));

+       if let Some(serialized_workspace) = serialized_workspace {
+           cx.defer(move |_, cx| {
+               Self::load_from_serialized_workspace(weak_handle, serialized_workspace, cx)
+           });
+       }
+
        this
    }

@@ -1315,6 +1317,7 @@ impl Workspace {
    pub fn add_item(&mut self, item: Box<dyn ItemHandle>, cx: &mut ViewContext<Self>) {
        let active_pane = self.active_pane().clone();
        Pane::add_item(self, &active_pane, item, true, true, None, cx);
+       self.serialize_workspace(None, cx);
    }

    pub fn open_path(
@@ -1519,6 +1522,7 @@ impl Workspace {
                        entry.remove();
                    }
                }
+               self.serialize_workspace(None, cx);
            }
            _ => {}
        }
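Note: the registry above now keys async deserializers by the item's serialized kind string (Arc<str>), and each registered function returns a Task that resolves to a boxed ItemHandle; deserialize_pane in the next hunk looks the kind up in the ItemDeserializers global and awaits that task while restoring a pane. A hedged sketch of what an item type provides to take part (TerminalView is an illustrative name, not part of this diff, the exact return type of serialized_item_kind is inferred, and the other required Item methods are omitted):

    impl Item for TerminalView {
        // The kind string under which this item's rows are written to the `items` table.
        fn serialized_item_kind() -> Option<&'static str> {
            Some("Terminal")
        }

        // Signature as used by SharedScreen above; rebuilds the view from what was
        // persisted under (workspace_id, item_id).
        fn deserialize(
            project: ModelHandle<Project>,
            workspace: WeakViewHandle<Workspace>,
            workspace_id: WorkspaceId,
            item_id: ItemId,
            cx: &mut ViewContext<Pane>,
        ) -> Task<Result<ViewHandle<Self>>> {
            // ...restore the item here...
            unimplemented!()
        }
    }

    // Registered once during startup so ItemDeserializers can find it by kind:
    register_deserializable_item::<TerminalView>(cx);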
@@ -2250,6 +2254,140 @@ impl Workspace {
            _ => {}
        }
    }

+   fn workspace_id(&self, cx: &AppContext) -> WorkspaceId {
+       self.project()
+           .read(cx)
+           .visible_worktrees(cx)
+           .map(|worktree| worktree.read(cx).abs_path())
+           .collect::<Vec<_>>()
+           .into()
+   }
+
+   fn serialize_workspace(&self, old_id: Option<WorkspaceId>, cx: &mut MutableAppContext) {
+       let dock_pane = SerializedPane {
+           children: self
+               .dock
+               .pane()
+               .read(cx)
+               .items()
+               .filter_map(|item_handle| {
+                   Some(SerializedItem {
+                       kind: Arc::from(item_handle.serialized_item_kind()?),
+                       item_id: item_handle.id(),
+                   })
+               })
+               .collect::<Vec<_>>(),
+       };
+
+       let serialized_workspace = SerializedWorkspace {
+           workspace_id: self.workspace_id(cx),
+           dock_position: self.dock.position(),
+           dock_pane,
+           center_group: Default::default(),
+       };
+
+       cx.background()
+           .spawn(async move {
+               persistence::DB.save_workspace(old_id, &serialized_workspace);
+           })
+           .detach();
+   }
+
+   fn load_from_serialized_workspace(
+       workspace: WeakViewHandle<Workspace>,
+       serialized_workspace: SerializedWorkspace,
+       cx: &mut MutableAppContext,
+   ) {
+       // fn process_splits(
+       //     pane_group: SerializedPaneGroup,
+       //     parent: Option<PaneGroup>,
+       //     workspace: ViewHandle<Workspace>,
+       //     cx: &mut AsyncAppContext,
+       // ) {
+       //     match pane_group {
+       //         SerializedPaneGroup::Group { axis, children } => {
+       //             process_splits(pane_group, parent)
+       //         }
+       //         SerializedPaneGroup::Pane(pane) => {
+       //             process_pane(pane)
+       //         },
+       //     }
+       // }
+
+       async fn deserialize_pane(
+           project: ModelHandle<Project>,
+           pane: SerializedPane,
+           pane_handle: ViewHandle<Pane>,
+           workspace_id: WorkspaceId,
+           workspace: &ViewHandle<Workspace>,
+           cx: &mut AsyncAppContext,
+       ) {
+           for item in pane.children {
+               let project = project.clone();
+               let workspace_id = workspace_id.clone();
+               let item_handle = pane_handle
+                   .update(cx, |_, cx| {
+                       if let Some(deserializer) = cx.global::<ItemDeserializers>().get(&item.kind)
+                       {
+                           deserializer(
+                               project,
+                               workspace.downgrade(),
+                               workspace_id,
+                               item.item_id,
+                               cx,
+                           )
+                       } else {
+                           Task::ready(Err(anyhow!(
+                               "Deserializer does not exist for item kind: {}",
+                               item.kind
+                           )))
+                       }
+                   })
+                   .await
+                   .log_err();
+
+               if let Some(item_handle) = item_handle {
+                   workspace.update(cx, |workspace, cx| {
+                       Pane::add_item(
+                           workspace,
+                           &pane_handle,
+                           item_handle,
+                           false,
+                           false,
+                           None,
+                           cx,
+                       );
+                   })
+               }
+           }
+       }
+
+       cx.spawn(|mut cx| async move {
+           if let Some(workspace) = workspace.upgrade(&cx) {
+               let (project, dock_pane_handle) = workspace.read_with(&cx, |workspace, _| {
+                   (workspace.project().clone(), workspace.dock_pane().clone())
+               });
+               deserialize_pane(
+                   project,
+                   serialized_workspace.dock_pane,
+                   dock_pane_handle,
+                   serialized_workspace.workspace_id,
+                   &workspace,
+                   &mut cx,
+               )
+               .await;
+
+               // Traverse the splits tree and add to things
+               // process_splits(serialized_workspace.center_group, None, workspace, &mut cx);
+
+               workspace.update(&mut cx, |workspace, cx| {
+                   Dock::set_dock_position(workspace, serialized_workspace.dock_position, cx)
+               });
+           }
+       })
+       .detach();
+   }
}

impl Entity for Workspace {
Binary file not shown.