feat: run rustfmt with custom defined fmt configuration (#1848)

* chore: update rustfmt

* chore: apply rustfmt format
Nathan.fooo 2023-02-13 09:29:49 +08:00 committed by GitHub
parent e2496e734c
commit 6bb1c4e89c
459 changed files with 50554 additions and 46600 deletions
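Note: rustfmt and `cargo fmt` discover a `rustfmt.toml` at the repository root on their own, so once the configuration below is committed, reformatting is a single step. The exact invocation is not recorded in this commit; it would be along the lines of `cargo fmt --all` from the workspace root (or plain `cargo fmt` inside a single crate).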

View File

@@ -1,3 +1,3 @@
 fn main() {
-    tauri_build::build()
+  tauri_build::build()
 }

View File

@@ -0,0 +1,12 @@
+# https://rust-lang.github.io/rustfmt/?version=master&search=
+max_width = 100
+tab_spaces = 2
+newline_style = "Auto"
+match_block_trailing_comma = true
+use_field_init_shorthand = true
+use_try_shorthand = true
+reorder_imports = true
+reorder_modules = true
+remove_nested_parens = true
+merge_derives = true
+edition = "2021"
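As an illustration (not part of the diff), a small sketch of what this configuration produces: tab_spaces = 2 gives the two-space indentation seen throughout this commit, match_block_trailing_comma = true puts a trailing comma after block-bodied match arms (the recurring `}` to `},` change below), and use_field_init_shorthand = true collapses `field: field` initializers:

fn level_name(level: i64) -> String {
  // match_block_trailing_comma = true: block-bodied arms end with a comma
  match level {
    0 => "info".to_string(),
    _ => {
      format!("unknown level {}", level)
    },
  }
}

struct Logger {
  verbose: bool,
}

fn make_logger(verbose: bool) -> Logger {
  // use_field_init_shorthand = true: `verbose: verbose` becomes `verbose`
  Logger { verbose }
}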

View File

@@ -1,10 +1,10 @@
 use flowy_core::{get_client_server_configuration, AppFlowyCore, AppFlowyCoreConfig};
 pub fn init_flowy_core() -> AppFlowyCore {
-    let data_path = tauri::api::path::data_dir().unwrap();
-    let path = format!("{}/AppFlowy", data_path.to_str().unwrap());
-    let server_config = get_client_server_configuration().unwrap();
-    let config = AppFlowyCoreConfig::new(&path, "AppFlowy".to_string(), server_config)
-        .log_filter("trace", vec!["appflowy_tauri".to_string()]);
-    AppFlowyCore::new(config)
+  let data_path = tauri::api::path::data_dir().unwrap();
+  let path = format!("{}/AppFlowy", data_path.to_str().unwrap());
+  let server_config = get_client_server_configuration().unwrap();
+  let config = AppFlowyCoreConfig::new(&path, "AppFlowy".to_string(), server_config)
+    .log_filter("trace", vec!["appflowy_tauri".to_string()]);
+  AppFlowyCore::new(config)
 }

View File

@@ -1,6 +1,6 @@
 #![cfg_attr(
-    all(not(debug_assertions), target_os = "windows"),
-    windows_subsystem = "windows"
+  all(not(debug_assertions), target_os = "windows"),
+  windows_subsystem = "windows"
 )]
 mod init;
@@ -14,28 +14,28 @@ use request::*;
 use tauri::Manager;
 fn main() {
-    let flowy_core = init_flowy_core();
-    tauri::Builder::default()
-        .invoke_handler(tauri::generate_handler![invoke_request])
-        .manage(flowy_core)
-        .on_window_event(|_window_event| {})
-        .on_menu_event(|_menu| {})
-        .on_page_load(|window, _payload| {
-            let app_handler = window.app_handle();
-            register_notification_sender(TSNotificationSender::new(app_handler.clone()));
-            // tauri::async_runtime::spawn(async move {});
-            window.listen_global(AF_EVENT, move |event| {
-                on_event(app_handler.clone(), event);
-            });
-        })
-        .setup(|app| {
-            #[cfg(debug_assertions)]
-            {
-                let window = app.get_window("main").unwrap();
-                window.open_devtools();
-            }
-            Ok(())
-        })
-        .run(tauri::generate_context!())
-        .expect("error while running tauri application");
+  let flowy_core = init_flowy_core();
+  tauri::Builder::default()
+    .invoke_handler(tauri::generate_handler![invoke_request])
+    .manage(flowy_core)
+    .on_window_event(|_window_event| {})
+    .on_menu_event(|_menu| {})
+    .on_page_load(|window, _payload| {
+      let app_handler = window.app_handle();
+      register_notification_sender(TSNotificationSender::new(app_handler.clone()));
+      // tauri::async_runtime::spawn(async move {});
+      window.listen_global(AF_EVENT, move |event| {
+        on_event(app_handler.clone(), event);
+      });
+    })
+    .setup(|app| {
+      #[cfg(debug_assertions)]
+      {
+        let window = app.get_window("main").unwrap();
+        window.open_devtools();
+      }
+      Ok(())
+    })
+    .run(tauri::generate_context!())
+    .expect("error while running tauri application");
 }

View File

@@ -12,23 +12,24 @@ pub fn on_event(app_handler: AppHandle<Wry>, event: Event) {}
 #[allow(dead_code)]
 pub fn send_notification<P: Serialize + Clone>(app_handler: AppHandle<Wry>, payload: P) {
-    app_handler.emit_all(AF_NOTIFICATION, payload).unwrap();
+  app_handler.emit_all(AF_NOTIFICATION, payload).unwrap();
 }
 pub struct TSNotificationSender {
-    handler: AppHandle<Wry>,
+  handler: AppHandle<Wry>,
 }
 impl TSNotificationSender {
-    pub fn new(handler: AppHandle<Wry>) -> Self {
-        Self { handler }
-    }
+  pub fn new(handler: AppHandle<Wry>) -> Self {
+    Self { handler }
+  }
 }
 impl NotificationSender for TSNotificationSender {
-    fn send_subject(&self, subject: SubscribeObject) -> Result<(), String> {
-        self.handler
-            .emit_all(AF_NOTIFICATION, subject)
-            .map_err(|e| format!("{:?}", e))
-    }
+  fn send_subject(&self, subject: SubscribeObject) -> Result<(), String> {
+    self
+      .handler
+      .emit_all(AF_NOTIFICATION, subject)
+      .map_err(|e| format!("{:?}", e))
+  }
 }

View File

@@ -1,46 +1,46 @@
 use flowy_core::AppFlowyCore;
 use lib_dispatch::prelude::{
-    AFPluginDispatcher, AFPluginEventResponse, AFPluginRequest, StatusCode,
+  AFPluginDispatcher, AFPluginEventResponse, AFPluginRequest, StatusCode,
 };
 use tauri::{AppHandle, Manager, State, Wry};
 #[derive(Clone, Debug, serde::Deserialize)]
 pub struct AFTauriRequest {
-    ty: String,
-    payload: Vec<u8>,
+  ty: String,
+  payload: Vec<u8>,
 }
 impl std::convert::From<AFTauriRequest> for AFPluginRequest {
-    fn from(event: AFTauriRequest) -> Self {
-        AFPluginRequest::new(event.ty).payload(event.payload)
-    }
+  fn from(event: AFTauriRequest) -> Self {
+    AFPluginRequest::new(event.ty).payload(event.payload)
+  }
 }
 #[derive(Clone, serde::Serialize)]
 pub struct AFTauriResponse {
-    code: StatusCode,
-    payload: Vec<u8>,
+  code: StatusCode,
+  payload: Vec<u8>,
 }
 impl std::convert::From<AFPluginEventResponse> for AFTauriResponse {
-    fn from(response: AFPluginEventResponse) -> Self {
-        Self {
-            code: response.status_code,
-            payload: response.payload.to_vec(),
-        }
-    }
+  fn from(response: AFPluginEventResponse) -> Self {
+    Self {
+      code: response.status_code,
+      payload: response.payload.to_vec(),
+    }
+  }
 }
 // Learn more about Tauri commands at https://tauri.app/v1/guides/features/command
 #[tracing::instrument(level = "trace", skip(app_handler))]
 #[tauri::command]
 pub async fn invoke_request(
-    request: AFTauriRequest,
-    app_handler: AppHandle<Wry>,
+  request: AFTauriRequest,
+  app_handler: AppHandle<Wry>,
 ) -> AFTauriResponse {
-    let request: AFPluginRequest = request.into();
-    let state: State<AppFlowyCore> = app_handler.state();
-    let dispatcher = state.inner().dispatcher();
-    let response = AFPluginDispatcher::async_send(dispatcher, request).await;
-    response.into()
+  let request: AFPluginRequest = request.into();
+  let state: State<AppFlowyCore> = app_handler.state();
+  let dispatcher = state.inner().dispatcher();
+  let response = AFPluginDispatcher::async_send(dispatcher, request).await;
+  response.into()
 }

View File

@@ -1,3 +1,3 @@
 fn main() {
-    flowy_codegen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
+  flowy_codegen::protobuf_file::gen(env!("CARGO_PKG_NAME"));
 }

View File

@@ -2,25 +2,25 @@ use byteorder::{BigEndian, ByteOrder};
 use std::mem::forget;
 pub fn forget_rust(buf: Vec<u8>) -> *const u8 {
-    let ptr = buf.as_ptr();
-    forget(buf);
-    ptr
+  let ptr = buf.as_ptr();
+  forget(buf);
+  ptr
 }
 #[allow(unused_attributes)]
 #[allow(dead_code)]
 pub fn reclaim_rust(ptr: *mut u8, length: u32) {
-    unsafe {
-        let len: usize = length as usize;
-        Vec::from_raw_parts(ptr, len, len);
-    }
+  unsafe {
+    let len: usize = length as usize;
+    Vec::from_raw_parts(ptr, len, len);
+  }
 }
 pub fn extend_front_four_bytes_into_bytes(bytes: &[u8]) -> Vec<u8> {
-    let mut output = Vec::with_capacity(bytes.len() + 4);
-    let mut marker_bytes = [0; 4];
-    BigEndian::write_u32(&mut marker_bytes, bytes.len() as u32);
-    output.extend_from_slice(&marker_bytes);
-    output.extend_from_slice(bytes);
-    output
+  let mut output = Vec::with_capacity(bytes.len() + 4);
+  let mut marker_bytes = [0; 4];
+  BigEndian::write_u32(&mut marker_bytes, bytes.len() as u32);
+  output.extend_from_slice(&marker_bytes);
+  output.extend_from_slice(bytes);
+  output
 }

View File

@@ -7,8 +7,8 @@ mod util;
 use crate::notification::DartNotificationSender;
 use crate::{
-    c::{extend_front_four_bytes_into_bytes, forget_rust},
-    model::{FFIRequest, FFIResponse},
+  c::{extend_front_four_bytes_into_bytes, forget_rust},
+  model::{FFIRequest, FFIResponse},
 };
 use flowy_core::get_client_server_configuration;
 use flowy_core::*;
@@ -20,69 +20,74 @@ use parking_lot::RwLock;
 use std::{ffi::CStr, os::raw::c_char};
 lazy_static! {
-    static ref APPFLOWY_CORE: RwLock<Option<AppFlowyCore>> = RwLock::new(None);
+  static ref APPFLOWY_CORE: RwLock<Option<AppFlowyCore>> = RwLock::new(None);
 }
 #[no_mangle]
 pub extern "C" fn init_sdk(path: *mut c_char) -> i64 {
-    let c_str: &CStr = unsafe { CStr::from_ptr(path) };
-    let path: &str = c_str.to_str().unwrap();
-    let server_config = get_client_server_configuration().unwrap();
-    let log_crates = vec!["flowy-ffi".to_string()];
-    let config = AppFlowyCoreConfig::new(path, "appflowy".to_string(), server_config).log_filter("info", log_crates);
-    *APPFLOWY_CORE.write() = Some(AppFlowyCore::new(config));
-    0
+  let c_str: &CStr = unsafe { CStr::from_ptr(path) };
+  let path: &str = c_str.to_str().unwrap();
+  let server_config = get_client_server_configuration().unwrap();
+  let log_crates = vec!["flowy-ffi".to_string()];
+  let config = AppFlowyCoreConfig::new(path, "appflowy".to_string(), server_config)
+    .log_filter("info", log_crates);
+  *APPFLOWY_CORE.write() = Some(AppFlowyCore::new(config));
+  0
 }
 #[no_mangle]
 pub extern "C" fn async_event(port: i64, input: *const u8, len: usize) {
-    let request: AFPluginRequest = FFIRequest::from_u8_pointer(input, len).into();
-    log::trace!(
-        "[FFI]: {} Async Event: {:?} with {} port",
-        &request.id,
-        &request.event,
-        port
-    );
-    let dispatcher = match APPFLOWY_CORE.read().as_ref() {
-        None => {
-            log::error!("sdk not init yet.");
-            return;
-        }
-        Some(e) => e.event_dispatcher.clone(),
-    };
-    let _ = AFPluginDispatcher::async_send_with_callback(dispatcher, request, move |resp: AFPluginEventResponse| {
-        log::trace!("[FFI]: Post data to dart through {} port", port);
-        Box::pin(post_to_flutter(resp, port))
-    });
+  let request: AFPluginRequest = FFIRequest::from_u8_pointer(input, len).into();
+  log::trace!(
+    "[FFI]: {} Async Event: {:?} with {} port",
+    &request.id,
+    &request.event,
+    port
+  );
+  let dispatcher = match APPFLOWY_CORE.read().as_ref() {
+    None => {
+      log::error!("sdk not init yet.");
+      return;
+    },
+    Some(e) => e.event_dispatcher.clone(),
+  };
+  let _ = AFPluginDispatcher::async_send_with_callback(
+    dispatcher,
+    request,
+    move |resp: AFPluginEventResponse| {
+      log::trace!("[FFI]: Post data to dart through {} port", port);
+      Box::pin(post_to_flutter(resp, port))
+    },
+  );
 }
 #[no_mangle]
 pub extern "C" fn sync_event(input: *const u8, len: usize) -> *const u8 {
-    let request: AFPluginRequest = FFIRequest::from_u8_pointer(input, len).into();
-    log::trace!("[FFI]: {} Sync Event: {:?}", &request.id, &request.event,);
-    let dispatcher = match APPFLOWY_CORE.read().as_ref() {
-        None => {
-            log::error!("sdk not init yet.");
-            return forget_rust(Vec::default());
-        }
-        Some(e) => e.event_dispatcher.clone(),
-    };
-    let _response = AFPluginDispatcher::sync_send(dispatcher, request);
-    // FFIResponse { }
-    let response_bytes = vec![];
-    let result = extend_front_four_bytes_into_bytes(&response_bytes);
-    forget_rust(result)
+  let request: AFPluginRequest = FFIRequest::from_u8_pointer(input, len).into();
+  log::trace!("[FFI]: {} Sync Event: {:?}", &request.id, &request.event,);
+  let dispatcher = match APPFLOWY_CORE.read().as_ref() {
+    None => {
+      log::error!("sdk not init yet.");
+      return forget_rust(Vec::default());
+    },
+    Some(e) => e.event_dispatcher.clone(),
+  };
+  let _response = AFPluginDispatcher::sync_send(dispatcher, request);
+  // FFIResponse { }
+  let response_bytes = vec![];
+  let result = extend_front_four_bytes_into_bytes(&response_bytes);
+  forget_rust(result)
 }
 #[no_mangle]
 pub extern "C" fn set_stream_port(port: i64) -> i32 {
-    register_notification_sender(DartNotificationSender::new(port));
-    0
+  register_notification_sender(DartNotificationSender::new(port));
+  0
 }
 #[inline(never)]
@@ -91,39 +96,39 @@ pub extern "C" fn link_me_please() {}
 #[inline(always)]
 async fn post_to_flutter(response: AFPluginEventResponse, port: i64) {
-    let isolate = allo_isolate::Isolate::new(port);
-    match isolate
-        .catch_unwind(async {
-            let ffi_resp = FFIResponse::from(response);
-            ffi_resp.into_bytes().unwrap().to_vec()
-        })
-        .await
-    {
-        Ok(_success) => {
-            log::trace!("[FFI]: Post data to dart success");
-        }
-        Err(e) => {
-            if let Some(msg) = e.downcast_ref::<&str>() {
-                log::error!("[FFI]: {:?}", msg);
-            } else {
-                log::error!("[FFI]: allo_isolate post panic");
-            }
-        }
-    }
+  let isolate = allo_isolate::Isolate::new(port);
+  match isolate
+    .catch_unwind(async {
+      let ffi_resp = FFIResponse::from(response);
+      ffi_resp.into_bytes().unwrap().to_vec()
+    })
+    .await
+  {
+    Ok(_success) => {
+      log::trace!("[FFI]: Post data to dart success");
+    },
+    Err(e) => {
+      if let Some(msg) = e.downcast_ref::<&str>() {
+        log::error!("[FFI]: {:?}", msg);
+      } else {
+        log::error!("[FFI]: allo_isolate post panic");
+      }
+    },
+  }
 }
 #[no_mangle]
 pub extern "C" fn backend_log(level: i64, data: *const c_char) {
-    let c_str = unsafe { CStr::from_ptr(data) };
-    let log_str = c_str.to_str().unwrap();
-    // Don't change the mapping relation between number and level
-    match level {
-        0 => tracing::info!("{}", log_str),
-        1 => tracing::debug!("{}", log_str),
-        2 => tracing::trace!("{}", log_str),
-        3 => tracing::warn!("{}", log_str),
-        4 => tracing::error!("{}", log_str),
-        _ => (),
-    }
+  let c_str = unsafe { CStr::from_ptr(data) };
+  let log_str = c_str.to_str().unwrap();
+  // Don't change the mapping relation between number and level
+  match level {
+    0 => tracing::info!("{}", log_str),
+    1 => tracing::debug!("{}", log_str),
+    2 => tracing::trace!("{}", log_str),
+    3 => tracing::warn!("{}", log_str),
+    4 => tracing::error!("{}", log_str),
+    _ => (),
+  }
 }

View File

@@ -5,24 +5,24 @@ use std::convert::TryFrom;
 #[derive(Default, ProtoBuf)]
 pub struct FFIRequest {
-    #[pb(index = 1)]
-    pub(crate) event: String,
-    #[pb(index = 2)]
-    pub(crate) payload: Vec<u8>,
+  #[pb(index = 1)]
+  pub(crate) event: String,
+  #[pb(index = 2)]
+  pub(crate) payload: Vec<u8>,
 }
 impl FFIRequest {
-    pub fn from_u8_pointer(pointer: *const u8, len: usize) -> Self {
-        let buffer = unsafe { std::slice::from_raw_parts(pointer, len) }.to_vec();
-        let bytes = Bytes::from(buffer);
-        let request: FFIRequest = FFIRequest::try_from(bytes).unwrap();
-        request
-    }
+  pub fn from_u8_pointer(pointer: *const u8, len: usize) -> Self {
+    let buffer = unsafe { std::slice::from_raw_parts(pointer, len) }.to_vec();
+    let bytes = Bytes::from(buffer);
+    let request: FFIRequest = FFIRequest::try_from(bytes).unwrap();
+    request
+  }
 }
 impl std::convert::From<FFIRequest> for AFPluginRequest {
-    fn from(ffi_request: FFIRequest) -> Self {
-        AFPluginRequest::new(ffi_request.event).payload(ffi_request.payload)
-    }
+  fn from(ffi_request: FFIRequest) -> Self {
+    AFPluginRequest::new(ffi_request.event).payload(ffi_request.payload)
+  }
 }

View File

@@ -3,43 +3,43 @@ use lib_dispatch::prelude::{AFPluginEventResponse, Payload, StatusCode};
 #[derive(ProtoBuf_Enum, Clone, Copy)]
 pub enum FFIStatusCode {
-    Ok = 0,
-    Err = 1,
-    Internal = 2,
+  Ok = 0,
+  Err = 1,
+  Internal = 2,
 }
 impl std::default::Default for FFIStatusCode {
-    fn default() -> FFIStatusCode {
-        FFIStatusCode::Ok
-    }
+  fn default() -> FFIStatusCode {
+    FFIStatusCode::Ok
+  }
 }
 #[derive(ProtoBuf, Default)]
 pub struct FFIResponse {
-    #[pb(index = 1)]
-    payload: Vec<u8>,
-    #[pb(index = 2)]
-    code: FFIStatusCode,
+  #[pb(index = 1)]
+  payload: Vec<u8>,
+  #[pb(index = 2)]
+  code: FFIStatusCode,
 }
 impl std::convert::From<AFPluginEventResponse> for FFIResponse {
-    fn from(resp: AFPluginEventResponse) -> Self {
-        let payload = match resp.payload {
-            Payload::Bytes(bytes) => bytes.to_vec(),
-            Payload::None => vec![],
-        };
-        let code = match resp.status_code {
-            StatusCode::Ok => FFIStatusCode::Ok,
-            StatusCode::Err => FFIStatusCode::Err,
-        };
-        // let msg = match resp.error {
-        //     None => "".to_owned(),
-        //     Some(e) => format!("{:?}", e),
-        // };
-        FFIResponse { payload, code }
-    }
+  fn from(resp: AFPluginEventResponse) -> Self {
+    let payload = match resp.payload {
+      Payload::Bytes(bytes) => bytes.to_vec(),
+      Payload::None => vec![],
+    };
+    let code = match resp.status_code {
+      StatusCode::Ok => FFIStatusCode::Ok,
+      StatusCode::Err => FFIStatusCode::Err,
+    };
+    // let msg = match resp.error {
+    //   None => "".to_owned(),
+    //   Some(e) => format!("{:?}", e),
+    // };
+    FFIResponse { payload, code }
+  }
 }

View File

@@ -5,21 +5,21 @@ use flowy_notification::NotificationSender;
 use std::convert::TryInto;
 pub struct DartNotificationSender {
-    isolate: Isolate,
+  isolate: Isolate,
 }
 impl DartNotificationSender {
-    pub fn new(port: i64) -> Self {
-        Self {
-            isolate: Isolate::new(port),
-        }
-    }
+  pub fn new(port: i64) -> Self {
+    Self {
+      isolate: Isolate::new(port),
+    }
+  }
 }
 impl NotificationSender for DartNotificationSender {
-    fn send_subject(&self, subject: SubscribeObject) -> Result<(), String> {
-        let bytes: Bytes = subject.try_into().unwrap();
-        self.isolate.post(bytes.to_vec());
-        Ok(())
-    }
+  fn send_subject(&self, subject: SubscribeObject) -> Result<(), String> {
+    let bytes: Bytes = subject.try_into().unwrap();
+    self.isolate.post(bytes.to_vec());
+    Ok(())
+  }
 }

View File

@@ -4,277 +4,296 @@
 use crate::event_attrs::EventEnumAttrs;
 use crate::node_attrs::NodeStructAttrs;
-use crate::{is_recognizable_field, ty_ext::*, ASTResult, PBAttrsContainer, PBStructAttrs, NODE_TYPE};
+use crate::{
+  is_recognizable_field, ty_ext::*, ASTResult, PBAttrsContainer, PBStructAttrs, NODE_TYPE,
+};
 use proc_macro2::Ident;
 use syn::Meta::NameValue;
 use syn::{self, punctuated::Punctuated};
 pub struct ASTContainer<'a> {
-    /// The struct or enum name (without generics).
-    pub ident: syn::Ident,
-    pub node_type: Option<String>,
-    /// Attributes on the structure.
-    pub pb_attrs: PBAttrsContainer,
-    /// The contents of the struct or enum.
-    pub data: ASTData<'a>,
+  /// The struct or enum name (without generics).
+  pub ident: syn::Ident,
+  pub node_type: Option<String>,
+  /// Attributes on the structure.
+  pub pb_attrs: PBAttrsContainer,
+  /// The contents of the struct or enum.
+  pub data: ASTData<'a>,
 }
 impl<'a> ASTContainer<'a> {
-    pub fn from_ast(ast_result: &ASTResult, ast: &'a syn::DeriveInput) -> Option<ASTContainer<'a>> {
-        let attrs = PBAttrsContainer::from_ast(ast_result, ast);
-        // syn::DeriveInput
-        // 1. syn::DataUnion
-        // 2. syn::DataStruct
-        // 3. syn::DataEnum
-        let data = match &ast.data {
-            syn::Data::Struct(data) => {
-                // https://docs.rs/syn/1.0.48/syn/struct.DataStruct.html
-                let (style, fields) = struct_from_ast(ast_result, &data.fields);
-                ASTData::Struct(style, fields)
-            }
-            syn::Data::Union(_) => {
-                ast_result.error_spanned_by(ast, "Does not support derive for unions");
-                return None;
-            }
-            syn::Data::Enum(data) => {
-                // https://docs.rs/syn/1.0.48/syn/struct.DataEnum.html
-                ASTData::Enum(enum_from_ast(ast_result, &ast.ident, &data.variants, &ast.attrs))
-            }
-        };
-        let ident = ast.ident.clone();
-        let node_type = get_node_type(ast_result, &ident, &ast.attrs);
-        let item = ASTContainer {
-            ident,
-            pb_attrs: attrs,
-            node_type,
-            data,
-        };
-        Some(item)
-    }
+  pub fn from_ast(ast_result: &ASTResult, ast: &'a syn::DeriveInput) -> Option<ASTContainer<'a>> {
+    let attrs = PBAttrsContainer::from_ast(ast_result, ast);
+    // syn::DeriveInput
+    // 1. syn::DataUnion
+    // 2. syn::DataStruct
+    // 3. syn::DataEnum
+    let data = match &ast.data {
+      syn::Data::Struct(data) => {
+        // https://docs.rs/syn/1.0.48/syn/struct.DataStruct.html
+        let (style, fields) = struct_from_ast(ast_result, &data.fields);
+        ASTData::Struct(style, fields)
+      },
+      syn::Data::Union(_) => {
+        ast_result.error_spanned_by(ast, "Does not support derive for unions");
+        return None;
+      },
+      syn::Data::Enum(data) => {
+        // https://docs.rs/syn/1.0.48/syn/struct.DataEnum.html
+        ASTData::Enum(enum_from_ast(
+          ast_result,
+          &ast.ident,
+          &data.variants,
+          &ast.attrs,
+        ))
+      },
+    };
+    let ident = ast.ident.clone();
+    let node_type = get_node_type(ast_result, &ident, &ast.attrs);
+    let item = ASTContainer {
+      ident,
+      pb_attrs: attrs,
+      node_type,
+      data,
+    };
+    Some(item)
+  }
 }
 pub enum ASTData<'a> {
-    Struct(ASTStyle, Vec<ASTField<'a>>),
-    Enum(Vec<ASTEnumVariant<'a>>),
+  Struct(ASTStyle, Vec<ASTField<'a>>),
+  Enum(Vec<ASTEnumVariant<'a>>),
 }
 impl<'a> ASTData<'a> {
-    pub fn all_fields(&'a self) -> Box<dyn Iterator<Item = &'a ASTField<'a>> + 'a> {
-        match self {
-            ASTData::Enum(variants) => Box::new(variants.iter().flat_map(|variant| variant.fields.iter())),
-            ASTData::Struct(_, fields) => Box::new(fields.iter()),
-        }
-    }
+  pub fn all_fields(&'a self) -> Box<dyn Iterator<Item = &'a ASTField<'a>> + 'a> {
+    match self {
+      ASTData::Enum(variants) => {
+        Box::new(variants.iter().flat_map(|variant| variant.fields.iter()))
+      },
+      ASTData::Struct(_, fields) => Box::new(fields.iter()),
+    }
+  }
-    pub fn all_variants(&'a self) -> Box<dyn Iterator<Item = &'a EventEnumAttrs> + 'a> {
-        match self {
-            ASTData::Enum(variants) => {
-                let iter = variants.iter().map(|variant| &variant.attrs);
-                Box::new(iter)
-            }
-            ASTData::Struct(_, fields) => {
-                let iter = fields.iter().flat_map(|_| None);
-                Box::new(iter)
-            }
-        }
-    }
+  pub fn all_variants(&'a self) -> Box<dyn Iterator<Item = &'a EventEnumAttrs> + 'a> {
+    match self {
+      ASTData::Enum(variants) => {
+        let iter = variants.iter().map(|variant| &variant.attrs);
+        Box::new(iter)
+      },
+      ASTData::Struct(_, fields) => {
+        let iter = fields.iter().flat_map(|_| None);
+        Box::new(iter)
+      },
+    }
+  }
-    pub fn all_idents(&'a self) -> Box<dyn Iterator<Item = &'a syn::Ident> + 'a> {
-        match self {
-            ASTData::Enum(variants) => Box::new(variants.iter().map(|v| &v.ident)),
-            ASTData::Struct(_, fields) => {
-                let iter = fields.iter().flat_map(|f| match &f.member {
-                    syn::Member::Named(ident) => Some(ident),
-                    _ => None,
-                });
-                Box::new(iter)
-            }
-        }
-    }
+  pub fn all_idents(&'a self) -> Box<dyn Iterator<Item = &'a syn::Ident> + 'a> {
+    match self {
+      ASTData::Enum(variants) => Box::new(variants.iter().map(|v| &v.ident)),
+      ASTData::Struct(_, fields) => {
+        let iter = fields.iter().flat_map(|f| match &f.member {
+          syn::Member::Named(ident) => Some(ident),
+          _ => None,
+        });
+        Box::new(iter)
+      },
+    }
+  }
 }
 /// A variant of an enum.
 pub struct ASTEnumVariant<'a> {
-    pub ident: syn::Ident,
-    pub attrs: EventEnumAttrs,
-    pub style: ASTStyle,
-    pub fields: Vec<ASTField<'a>>,
-    pub original: &'a syn::Variant,
+  pub ident: syn::Ident,
+  pub attrs: EventEnumAttrs,
+  pub style: ASTStyle,
+  pub fields: Vec<ASTField<'a>>,
+  pub original: &'a syn::Variant,
 }
 impl<'a> ASTEnumVariant<'a> {
-    pub fn name(&self) -> String {
-        self.ident.to_string()
-    }
+  pub fn name(&self) -> String {
+    self.ident.to_string()
+  }
 }
 pub enum BracketCategory {
-    Other,
-    Opt,
-    Vec,
-    Map((String, String)),
+  Other,
+  Opt,
+  Vec,
+  Map((String, String)),
 }
 pub struct ASTField<'a> {
-    pub member: syn::Member,
-    pub pb_attrs: PBStructAttrs,
-    pub node_attrs: NodeStructAttrs,
-    pub ty: &'a syn::Type,
-    pub original: &'a syn::Field,
-    // If the field is Vec<String>, then the bracket_ty will be Vec
-    pub bracket_ty: Option<syn::Ident>,
-    // If the field is Vec<String>, then the bracket_inner_ty will be String
-    pub bracket_inner_ty: Option<syn::Ident>,
-    pub bracket_category: Option<BracketCategory>,
+  pub member: syn::Member,
+  pub pb_attrs: PBStructAttrs,
+  pub node_attrs: NodeStructAttrs,
+  pub ty: &'a syn::Type,
+  pub original: &'a syn::Field,
+  // If the field is Vec<String>, then the bracket_ty will be Vec
+  pub bracket_ty: Option<syn::Ident>,
+  // If the field is Vec<String>, then the bracket_inner_ty will be String
+  pub bracket_inner_ty: Option<syn::Ident>,
+  pub bracket_category: Option<BracketCategory>,
 }
 impl<'a> ASTField<'a> {
-    pub fn new(cx: &ASTResult, field: &'a syn::Field, index: usize) -> Result<Self, String> {
-        let mut bracket_inner_ty = None;
-        let mut bracket_ty = None;
-        let mut bracket_category = Some(BracketCategory::Other);
-        match parse_ty(cx, &field.ty) {
-            Ok(Some(inner)) => {
-                match inner.primitive_ty {
-                    PrimitiveTy::Map(map_info) => {
-                        bracket_category = Some(BracketCategory::Map((map_info.key.clone(), map_info.value)))
-                    }
-                    PrimitiveTy::Vec => {
-                        bracket_category = Some(BracketCategory::Vec);
-                    }
-                    PrimitiveTy::Opt => {
-                        bracket_category = Some(BracketCategory::Opt);
-                    }
-                    PrimitiveTy::Other => {
-                        bracket_category = Some(BracketCategory::Other);
-                    }
-                }
-                match *inner.bracket_ty_info {
-                    Some(bracketed_inner_ty) => {
-                        bracket_inner_ty = Some(bracketed_inner_ty.ident.clone());
-                        bracket_ty = Some(inner.ident.clone());
-                    }
-                    None => {
-                        bracket_ty = Some(inner.ident.clone());
-                    }
-                }
-            }
-            Ok(None) => {
-                let msg = format!("Fail to get the ty inner type: {:?}", field);
-                return Err(msg);
-            }
-            Err(e) => {
-                eprintln!("ASTField parser failed: {:?} with error: {}", field, e);
-                return Err(e);
-            }
-        }
-        Ok(ASTField {
-            member: match &field.ident {
-                Some(ident) => syn::Member::Named(ident.clone()),
-                None => syn::Member::Unnamed(index.into()),
-            },
-            pb_attrs: PBStructAttrs::from_ast(cx, index, field),
-            node_attrs: NodeStructAttrs::from_ast(cx, index, field),
-            ty: &field.ty,
-            original: field,
-            bracket_ty,
-            bracket_inner_ty,
-            bracket_category,
-        })
-    }
+  pub fn new(cx: &ASTResult, field: &'a syn::Field, index: usize) -> Result<Self, String> {
+    let mut bracket_inner_ty = None;
+    let mut bracket_ty = None;
+    let mut bracket_category = Some(BracketCategory::Other);
+    match parse_ty(cx, &field.ty) {
+      Ok(Some(inner)) => {
+        match inner.primitive_ty {
+          PrimitiveTy::Map(map_info) => {
+            bracket_category = Some(BracketCategory::Map((map_info.key.clone(), map_info.value)))
+          },
+          PrimitiveTy::Vec => {
+            bracket_category = Some(BracketCategory::Vec);
+          },
+          PrimitiveTy::Opt => {
+            bracket_category = Some(BracketCategory::Opt);
+          },
+          PrimitiveTy::Other => {
+            bracket_category = Some(BracketCategory::Other);
+          },
+        }
+        match *inner.bracket_ty_info {
+          Some(bracketed_inner_ty) => {
+            bracket_inner_ty = Some(bracketed_inner_ty.ident.clone());
+            bracket_ty = Some(inner.ident.clone());
+          },
+          None => {
+            bracket_ty = Some(inner.ident.clone());
+          },
+        }
+      },
+      Ok(None) => {
+        let msg = format!("Fail to get the ty inner type: {:?}", field);
+        return Err(msg);
+      },
+      Err(e) => {
+        eprintln!("ASTField parser failed: {:?} with error: {}", field, e);
+        return Err(e);
+      },
+    }
+    Ok(ASTField {
+      member: match &field.ident {
+        Some(ident) => syn::Member::Named(ident.clone()),
+        None => syn::Member::Unnamed(index.into()),
+      },
+      pb_attrs: PBStructAttrs::from_ast(cx, index, field),
+      node_attrs: NodeStructAttrs::from_ast(cx, index, field),
+      ty: &field.ty,
+      original: field,
+      bracket_ty,
+      bracket_inner_ty,
+      bracket_category,
+    })
+  }
-    pub fn ty_as_str(&self) -> String {
-        match self.bracket_inner_ty {
-            Some(ref ty) => ty.to_string(),
-            None => self.bracket_ty.as_ref().unwrap().clone().to_string(),
-        }
-    }
+  pub fn ty_as_str(&self) -> String {
+    match self.bracket_inner_ty {
+      Some(ref ty) => ty.to_string(),
+      None => self.bracket_ty.as_ref().unwrap().clone().to_string(),
+    }
+  }
-    pub fn name(&self) -> Option<syn::Ident> {
-        if let syn::Member::Named(ident) = &self.member {
-            Some(ident.clone())
-        } else {
-            None
-        }
-    }
+  pub fn name(&self) -> Option<syn::Ident> {
+    if let syn::Member::Named(ident) = &self.member {
+      Some(ident.clone())
+    } else {
+      None
+    }
+  }
 }
 #[derive(Copy, Clone)]
 pub enum ASTStyle {
-    Struct,
-    /// Many unnamed fields.
-    Tuple,
-    /// One unnamed field.
-    NewType,
-    /// No fields.
-    Unit,
+  Struct,
+  /// Many unnamed fields.
+  Tuple,
+  /// One unnamed field.
+  NewType,
+  /// No fields.
+  Unit,
 }
-pub fn struct_from_ast<'a>(cx: &ASTResult, fields: &'a syn::Fields) -> (ASTStyle, Vec<ASTField<'a>>) {
-    match fields {
-        syn::Fields::Named(fields) => (ASTStyle::Struct, fields_from_ast(cx, &fields.named)),
-        syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
-            (ASTStyle::NewType, fields_from_ast(cx, &fields.unnamed))
-        }
-        syn::Fields::Unnamed(fields) => (ASTStyle::Tuple, fields_from_ast(cx, &fields.unnamed)),
-        syn::Fields::Unit => (ASTStyle::Unit, Vec::new()),
-    }
+pub fn struct_from_ast<'a>(
+  cx: &ASTResult,
+  fields: &'a syn::Fields,
+) -> (ASTStyle, Vec<ASTField<'a>>) {
+  match fields {
+    syn::Fields::Named(fields) => (ASTStyle::Struct, fields_from_ast(cx, &fields.named)),
+    syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
+      (ASTStyle::NewType, fields_from_ast(cx, &fields.unnamed))
+    },
+    syn::Fields::Unnamed(fields) => (ASTStyle::Tuple, fields_from_ast(cx, &fields.unnamed)),
+    syn::Fields::Unit => (ASTStyle::Unit, Vec::new()),
+  }
 }
 pub fn enum_from_ast<'a>(
-    cx: &ASTResult,
-    ident: &syn::Ident,
-    variants: &'a Punctuated<syn::Variant, Token![,]>,
-    enum_attrs: &[syn::Attribute],
+  cx: &ASTResult,
+  ident: &syn::Ident,
+  variants: &'a Punctuated<syn::Variant, Token![,]>,
+  enum_attrs: &[syn::Attribute],
 ) -> Vec<ASTEnumVariant<'a>> {
-    variants
-        .iter()
-        .flat_map(|variant| {
-            let attrs = EventEnumAttrs::from_ast(cx, ident, variant, enum_attrs);
-            let (style, fields) = struct_from_ast(cx, &variant.fields);
-            Some(ASTEnumVariant {
-                ident: variant.ident.clone(),
-                attrs,
-                style,
-                fields,
-                original: variant,
-            })
-        })
-        .collect()
+  variants
+    .iter()
+    .flat_map(|variant| {
+      let attrs = EventEnumAttrs::from_ast(cx, ident, variant, enum_attrs);
+      let (style, fields) = struct_from_ast(cx, &variant.fields);
+      Some(ASTEnumVariant {
+        ident: variant.ident.clone(),
+        attrs,
+        style,
+        fields,
+        original: variant,
+      })
+    })
+    .collect()
 }
-fn fields_from_ast<'a>(cx: &ASTResult, fields: &'a Punctuated<syn::Field, Token![,]>) -> Vec<ASTField<'a>> {
-    fields
-        .iter()
-        .enumerate()
-        .flat_map(|(index, field)| {
-            if is_recognizable_field(field) {
-                ASTField::new(cx, field, index).ok()
-            } else {
-                None
-            }
-        })
-        .collect()
+fn fields_from_ast<'a>(
+  cx: &ASTResult,
+  fields: &'a Punctuated<syn::Field, Token![,]>,
+) -> Vec<ASTField<'a>> {
+  fields
+    .iter()
+    .enumerate()
+    .flat_map(|(index, field)| {
+      if is_recognizable_field(field) {
+        ASTField::new(cx, field, index).ok()
+      } else {
+        None
+      }
+    })
+    .collect()
 }
-fn get_node_type(ast_result: &ASTResult, struct_name: &Ident, attrs: &[syn::Attribute]) -> Option<String> {
-    let mut node_type = None;
-    attrs
-        .iter()
-        .filter(|attr| attr.path.segments.iter().any(|s| s.ident == NODE_TYPE))
-        .for_each(|attr| {
-            if let Ok(NameValue(named_value)) = attr.parse_meta() {
-                if node_type.is_some() {
-                    ast_result.error_spanned_by(struct_name, "Duplicate node type definition");
-                }
-                if let syn::Lit::Str(s) = named_value.lit {
-                    node_type = Some(s.value());
-                }
-            }
-        });
-    node_type
+fn get_node_type(
+  ast_result: &ASTResult,
+  struct_name: &Ident,
+  attrs: &[syn::Attribute],
+) -> Option<String> {
+  let mut node_type = None;
+  attrs
+    .iter()
+    .filter(|attr| attr.path.segments.iter().any(|s| s.ident == NODE_TYPE))
+    .for_each(|attr| {
+      if let Ok(NameValue(named_value)) = attr.parse_meta() {
+        if node_type.is_some() {
+          ast_result.error_spanned_by(struct_name, "Duplicate node type definition");
+        }
+        if let syn::Lit::Str(s) = named_value.lit {
+          node_type = Some(s.value());
+        }
+      }
+    });
+  node_type
 }

View File

@@ -3,41 +3,42 @@ use std::{cell::RefCell, fmt::Display, thread};
 #[derive(Default)]
 pub struct ASTResult {
-    errors: RefCell<Option<Vec<syn::Error>>>,
+  errors: RefCell<Option<Vec<syn::Error>>>,
 }
 impl ASTResult {
-    pub fn new() -> Self {
-        ASTResult {
-            errors: RefCell::new(Some(Vec::new())),
-        }
-    }
+  pub fn new() -> Self {
+    ASTResult {
+      errors: RefCell::new(Some(Vec::new())),
+    }
+  }
-    pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
-        self.errors
-            .borrow_mut()
-            .as_mut()
-            .unwrap()
-            .push(syn::Error::new_spanned(obj.into_token_stream(), msg));
-    }
+  pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
+    self
+      .errors
+      .borrow_mut()
+      .as_mut()
+      .unwrap()
+      .push(syn::Error::new_spanned(obj.into_token_stream(), msg));
+  }
-    pub fn syn_error(&self, err: syn::Error) {
-        self.errors.borrow_mut().as_mut().unwrap().push(err);
-    }
+  pub fn syn_error(&self, err: syn::Error) {
+    self.errors.borrow_mut().as_mut().unwrap().push(err);
+  }
-    pub fn check(self) -> Result<(), Vec<syn::Error>> {
-        let errors = self.errors.borrow_mut().take().unwrap();
-        match errors.len() {
-            0 => Ok(()),
-            _ => Err(errors),
-        }
-    }
+  pub fn check(self) -> Result<(), Vec<syn::Error>> {
+    let errors = self.errors.borrow_mut().take().unwrap();
+    match errors.len() {
+      0 => Ok(()),
+      _ => Err(errors),
+    }
+  }
 }
 impl Drop for ASTResult {
-    fn drop(&mut self) {
-        if !thread::panicking() && self.errors.borrow().is_some() {
-            panic!("forgot to check for errors");
-        }
-    }
+  fn drop(&mut self) {
+    if !thread::panicking() && self.errors.borrow().is_some() {
+      panic!("forgot to check for errors");
+    }
+  }
 }

View File

@@ -1,145 +1,150 @@
 use crate::{get_event_meta_items, parse_lit_str, symbol::*, ASTResult};
 use syn::{
-    self,
-    Meta::{NameValue, Path},
-    NestedMeta::{Lit, Meta},
+  self,
+  Meta::{NameValue, Path},
+  NestedMeta::{Lit, Meta},
 };
 #[derive(Debug, Clone)]
 pub struct EventAttrs {
-    input: Option<syn::Path>,
-    output: Option<syn::Path>,
-    error_ty: Option<String>,
-    pub ignore: bool,
+  input: Option<syn::Path>,
+  output: Option<syn::Path>,
+  error_ty: Option<String>,
+  pub ignore: bool,
 }
 #[derive(Debug, Clone)]
 pub struct EventEnumAttrs {
-    pub enum_name: String,
-    pub enum_item_name: String,
-    pub value: String,
-    pub event_attrs: EventAttrs,
+  pub enum_name: String,
+  pub enum_item_name: String,
+  pub value: String,
+  pub event_attrs: EventAttrs,
 }
 impl EventEnumAttrs {
-    pub fn from_ast(
-        ast_result: &ASTResult,
-        ident: &syn::Ident,
-        variant: &syn::Variant,
-        enum_attrs: &[syn::Attribute],
-    ) -> Self {
-        let enum_item_name = variant.ident.to_string();
-        let enum_name = ident.to_string();
-        let mut value = String::new();
-        if variant.discriminant.is_some() {
-            if let syn::Expr::Lit(ref expr_list) = variant.discriminant.as_ref().unwrap().1 {
-                let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
-                    int_value
-                } else {
-                    unimplemented!()
-                };
-                value = lit_int.base10_digits().to_string();
-            }
-        }
-        let event_attrs = get_event_attrs_from(ast_result, &variant.attrs, enum_attrs);
-        EventEnumAttrs {
-            enum_name,
-            enum_item_name,
-            value,
-            event_attrs,
-        }
-    }
+  pub fn from_ast(
+    ast_result: &ASTResult,
+    ident: &syn::Ident,
+    variant: &syn::Variant,
+    enum_attrs: &[syn::Attribute],
+  ) -> Self {
+    let enum_item_name = variant.ident.to_string();
+    let enum_name = ident.to_string();
+    let mut value = String::new();
+    if variant.discriminant.is_some() {
+      if let syn::Expr::Lit(ref expr_list) = variant.discriminant.as_ref().unwrap().1 {
+        let lit_int = if let syn::Lit::Int(ref int_value) = expr_list.lit {
+          int_value
+        } else {
+          unimplemented!()
+        };
+        value = lit_int.base10_digits().to_string();
+      }
+    }
+    let event_attrs = get_event_attrs_from(ast_result, &variant.attrs, enum_attrs);
+    EventEnumAttrs {
+      enum_name,
+      enum_item_name,
+      value,
+      event_attrs,
+    }
+  }
-    pub fn event_input(&self) -> Option<syn::Path> {
-        self.event_attrs.input.clone()
-    }
+  pub fn event_input(&self) -> Option<syn::Path> {
+    self.event_attrs.input.clone()
+  }
-    pub fn event_output(&self) -> Option<syn::Path> {
-        self.event_attrs.output.clone()
-    }
+  pub fn event_output(&self) -> Option<syn::Path> {
+    self.event_attrs.output.clone()
+  }
-    pub fn event_error(&self) -> String {
-        self.event_attrs.error_ty.as_ref().unwrap().clone()
-    }
+  pub fn event_error(&self) -> String {
+    self.event_attrs.error_ty.as_ref().unwrap().clone()
+  }
 }
 fn get_event_attrs_from(
-    ast_result: &ASTResult,
-    variant_attrs: &[syn::Attribute],
-    enum_attrs: &[syn::Attribute],
+  ast_result: &ASTResult,
+  variant_attrs: &[syn::Attribute],
+  enum_attrs: &[syn::Attribute],
 ) -> EventAttrs {
-    let mut event_attrs = EventAttrs {
-        input: None,
-        output: None,
-        error_ty: None,
-        ignore: false,
-    };
-    enum_attrs
-        .iter()
-        .filter(|attr| attr.path.segments.iter().any(|s| s.ident == EVENT_ERR))
-        .for_each(|attr| {
-            if let Ok(NameValue(named_value)) = attr.parse_meta() {
-                if let syn::Lit::Str(s) = named_value.lit {
-                    event_attrs.error_ty = Some(s.value());
-                } else {
-                    eprintln!("{} should not be empty", EVENT_ERR);
-                }
-            } else {
-                eprintln!("❌ Can not find any {} on attr: {:#?}", EVENT_ERR, attr);
-            }
-        });
-    let mut extract_event_attr = |attr: &syn::Attribute, meta_item: &syn::NestedMeta| match &meta_item {
-        Meta(NameValue(name_value)) => {
-            if name_value.path == EVENT_INPUT {
-                if let syn::Lit::Str(s) = &name_value.lit {
-                    let input_type = parse_lit_str(s)
-                        .map_err(|_| {
-                            ast_result
-                                .error_spanned_by(s, format!("failed to parse request deserializer {:?}", s.value()))
-                        })
-                        .unwrap();
-                    event_attrs.input = Some(input_type);
-                }
-            }
-            if name_value.path == EVENT_OUTPUT {
-                if let syn::Lit::Str(s) = &name_value.lit {
-                    let output_type = parse_lit_str(s)
-                        .map_err(|_| {
-                            ast_result
-                                .error_spanned_by(s, format!("failed to parse response deserializer {:?}", s.value()))
-                        })
-                        .unwrap();
-                    event_attrs.output = Some(output_type);
-                }
-            }
-        }
-        Meta(Path(word)) => {
-            if word == EVENT_IGNORE && attr.path == EVENT {
-                event_attrs.ignore = true;
-            }
-        }
-        Lit(s) => ast_result.error_spanned_by(s, "unexpected attribute"),
-        _ => ast_result.error_spanned_by(meta_item, "unexpected attribute"),
-    };
-    let attr_meta_items_info = variant_attrs
-        .iter()
-        .flat_map(|attr| match get_event_meta_items(ast_result, attr) {
-            Ok(items) => Some((attr, items)),
-            Err(_) => None,
-        })
-        .collect::<Vec<(&syn::Attribute, Vec<syn::NestedMeta>)>>();
-    for (attr, nested_metas) in attr_meta_items_info {
-        nested_metas
-            .iter()
-            .for_each(|meta_item| extract_event_attr(attr, meta_item))
-    }
-    // eprintln!("😁{:#?}", event_attrs);
-    event_attrs
+  let mut event_attrs = EventAttrs {
+    input: None,
+    output: None,
+    error_ty: None,
+    ignore: false,
+  };
+  enum_attrs
+    .iter()
+    .filter(|attr| attr.path.segments.iter().any(|s| s.ident == EVENT_ERR))
+    .for_each(|attr| {
+      if let Ok(NameValue(named_value)) = attr.parse_meta() {
+        if let syn::Lit::Str(s) = named_value.lit {
+          event_attrs.error_ty = Some(s.value());
+        } else {
+          eprintln!("{} should not be empty", EVENT_ERR);
+        }
+      } else {
+        eprintln!("❌ Can not find any {} on attr: {:#?}", EVENT_ERR, attr);
+      }
+    });
+  let mut extract_event_attr = |attr: &syn::Attribute, meta_item: &syn::NestedMeta| match &meta_item
+  {
+    Meta(NameValue(name_value)) => {
+      if name_value.path == EVENT_INPUT {
+        if let syn::Lit::Str(s) = &name_value.lit {
+          let input_type = parse_lit_str(s)
+            .map_err(|_| {
+              ast_result.error_spanned_by(
+                s,
+                format!("failed to parse request deserializer {:?}", s.value()),
+              )
+            })
+            .unwrap();
+          event_attrs.input = Some(input_type);
+        }
+      }
+      if name_value.path == EVENT_OUTPUT {
+        if let syn::Lit::Str(s) = &name_value.lit {
+          let output_type = parse_lit_str(s)
+            .map_err(|_| {
+              ast_result.error_spanned_by(
+                s,
+                format!("failed to parse response deserializer {:?}", s.value()),
+              )
+            })
+            .unwrap();
+          event_attrs.output = Some(output_type);
+        }
+      }
+    },
+    Meta(Path(word)) => {
+      if word == EVENT_IGNORE && attr.path == EVENT {
+        event_attrs.ignore = true;
+      }
+    },
+    Lit(s) => ast_result.error_spanned_by(s, "unexpected attribute"),
+    _ => ast_result.error_spanned_by(meta_item, "unexpected attribute"),
+  };
+  let attr_meta_items_info = variant_attrs
+    .iter()
+    .flat_map(|attr| match get_event_meta_items(ast_result, attr) {
+      Ok(items) => Some((attr, items)),
+      Err(_) => None,
+    })
+    .collect::<Vec<(&syn::Attribute, Vec<syn::NestedMeta>)>>();
+  for (attr, nested_metas) in attr_meta_items_info {
+    nested_metas
+      .iter()
+      .for_each(|meta_item| extract_event_attr(attr, meta_item))
+  }
+  // eprintln!("😁{:#?}", event_attrs);
+  event_attrs
 }

View File

@@ -1,99 +1,106 @@
 use crate::{get_node_meta_items, parse_lit_into_expr_path, symbol::*, ASTAttr, ASTResult};
 use quote::ToTokens;
 use syn::{
-    self, LitStr,
-    Meta::NameValue,
-    NestedMeta::{Lit, Meta},
+  self, LitStr,
+  Meta::NameValue,
+  NestedMeta::{Lit, Meta},
 };
 pub struct NodeStructAttrs {
-    pub rename: Option<LitStr>,
-    pub has_child: bool,
-    pub child_name: Option<LitStr>,
-    pub child_index: Option<syn::LitInt>,
-    pub get_node_value_with: Option<syn::ExprPath>,
-    pub set_node_value_with: Option<syn::ExprPath>,
-    pub with_children: Option<syn::ExprPath>,
+  pub rename: Option<LitStr>,
+  pub has_child: bool,
+  pub child_name: Option<LitStr>,
+  pub child_index: Option<syn::LitInt>,
+  pub get_node_value_with: Option<syn::ExprPath>,
+  pub set_node_value_with: Option<syn::ExprPath>,
+  pub with_children: Option<syn::ExprPath>,
 }
 impl NodeStructAttrs {
-    /// Extract out the `#[node(...)]` attributes from a struct field.
-    pub fn from_ast(ast_result: &ASTResult, _index: usize, field: &syn::Field) -> Self {
-        let mut rename = ASTAttr::none(ast_result, RENAME_NODE);
-        let mut child_name = ASTAttr::none(ast_result, CHILD_NODE_NAME);
-        let mut child_index = ASTAttr::none(ast_result, CHILD_NODE_INDEX);
-        let mut get_node_value_with = ASTAttr::none(ast_result, GET_NODE_VALUE_WITH);
-        let mut set_node_value_with = ASTAttr::none(ast_result, SET_NODE_VALUE_WITH);
-        let mut with_children = ASTAttr::none(ast_result, WITH_CHILDREN);
-        for meta_item in field
-            .attrs
-            .iter()
-            .flat_map(|attr| get_node_meta_items(ast_result, attr))
-            .flatten()
-        {
-            match &meta_item {
-                // Parse '#[node(rename = x)]'
-                Meta(NameValue(m)) if m.path == RENAME_NODE => {
-                    if let syn::Lit::Str(lit) = &m.lit {
-                        rename.set(&m.path, lit.clone());
-                    }
-                }
-                // Parse '#[node(child_name = x)]'
-                Meta(NameValue(m)) if m.path == CHILD_NODE_NAME => {
-                    if let syn::Lit::Str(lit) = &m.lit {
-                        child_name.set(&m.path, lit.clone());
-                    }
-                }
-                // Parse '#[node(child_index = x)]'
-                Meta(NameValue(m)) if m.path == CHILD_NODE_INDEX => {
-                    if let syn::Lit::Int(lit) = &m.lit {
-                        child_index.set(&m.path, lit.clone());
-                    }
-                }
-                // Parse `#[node(get_node_value_with = "...")]`
-                Meta(NameValue(m)) if m.path == GET_NODE_VALUE_WITH => {
-                    if let Ok(path) = parse_lit_into_expr_path(ast_result, GET_NODE_VALUE_WITH, &m.lit) {
-                        get_node_value_with.set(&m.path, path);
-                    }
-                }
-                // Parse `#[node(set_node_value_with= "...")]`
-                Meta(NameValue(m)) if m.path == SET_NODE_VALUE_WITH => {
-                    if let Ok(path) = parse_lit_into_expr_path(ast_result, SET_NODE_VALUE_WITH, &m.lit) {
-                        set_node_value_with.set(&m.path, path);
-                    }
-                }
-                // Parse `#[node(with_children= "...")]`
-                Meta(NameValue(m)) if m.path == WITH_CHILDREN => {
-                    if let Ok(path) = parse_lit_into_expr_path(ast_result, WITH_CHILDREN, &m.lit) {
-                        with_children.set(&m.path, path);
-                    }
-                }
-                Meta(meta_item) => {
-                    let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
-                    ast_result.error_spanned_by(meta_item.path(), format!("unknown node field attribute `{}`", path));
-                }
-                Lit(lit) => {
-                    ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
-                }
-            }
-        }
-        let child_name = child_name.get();
-        NodeStructAttrs {
-            rename: rename.get(),
-            child_index: child_index.get(),
-            has_child: child_name.is_some(),
-            child_name,
-            get_node_value_with: get_node_value_with.get(),
-            set_node_value_with: set_node_value_with.get(),
-            with_children: with_children.get(),
-        }
-    }
+  /// Extract out the `#[node(...)]` attributes from a struct field.
+  pub fn from_ast(ast_result: &ASTResult, _index: usize, field: &syn::Field) -> Self {
+    let mut rename = ASTAttr::none(ast_result, RENAME_NODE);
+    let mut child_name = ASTAttr::none(ast_result, CHILD_NODE_NAME);
+    let mut child_index = ASTAttr::none(ast_result, CHILD_NODE_INDEX);
+    let mut get_node_value_with = ASTAttr::none(ast_result, GET_NODE_VALUE_WITH);
+    let mut set_node_value_with = ASTAttr::none(ast_result, SET_NODE_VALUE_WITH);
+    let mut with_children = ASTAttr::none(ast_result, WITH_CHILDREN);
+    for meta_item in field
+      .attrs
+      .iter()
+      .flat_map(|attr| get_node_meta_items(ast_result, attr))
+      .flatten()
+    {
+      match &meta_item {
+        // Parse '#[node(rename = x)]'
+        Meta(NameValue(m)) if m.path == RENAME_NODE => {
+          if let syn::Lit::Str(lit) = &m.lit {
+            rename.set(&m.path, lit.clone());
+          }
+        },
+        // Parse '#[node(child_name = x)]'
+        Meta(NameValue(m)) if m.path == CHILD_NODE_NAME => {
+          if let syn::Lit::Str(lit) = &m.lit {
+            child_name.set(&m.path, lit.clone());
+          }
+        },
+        // Parse '#[node(child_index = x)]'
+        Meta(NameValue(m)) if m.path == CHILD_NODE_INDEX => {
+          if let syn::Lit::Int(lit) = &m.lit {
+            child_index.set(&m.path, lit.clone());
+          }
+        },
+        // Parse `#[node(get_node_value_with = "...")]`
+        Meta(NameValue(m)) if m.path == GET_NODE_VALUE_WITH => {
+          if let Ok(path) = parse_lit_into_expr_path(ast_result, GET_NODE_VALUE_WITH, &m.lit) {
+            get_node_value_with.set(&m.path, path);
+          }
+        },
+        // Parse `#[node(set_node_value_with= "...")]`
+        Meta(NameValue(m)) if m.path == SET_NODE_VALUE_WITH => {
+          if let Ok(path) = parse_lit_into_expr_path(ast_result, SET_NODE_VALUE_WITH, &m.lit) {
+            set_node_value_with.set(&m.path, path);
+          }
+        },
+        // Parse `#[node(with_children= "...")]`
+        Meta(NameValue(m)) if m.path == WITH_CHILDREN => {
+          if let Ok(path) = parse_lit_into_expr_path(ast_result, WITH_CHILDREN, &m.lit) {
+            with_children.set(&m.path, path);
+          }
+        },
+        Meta(meta_item) => {
+          let path = meta_item
+            .path()
+            .into_token_stream()
+            .to_string()
+            .replace(' ', "");
+          ast_result.error_spanned_by(
+            meta_item.path(),
+            format!("unknown node field attribute `{}`", path),
+          );
+        },
+        Lit(lit) => {
+          ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
+        },
+      }
+    }
+    let child_name = child_name.get();
+    NodeStructAttrs {
+      rename: rename.get(),
+      child_index: child_index.get(),
+      has_child: child_name.is_some(),
+      child_name,
+      get_node_value_with: get_node_value_with.get(),
+      set_node_value_with: set_node_value_with.get(),
+      with_children: with_children.get(),
+    }
+  }
 }

View File

@ -4,441 +4,486 @@ use crate::{symbol::*, ASTResult};
use proc_macro2::{Group, Span, TokenStream, TokenTree};
use quote::ToTokens;
use syn::{
self,
parse::{self, Parse},
Meta::{List, NameValue, Path},
NestedMeta::{Lit, Meta},
self,
parse::{self, Parse},
Meta::{List, NameValue, Path},
NestedMeta::{Lit, Meta},
};
#[allow(dead_code)]
pub struct PBAttrsContainer {
name: String,
pb_struct_type: Option<syn::Type>,
pb_enum_type: Option<syn::Type>,
name: String,
pb_struct_type: Option<syn::Type>,
pb_enum_type: Option<syn::Type>,
}
impl PBAttrsContainer {
/// Extract out the `#[pb(...)]` attributes from an item.
pub fn from_ast(ast_result: &ASTResult, item: &syn::DeriveInput) -> Self {
let mut pb_struct_type = ASTAttr::none(ast_result, PB_STRUCT);
let mut pb_enum_type = ASTAttr::none(ast_result, PB_ENUM);
for meta_item in item
.attrs
.iter()
.flat_map(|attr| get_pb_meta_items(ast_result, attr))
.flatten()
{
match &meta_item {
// Parse `#[pb(struct = "Type")]
Meta(NameValue(m)) if m.path == PB_STRUCT => {
if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_STRUCT, &m.lit) {
pb_struct_type.set_opt(&m.path, Some(into_ty));
}
}
/// Extract out the `#[pb(...)]` attributes from an item.
pub fn from_ast(ast_result: &ASTResult, item: &syn::DeriveInput) -> Self {
let mut pb_struct_type = ASTAttr::none(ast_result, PB_STRUCT);
let mut pb_enum_type = ASTAttr::none(ast_result, PB_ENUM);
for meta_item in item
.attrs
.iter()
.flat_map(|attr| get_pb_meta_items(ast_result, attr))
.flatten()
{
match &meta_item {
// Parse `#[pb(struct = "Type")]
Meta(NameValue(m)) if m.path == PB_STRUCT => {
if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_STRUCT, &m.lit) {
pb_struct_type.set_opt(&m.path, Some(into_ty));
}
},
// Parse `#[pb(enum = "Type")]
Meta(NameValue(m)) if m.path == PB_ENUM => {
if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_ENUM, &m.lit) {
pb_enum_type.set_opt(&m.path, Some(into_ty));
}
}
// Parse `#[pb(enum = "Type")]
Meta(NameValue(m)) if m.path == PB_ENUM => {
if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_ENUM, &m.lit) {
pb_enum_type.set_opt(&m.path, Some(into_ty));
}
},
Meta(meta_item) => {
let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
ast_result.error_spanned_by(meta_item.path(), format!("unknown container attribute `{}`", path));
}
Meta(meta_item) => {
let path = meta_item
.path()
.into_token_stream()
.to_string()
.replace(' ', "");
ast_result.error_spanned_by(
meta_item.path(),
format!("unknown container attribute `{}`", path),
);
},
Lit(lit) => {
ast_result.error_spanned_by(lit, "unexpected literal in container attribute");
}
}
}
match &item.data {
syn::Data::Struct(_) => {
pb_struct_type.set_if_none(default_pb_type(&ast_result, &item.ident));
}
syn::Data::Enum(_) => {
pb_enum_type.set_if_none(default_pb_type(&ast_result, &item.ident));
}
_ => {}
}
PBAttrsContainer {
name: item.ident.to_string(),
pb_struct_type: pb_struct_type.get(),
pb_enum_type: pb_enum_type.get(),
}
Lit(lit) => {
ast_result.error_spanned_by(lit, "unexpected literal in container attribute");
},
}
}
match &item.data {
syn::Data::Struct(_) => {
pb_struct_type.set_if_none(default_pb_type(&ast_result, &item.ident));
},
syn::Data::Enum(_) => {
pb_enum_type.set_if_none(default_pb_type(&ast_result, &item.ident));
},
_ => {},
}
pub fn pb_struct_type(&self) -> Option<&syn::Type> {
self.pb_struct_type.as_ref()
PBAttrsContainer {
name: item.ident.to_string(),
pb_struct_type: pb_struct_type.get(),
pb_enum_type: pb_enum_type.get(),
}
}
pub fn pb_enum_type(&self) -> Option<&syn::Type> {
self.pb_enum_type.as_ref()
}
pub fn pb_struct_type(&self) -> Option<&syn::Type> {
self.pb_struct_type.as_ref()
}
pub fn pb_enum_type(&self) -> Option<&syn::Type> {
self.pb_enum_type.as_ref()
}
}
pub struct ASTAttr<'c, T> {
ast_result: &'c ASTResult,
name: Symbol,
tokens: TokenStream,
value: Option<T>,
ast_result: &'c ASTResult,
name: Symbol,
tokens: TokenStream,
value: Option<T>,
}
impl<'c, T> ASTAttr<'c, T> {
pub(crate) fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
ASTAttr {
ast_result,
name,
tokens: TokenStream::new(),
value: None,
}
pub(crate) fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
ASTAttr {
ast_result,
name,
tokens: TokenStream::new(),
value: None,
}
}
pub(crate) fn set<A: ToTokens>(&mut self, obj: A, value: T) {
let tokens = obj.into_token_stream();
pub(crate) fn set<A: ToTokens>(&mut self, obj: A, value: T) {
let tokens = obj.into_token_stream();
if self.value.is_some() {
self.ast_result
.error_spanned_by(tokens, format!("duplicate attribute `{}`", self.name));
} else {
self.tokens = tokens;
self.value = Some(value);
}
if self.value.is_some() {
self
.ast_result
.error_spanned_by(tokens, format!("duplicate attribute `{}`", self.name));
} else {
self.tokens = tokens;
self.value = Some(value);
}
}
fn set_opt<A: ToTokens>(&mut self, obj: A, value: Option<T>) {
if let Some(value) = value {
self.set(obj, value);
}
fn set_opt<A: ToTokens>(&mut self, obj: A, value: Option<T>) {
if let Some(value) = value {
self.set(obj, value);
}
}
pub(crate) fn set_if_none(&mut self, value: T) {
if self.value.is_none() {
self.value = Some(value);
}
pub(crate) fn set_if_none(&mut self, value: T) {
if self.value.is_none() {
self.value = Some(value);
}
}
pub(crate) fn get(self) -> Option<T> {
self.value
}
pub(crate) fn get(self) -> Option<T> {
self.value
}
#[allow(dead_code)]
fn get_with_tokens(self) -> Option<(TokenStream, T)> {
match self.value {
Some(v) => Some((self.tokens, v)),
None => None,
}
#[allow(dead_code)]
fn get_with_tokens(self) -> Option<(TokenStream, T)> {
match self.value {
Some(v) => Some((self.tokens, v)),
None => None,
}
}
}
pub struct PBStructAttrs {
#[allow(dead_code)]
name: String,
pb_index: Option<syn::LitInt>,
pb_one_of: bool,
skip_pb_serializing: bool,
skip_pb_deserializing: bool,
serialize_pb_with: Option<syn::ExprPath>,
deserialize_pb_with: Option<syn::ExprPath>,
#[allow(dead_code)]
name: String,
pb_index: Option<syn::LitInt>,
pb_one_of: bool,
skip_pb_serializing: bool,
skip_pb_deserializing: bool,
serialize_pb_with: Option<syn::ExprPath>,
deserialize_pb_with: Option<syn::ExprPath>,
}
pub fn is_recognizable_field(field: &syn::Field) -> bool {
field.attrs.iter().any(|attr| is_recognizable_attribute(attr))
field
.attrs
.iter()
.any(|attr| is_recognizable_attribute(attr))
}
impl PBStructAttrs {
/// Extract out the `#[pb(...)]` attributes from a struct field.
pub fn from_ast(ast_result: &ASTResult, index: usize, field: &syn::Field) -> Self {
let mut pb_index = ASTAttr::none(ast_result, PB_INDEX);
let mut pb_one_of = BoolAttr::none(ast_result, PB_ONE_OF);
let mut serialize_pb_with = ASTAttr::none(ast_result, SERIALIZE_PB_WITH);
let mut skip_pb_serializing = BoolAttr::none(ast_result, SKIP_PB_SERIALIZING);
let mut deserialize_pb_with = ASTAttr::none(ast_result, DESERIALIZE_PB_WITH);
let mut skip_pb_deserializing = BoolAttr::none(ast_result, SKIP_PB_DESERIALIZING);
/// Extract out the `#[pb(...)]` attributes from a struct field.
pub fn from_ast(ast_result: &ASTResult, index: usize, field: &syn::Field) -> Self {
let mut pb_index = ASTAttr::none(ast_result, PB_INDEX);
let mut pb_one_of = BoolAttr::none(ast_result, PB_ONE_OF);
let mut serialize_pb_with = ASTAttr::none(ast_result, SERIALIZE_PB_WITH);
let mut skip_pb_serializing = BoolAttr::none(ast_result, SKIP_PB_SERIALIZING);
let mut deserialize_pb_with = ASTAttr::none(ast_result, DESERIALIZE_PB_WITH);
let mut skip_pb_deserializing = BoolAttr::none(ast_result, SKIP_PB_DESERIALIZING);
let ident = match &field.ident {
Some(ident) => ident.to_string(),
None => index.to_string(),
};
let ident = match &field.ident {
Some(ident) => ident.to_string(),
None => index.to_string(),
};
for meta_item in field
.attrs
.iter()
.flat_map(|attr| get_pb_meta_items(ast_result, attr))
.flatten()
{
match &meta_item {
// Parse `#[pb(skip)]`
Meta(Path(word)) if word == SKIP => {
skip_pb_serializing.set_true(word);
skip_pb_deserializing.set_true(word);
}
for meta_item in field
.attrs
.iter()
.flat_map(|attr| get_pb_meta_items(ast_result, attr))
.flatten()
{
match &meta_item {
// Parse `#[pb(skip)]`
Meta(Path(word)) if word == SKIP => {
skip_pb_serializing.set_true(word);
skip_pb_deserializing.set_true(word);
},
// Parse '#[pb(index = x)]'
Meta(NameValue(m)) if m.path == PB_INDEX => {
if let syn::Lit::Int(lit) = &m.lit {
pb_index.set(&m.path, lit.clone());
}
}
// Parse '#[pb(index = x)]'
Meta(NameValue(m)) if m.path == PB_INDEX => {
if let syn::Lit::Int(lit) = &m.lit {
pb_index.set(&m.path, lit.clone());
}
},
// Parse `#[pb(one_of)]`
Meta(Path(path)) if path == PB_ONE_OF => {
pb_one_of.set_true(path);
}
// Parse `#[pb(one_of)]`
Meta(Path(path)) if path == PB_ONE_OF => {
pb_one_of.set_true(path);
},
// Parse `#[pb(serialize_pb_with = "...")]`
Meta(NameValue(m)) if m.path == SERIALIZE_PB_WITH => {
if let Ok(path) = parse_lit_into_expr_path(ast_result, SERIALIZE_PB_WITH, &m.lit) {
serialize_pb_with.set(&m.path, path);
}
}
// Parse `#[pb(serialize_pb_with = "...")]`
Meta(NameValue(m)) if m.path == SERIALIZE_PB_WITH => {
if let Ok(path) = parse_lit_into_expr_path(ast_result, SERIALIZE_PB_WITH, &m.lit) {
serialize_pb_with.set(&m.path, path);
}
},
// Parse `#[pb(deserialize_pb_with = "...")]`
Meta(NameValue(m)) if m.path == DESERIALIZE_PB_WITH => {
if let Ok(path) = parse_lit_into_expr_path(ast_result, DESERIALIZE_PB_WITH, &m.lit) {
deserialize_pb_with.set(&m.path, path);
}
}
// Parse `#[pb(deserialize_pb_with = "...")]`
Meta(NameValue(m)) if m.path == DESERIALIZE_PB_WITH => {
if let Ok(path) = parse_lit_into_expr_path(ast_result, DESERIALIZE_PB_WITH, &m.lit) {
deserialize_pb_with.set(&m.path, path);
}
},
Meta(meta_item) => {
let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
ast_result.error_spanned_by(meta_item.path(), format!("unknown pb field attribute `{}`", path));
}
Meta(meta_item) => {
let path = meta_item
.path()
.into_token_stream()
.to_string()
.replace(' ', "");
ast_result.error_spanned_by(
meta_item.path(),
format!("unknown pb field attribute `{}`", path),
);
},
Lit(lit) => {
ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
}
}
}
PBStructAttrs {
name: ident,
pb_index: pb_index.get(),
pb_one_of: pb_one_of.get(),
skip_pb_serializing: skip_pb_serializing.get(),
skip_pb_deserializing: skip_pb_deserializing.get(),
serialize_pb_with: serialize_pb_with.get(),
deserialize_pb_with: deserialize_pb_with.get(),
}
Lit(lit) => {
ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
},
}
}
#[allow(dead_code)]
pub fn pb_index(&self) -> Option<String> {
self.pb_index.as_ref().map(|lit| lit.base10_digits().to_string())
PBStructAttrs {
name: ident,
pb_index: pb_index.get(),
pb_one_of: pb_one_of.get(),
skip_pb_serializing: skip_pb_serializing.get(),
skip_pb_deserializing: skip_pb_deserializing.get(),
serialize_pb_with: serialize_pb_with.get(),
deserialize_pb_with: deserialize_pb_with.get(),
}
}
pub fn is_one_of(&self) -> bool {
self.pb_one_of
}
#[allow(dead_code)]
pub fn pb_index(&self) -> Option<String> {
self
.pb_index
.as_ref()
.map(|lit| lit.base10_digits().to_string())
}
pub fn serialize_pb_with(&self) -> Option<&syn::ExprPath> {
self.serialize_pb_with.as_ref()
}
pub fn is_one_of(&self) -> bool {
self.pb_one_of
}
pub fn deserialize_pb_with(&self) -> Option<&syn::ExprPath> {
self.deserialize_pb_with.as_ref()
}
pub fn serialize_pb_with(&self) -> Option<&syn::ExprPath> {
self.serialize_pb_with.as_ref()
}
pub fn skip_pb_serializing(&self) -> bool {
self.skip_pb_serializing
}
pub fn deserialize_pb_with(&self) -> Option<&syn::ExprPath> {
self.deserialize_pb_with.as_ref()
}
pub fn skip_pb_deserializing(&self) -> bool {
self.skip_pb_deserializing
}
pub fn skip_pb_serializing(&self) -> bool {
self.skip_pb_serializing
}
pub fn skip_pb_deserializing(&self) -> bool {
self.skip_pb_deserializing
}
}
pub enum Default {
/// Field must always be specified because it does not have a default.
None,
/// The default is given by `std::default::Default::default()`.
Default,
/// The default is given by this function.
Path(syn::ExprPath),
/// Field must always be specified because it does not have a default.
None,
/// The default is given by `std::default::Default::default()`.
Default,
/// The default is given by this function.
Path(syn::ExprPath),
}
pub fn is_recognizable_attribute(attr: &syn::Attribute) -> bool {
attr.path == PB_ATTRS || attr.path == EVENT || attr.path == NODE_ATTRS || attr.path == NODES_ATTRS
attr.path == PB_ATTRS || attr.path == EVENT || attr.path == NODE_ATTRS || attr.path == NODES_ATTRS
}
pub fn get_pb_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
// Only handle the attribute that we have defined
if attr.path != PB_ATTRS {
return Ok(vec![]);
}
pub fn get_pb_meta_items(
cx: &ASTResult,
attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
// Only handle the attribute that we have defined
if attr.path != PB_ATTRS {
return Ok(vec![]);
}
// http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
match attr.parse_meta() {
Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
Ok(other) => {
cx.error_spanned_by(other, "expected #[pb(...)]");
Err(())
}
Err(err) => {
cx.error_spanned_by(attr, "attribute must be str, e.g. #[pb(xx = \"xxx\")]");
cx.syn_error(err);
Err(())
}
}
// http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
match attr.parse_meta() {
Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
Ok(other) => {
cx.error_spanned_by(other, "expected #[pb(...)]");
Err(())
},
Err(err) => {
cx.error_spanned_by(attr, "attribute must be str, e.g. #[pb(xx = \"xxx\")]");
cx.syn_error(err);
Err(())
},
}
}
pub fn get_node_meta_items(
cx: &ASTResult,
attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
// Only handle the attribute that we have defined
if attr.path != NODE_ATTRS && attr.path != NODES_ATTRS {
return Ok(vec![]);
}
// http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
match attr.parse_meta() {
Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
Ok(_) => Ok(vec![]),
Err(err) => {
cx.error_spanned_by(attr, "attribute must be str, e.g. #[node(xx = \"xxx\")]");
cx.syn_error(err);
Err(())
},
}
}
pub fn get_event_meta_items(
cx: &ASTResult,
attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
// Only handle the attribute that we have defined
if attr.path != EVENT {
return Ok(vec![]);
}
// http://strymon.systems.ethz.ch/typename/syn/enum.Meta.html
match attr.parse_meta() {
Ok(List(meta)) => Ok(meta.nested.into_iter().collect()),
Ok(other) => {
cx.error_spanned_by(other, "expected #[event(...)]");
Err(())
},
Err(err) => {
cx.error_spanned_by(attr, "attribute must be str, e.g. #[event(xx = \"xxx\")]");
cx.syn_error(err);
Err(())
},
}
}
pub fn parse_lit_into_expr_path(
ast_result: &ASTResult,
attr_name: Symbol,
lit: &syn::Lit,
) -> Result<syn::ExprPath, ()> {
let string = get_lit_str(ast_result, attr_name, lit)?;
parse_lit_str(string).map_err(|_| {
ast_result.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value()))
})
}
fn get_lit_str<'a>(
ast_result: &ASTResult,
attr_name: Symbol,
lit: &'a syn::Lit,
) -> Result<&'a syn::LitStr, ()> {
if let syn::Lit::Str(lit) = lit {
Ok(lit)
} else {
ast_result.error_spanned_by(
lit,
format!(
"expected pb {} attribute to be a string: `{} = \"...\"`",
attr_name, attr_name
),
);
Err(())
}
}
fn parse_lit_into_ty(
ast_result: &ASTResult,
attr_name: Symbol,
lit: &syn::Lit,
) -> Result<syn::Type, ()> {
let string = get_lit_str(ast_result, attr_name, lit)?;
parse_lit_str(string).map_err(|_| {
ast_result.error_spanned_by(
lit,
format!("failed to parse type: {} = {:?}", attr_name, string.value()),
)
})
}
pub fn parse_lit_str<T>(s: &syn::LitStr) -> parse::Result<T>
where
T: Parse,
{
let tokens = spanned_tokens(s)?;
syn::parse2(tokens)
}
fn spanned_tokens(s: &syn::LitStr) -> parse::Result<TokenStream> {
let stream = syn::parse_str(&s.value())?;
Ok(respan_token_stream(stream, s.span()))
}
fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
stream
.into_iter()
.map(|token| respan_token_tree(token, span))
.collect()
}
fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
if let TokenTree::Group(g) = &mut token {
*g = Group::new(g.delimiter(), respan_token_stream(g.stream(), span));
}
token.set_span(span);
token
}
fn default_pb_type(ast_result: &ASTResult, ident: &syn::Ident) -> syn::Type {
let take_ident = ident.to_string();
let lit_str = syn::LitStr::new(&take_ident, ident.span());
if let Ok(tokens) = spanned_tokens(&lit_str) {
if let Ok(pb_struct_ty) = syn::parse2(tokens) {
return pb_struct_ty;
}
}
ast_result.error_spanned_by(
ident,
format!("❌ Can't find {} protobuf struct", take_ident),
);
panic!()
}
#[allow(dead_code)]
pub fn is_option(ty: &syn::Type) -> bool {
let path = match ungroup(ty) {
syn::Type::Path(ty) => &ty.path,
_ => {
return false;
},
};
let seg = match path.segments.last() {
Some(seg) => seg,
None => {
return false;
},
};
let args = match &seg.arguments {
syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args,
_ => {
return false;
},
};
seg.ident == "Option" && args.len() == 1
}
#[allow(dead_code)]
pub fn ungroup(mut ty: &syn::Type) -> &syn::Type {
while let syn::Type::Group(group) = ty {
ty = &group.elem;
}
ty
}
struct BoolAttr<'c>(ASTAttr<'c, ()>);
impl<'c> BoolAttr<'c> {
fn none(ast_result: &'c ASTResult, name: Symbol) -> Self {
BoolAttr(ASTAttr::none(ast_result, name))
}
fn set_true<A: ToTokens>(&mut self, obj: A) {
self.0.set(obj, ());
}
fn get(&self) -> bool {
self.0.value.is_some()
}
}
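// A minimal, self-contained sketch of how the #[pb(...)] helpers above are
// typically driven (assumes syn 1.x; the PersonPB struct and the
// `#[pb(index = 1)]` value are hypothetical, not part of this commit).
use syn::{parse_quote, DeriveInput, Meta, NestedMeta};

fn sketch_extract_pb_index() {
  let input: DeriveInput = parse_quote! {
    struct PersonPB {
      #[pb(index = 1)]
      name: String,
    }
  };
  if let syn::Data::Struct(data) = input.data {
    for field in data.fields {
      for attr in &field.attrs {
        // Mirror of the guard in get_pb_meta_items: only #[pb(...)] is handled.
        if !attr.path.is_ident("pb") {
          continue;
        }
        if let Ok(Meta::List(list)) = attr.parse_meta() {
          for nested in list.nested {
            if let NestedMeta::Meta(Meta::NameValue(nv)) = nested {
              if nv.path.is_ident("index") {
                // Same accessor the pb_index getter above relies on.
                if let syn::Lit::Int(lit) = nv.lit {
                  assert_eq!(lit.base10_digits(), "1");
                }
              }
            }
          }
        }
      }
    }
  }
}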


@@ -48,31 +48,31 @@ pub const GET_MUT_VEC_ELEMENT_WITH: Symbol = Symbol("get_mut_element_with");
pub const WITH_CHILDREN: Symbol = Symbol("with_children");
impl PartialEq<Symbol> for Ident {
fn eq(&self, word: &Symbol) -> bool {
self == word.0
}
}
impl<'a> PartialEq<Symbol> for &'a Ident {
fn eq(&self, word: &Symbol) -> bool {
*self == word.0
}
}
impl PartialEq<Symbol> for Path {
fn eq(&self, word: &Symbol) -> bool {
self.is_ident(word.0)
}
}
impl<'a> PartialEq<Symbol> for &'a Path {
fn eq(&self, word: &Symbol) -> bool {
self.is_ident(word.0)
}
}
impl Display for Symbol {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(self.0)
}
}
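// A minimal sketch of the design above (assumes syn 1.x): because `Path` gets
// a `PartialEq<Symbol>` impl, attribute paths compare directly against the
// Symbol constants, which keeps guards like `attr.path == PB_ATTRS` terse.
// LocalSymbol here is a stand-in so the snippet is self-contained.
use syn::{parse_quote, Path};

#[derive(Copy, Clone)]
struct LocalSymbol(&'static str);

impl PartialEq<LocalSymbol> for Path {
  fn eq(&self, word: &LocalSymbol) -> bool {
    self.is_ident(word.0)
  }
}

const PB: LocalSymbol = LocalSymbol("pb");

fn sketch_symbol_eq() {
  let attr: syn::Attribute = parse_quote!(#[pb(index = 1)]);
  // Reads like the guards in the meta-item helpers above.
  assert!(attr.path == PB);
}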


@@ -3,151 +3,154 @@ use syn::{self, AngleBracketedGenericArguments, PathSegment};
#[derive(Eq, PartialEq, Debug)]
pub enum PrimitiveTy {
Map(MapInfo),
Vec,
Opt,
Other,
}
#[derive(Debug)]
pub struct TyInfo<'a> {
pub ident: &'a syn::Ident,
pub ty: &'a syn::Type,
pub primitive_ty: PrimitiveTy,
pub bracket_ty_info: Box<Option<TyInfo<'a>>>,
}
#[derive(Debug, Eq, PartialEq)]
pub struct MapInfo {
pub key: String,
pub value: String,
}
impl MapInfo {
fn new(key: String, value: String) -> Self {
MapInfo { key, value }
}
}
impl<'a> TyInfo<'a> {
#[allow(dead_code)]
pub fn bracketed_ident(&'a self) -> &'a syn::Ident {
match self.bracket_ty_info.as_ref() {
Some(b_ty) => b_ty.ident,
None => {
panic!()
},
}
}
}
pub fn parse_ty<'a>(
ast_result: &ASTResult,
ty: &'a syn::Type,
) -> Result<Option<TyInfo<'a>>, String> {
// Type -> TypePath -> Path -> PathSegment -> PathArguments ->
// AngleBracketedGenericArguments -> GenericArgument -> Type.
if let syn::Type::Path(ref p) = ty {
if p.path.segments.len() != 1 {
return Ok(None);
}
Err("Unsupported inner type, get inner type fail".to_string())
let seg = match p.path.segments.last() {
Some(seg) => seg,
None => return Ok(None),
};
let _is_option = seg.ident == "Option";
return if let syn::PathArguments::AngleBracketed(ref bracketed) = seg.arguments {
match seg.ident.to_string().as_ref() {
"HashMap" => generate_hashmap_ty_info(ast_result, ty, seg, bracketed),
"Vec" => generate_vec_ty_info(ast_result, seg, bracketed),
"Option" => generate_option_ty_info(ast_result, ty, seg, bracketed),
_ => {
let msg = format!("Unsupported type: {}", seg.ident);
ast_result.error_spanned_by(&seg.ident, &msg);
return Err(msg);
},
}
} else {
return Ok(Some(TyInfo {
ident: &seg.ident,
ty,
primitive_ty: PrimitiveTy::Other,
bracket_ty_info: Box::new(None),
}));
};
}
Err("Unsupported inner type, get inner type fail".to_string())
}
fn parse_bracketed(bracketed: &AngleBracketedGenericArguments) -> Vec<&syn::Type> {
bracketed
.args
.iter()
.flat_map(|arg| {
if let syn::GenericArgument::Type(ref ty_in_bracket) = arg {
Some(ty_in_bracket)
} else {
None
}
})
.collect::<Vec<&syn::Type>>()
}
pub fn generate_hashmap_ty_info<'a>(
ast_result: &ASTResult,
ty: &'a syn::Type,
path_segment: &'a PathSegment,
bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
// The map must have exactly two generic args: the key type and the value type
if bracketed.args.len() != 2 {
return Ok(None);
}
let types = parse_bracketed(bracketed);
let key = parse_ty(ast_result, types[0])?.unwrap().ident.to_string();
let value = parse_ty(ast_result, types[1])?.unwrap().ident.to_string();
let bracket_ty_info = Box::new(parse_ty(ast_result, types[1])?);
Ok(Some(TyInfo {
ident: &path_segment.ident,
ty,
primitive_ty: PrimitiveTy::Map(MapInfo::new(key, value)),
bracket_ty_info,
}))
}
fn generate_option_ty_info<'a>(
ast_result: &ASTResult,
ty: &'a syn::Type,
path_segment: &'a PathSegment,
bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
assert_eq!(path_segment.ident.to_string(), "Option".to_string());
let types = parse_bracketed(bracketed);
let bracket_ty_info = Box::new(parse_ty(ast_result, types[0])?);
Ok(Some(TyInfo {
ident: &path_segment.ident,
ty,
primitive_ty: PrimitiveTy::Opt,
bracket_ty_info,
}))
}
fn generate_vec_ty_info<'a>(
ast_result: &ASTResult,
path_segment: &'a PathSegment,
bracketed: &'a AngleBracketedGenericArguments,
) -> Result<Option<TyInfo<'a>>, String> {
if bracketed.args.len() != 1 {
return Ok(None);
}
if let syn::GenericArgument::Type(ref bracketed_type) = bracketed.args.first().unwrap() {
let bracketed_ty_info = Box::new(parse_ty(ast_result, bracketed_type)?);
return Ok(Some(TyInfo {
ident: &path_segment.ident,
ty: bracketed_type,
primitive_ty: PrimitiveTy::Vec,
bracket_ty_info: bracketed_ty_info,
}));
}
Ok(None)
}
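// A standalone sketch (assumes syn 1.x) of the bracket-peeling idea behind
// parse_ty and the generate_*_ty_info helpers: walk one AngleBracketed layer
// of a type at a time. The helper name is hypothetical.
use syn::{parse_quote, GenericArgument, PathArguments, Type};

fn first_bracketed_inner(ty: &Type) -> Option<&Type> {
  if let Type::Path(p) = ty {
    let seg = p.path.segments.last()?;
    if let PathArguments::AngleBracketed(bracketed) = &seg.arguments {
      for arg in &bracketed.args {
        if let GenericArgument::Type(inner) = arg {
          return Some(inner);
        }
      }
    }
  }
  None
}

fn sketch_peel_option_vec() {
  let ty: Type = parse_quote!(Option<Vec<String>>);
  let vec_ty = first_bracketed_inner(&ty).unwrap(); // Vec<String>
  let string_ty = first_bracketed_inner(vec_ty).unwrap(); // String
  if let Type::Path(p) = string_ty {
    assert_eq!(p.path.segments.last().unwrap().ident.to_string(), "String");
  }
}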


@@ -1,7 +1,9 @@
use crate::errors::{SyncError, SyncResult};
use crate::util::cal_diff;
use flowy_sync::util::make_operations_from_revisions;
use grid_model::{
gen_block_id, gen_row_id, CellRevision, DatabaseBlockRevision, RowChangeset, RowRevision,
};
use lib_infra::util::md5;
use lib_ot::core::{DeltaBuilder, DeltaOperations, EmptyAttributes, OperationTransform};
use revision_model::Revision;
@@ -14,425 +16,463 @@ pub type GridBlockOperationsBuilder = DeltaBuilder;
#[derive(Debug, Clone)]
pub struct GridBlockRevisionPad {
block: DatabaseBlockRevision,
operations: GridBlockOperations,
}
impl std::ops::Deref for GridBlockRevisionPad {
type Target = DatabaseBlockRevision;
fn deref(&self) -> &Self::Target {
&self.block
}
}
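// A minimal sketch of the Deref pattern used above: read-only access on the
// pad falls through to the wrapped revision. The Inner/Pad types here are
// stand-ins, not AppFlowy types.
mod deref_sketch {
  pub struct Inner {
    pub rows: Vec<u32>,
  }
  pub struct Pad {
    inner: Inner,
  }
  impl std::ops::Deref for Pad {
    type Target = Inner;
    fn deref(&self) -> &Self::Target {
      &self.inner
    }
  }
  pub fn demo() {
    let pad = Pad {
      inner: Inner { rows: vec![1, 2] },
    };
    // Resolves through Deref, like `pad.rows` and `pad.block_id` in the tests below.
    assert_eq!(pad.rows.len(), 2);
  }
}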
impl GridBlockRevisionPad {
  pub fn duplicate_data(&self, duplicated_block_id: &str) -> DatabaseBlockRevision {
    let duplicated_rows = self
      .block
      .rows
      .iter()
      .map(|row| {
        let mut duplicated_row = row.as_ref().clone();
        duplicated_row.id = gen_row_id();
        duplicated_row.block_id = duplicated_block_id.to_string();
        Arc::new(duplicated_row)
      })
      .collect::<Vec<Arc<RowRevision>>>();
    DatabaseBlockRevision {
      block_id: duplicated_block_id.to_string(),
      rows: duplicated_rows,
    }
  }

  pub fn from_operations(operations: GridBlockOperations) -> SyncResult<Self> {
    let s = operations.content()?;
    let revision: DatabaseBlockRevision = serde_json::from_str(&s).map_err(|e| {
      let msg = format!("Deserialize operations to GridBlockRevision failed: {}", e);
      tracing::error!("{}", s);
      SyncError::internal().context(msg)
    })?;
    Ok(Self {
      block: revision,
      operations,
    })
  }

  pub fn from_revisions(_grid_id: &str, revisions: Vec<Revision>) -> SyncResult<Self> {
    let operations: GridBlockOperations = make_operations_from_revisions(revisions)?;
    Self::from_operations(operations)
  }

  #[tracing::instrument(level = "trace", skip(self, row), err)]
  pub fn add_row_rev(
    &mut self,
    row: RowRevision,
    start_row_id: Option<String>,
  ) -> SyncResult<Option<GridBlockRevisionChangeset>> {
    self.modify(|rows| {
      if let Some(start_row_id) = start_row_id {
        if !start_row_id.is_empty() {
          if let Some(index) = rows.iter().position(|row| row.id == start_row_id) {
            rows.insert(index + 1, Arc::new(row));
            return Ok(Some(()));
          }
        }
      }
      rows.push(Arc::new(row));
      Ok(Some(()))
    })
  }

  pub fn delete_rows(
    &mut self,
    row_ids: Vec<Cow<'_, String>>,
  ) -> SyncResult<Option<GridBlockRevisionChangeset>> {
    self.modify(|rows| {
      rows.retain(|row| !row_ids.contains(&Cow::Borrowed(&row.id)));
      Ok(Some(()))
    })
  }

  pub fn get_row_rev(&self, row_id: &str) -> Option<(usize, Arc<RowRevision>)> {
    for (index, row) in self.block.rows.iter().enumerate() {
      if row.id == row_id {
        return Some((index, row.clone()));
      }
    }
    None
  }

  pub fn get_row_revs<T>(
    &self,
    row_ids: Option<Vec<Cow<'_, T>>>,
  ) -> SyncResult<Vec<Arc<RowRevision>>>
  where
    T: AsRef<str> + ToOwned + ?Sized,
  {
    match row_ids {
      None => Ok(self.block.rows.clone()),
      Some(row_ids) => {
        let row_map = self
          .block
          .rows
          .iter()
          .map(|row| (row.id.as_str(), row.clone()))
          .collect::<HashMap<&str, Arc<RowRevision>>>();

        Ok(
          row_ids
            .iter()
            .flat_map(|row_id| {
              let row_id = row_id.as_ref().as_ref();
              match row_map.get(row_id) {
                None => {
                  tracing::error!("Can't find the row with id: {}", row_id);
                  None
                },
                Some(row) => Some(row.clone()),
              }
            })
            .collect::<Vec<_>>(),
        )
      },
    }
  }

  pub fn get_cell_revs(
    &self,
    field_id: &str,
    row_ids: Option<Vec<Cow<'_, String>>>,
  ) -> SyncResult<Vec<CellRevision>> {
    let rows = self.get_row_revs(row_ids)?;
    let cell_revs = rows
      .iter()
      .flat_map(|row| {
        let cell_rev = row.cells.get(field_id)?;
        Some(cell_rev.clone())
      })
      .collect::<Vec<CellRevision>>();
    Ok(cell_revs)
  }

  pub fn number_of_rows(&self) -> i32 {
    self.block.rows.len() as i32
  }

  pub fn index_of_row(&self, row_id: &str) -> Option<usize> {
    self.block.rows.iter().position(|row| row.id == row_id)
  }

  pub fn update_row(
    &mut self,
    changeset: RowChangeset,
  ) -> SyncResult<Option<GridBlockRevisionChangeset>> {
    let row_id = changeset.row_id.clone();
    self.modify_row(&row_id, |row| {
      let mut is_changed = None;
      if let Some(height) = changeset.height {
        row.height = height;
        is_changed = Some(());
      }

      if let Some(visibility) = changeset.visibility {
        row.visibility = visibility;
        is_changed = Some(());
      }

      if !changeset.cell_by_field_id.is_empty() {
        is_changed = Some(());
        changeset
          .cell_by_field_id
          .into_iter()
          .for_each(|(field_id, cell)| {
            row.cells.insert(field_id, cell);
          })
      }

      Ok(is_changed)
    })
  }

  pub fn move_row(
    &mut self,
    row_id: &str,
    from: usize,
    to: usize,
  ) -> SyncResult<Option<GridBlockRevisionChangeset>> {
    self.modify(|row_revs| {
      if let Some(position) = row_revs.iter().position(|row_rev| row_rev.id == row_id) {
        debug_assert_eq!(from, position);
        let row_rev = row_revs.remove(position);
        if to > row_revs.len() {
          Err(SyncError::out_of_bound())
        } else {
          row_revs.insert(to, row_rev);
          Ok(Some(()))
        }
      } else {
        Ok(None)
      }
    })
  }

  pub fn modify<F>(&mut self, f: F) -> SyncResult<Option<GridBlockRevisionChangeset>>
  where
    F: for<'a> FnOnce(&'a mut Vec<Arc<RowRevision>>) -> SyncResult<Option<()>>,
  {
    let cloned_self = self.clone();
    match f(&mut self.block.rows)? {
      None => Ok(None),
      Some(_) => {
        let old = cloned_self.revision_json()?;
        let new = self.revision_json()?;
        match cal_diff::<EmptyAttributes>(old, new) {
          None => Ok(None),
          Some(operations) => {
            tracing::trace!(
              "[GridBlockRevision] Composing operations {}",
              operations.json_str()
            );
            self.operations = self.operations.compose(&operations)?;
            Ok(Some(GridBlockRevisionChangeset {
              operations,
              md5: md5(&self.operations.json_bytes()),
            }))
          },
        }
      },
    }
  }

  fn modify_row<F>(&mut self, row_id: &str, f: F) -> SyncResult<Option<GridBlockRevisionChangeset>>
  where
    F: FnOnce(&mut RowRevision) -> SyncResult<Option<()>>,
  {
    self.modify(|rows| {
      if let Some(row_rev) = rows.iter_mut().find(|row_rev| row_id == row_rev.id) {
        f(Arc::make_mut(row_rev))
      } else {
        tracing::warn!("[BlockMetaPad]: Can't find any row with id: {}", row_id);
        Ok(None)
      }
    })
  }

  pub fn revision_json(&self) -> SyncResult<String> {
    serde_json::to_string(&self.block)
      .map_err(|e| SyncError::internal().context(format!("serial block to json failed: {}", e)))
  }

  pub fn operations_json_str(&self) -> String {
    self.operations.json_str()
  }
}
pub struct GridBlockRevisionChangeset {
pub operations: GridBlockOperations,
/// md5: the md5 of the grid after applying the change.
pub md5: String,
}
pub fn make_database_block_operations(block_rev: &DatabaseBlockRevision) -> GridBlockOperations {
let json = serde_json::to_string(&block_rev).unwrap();
GridBlockOperationsBuilder::new().insert(&json).build()
}
pub fn make_grid_block_revisions(
_user_id: &str,
grid_block_meta_data: &DatabaseBlockRevision,
) -> Vec<Revision> {
let operations = make_database_block_operations(grid_block_meta_data);
let bytes = operations.json_bytes();
let revision = Revision::initial_revision(&grid_block_meta_data.block_id, bytes);
vec![revision]
}
impl std::default::Default for GridBlockRevisionPad {
fn default() -> Self {
let block_revision = DatabaseBlockRevision {
block_id: gen_block_id(),
rows: vec![],
};
let operations = make_database_block_operations(&block_revision);
GridBlockRevisionPad {
block: block_revision,
operations,
}
}
}
#[cfg(test)]
mod tests {
  use crate::client_database::{GridBlockOperations, GridBlockRevisionPad};
  use grid_model::{RowChangeset, RowRevision};
  use std::borrow::Cow;

  #[test]
  fn block_meta_add_row() {
    let mut pad = test_pad();
    let row = RowRevision {
      id: "1".to_string(),
      block_id: pad.block_id.clone(),
      cells: Default::default(),
      height: 0,
      visibility: false,
    };

    let change = pad.add_row_rev(row.clone(), None).unwrap().unwrap();
    assert_eq!(pad.rows.first().unwrap().as_ref(), &row);
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":24},{"insert":"{\"id\":\"1\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
    );
  }

  #[test]
  fn block_meta_insert_row() {
    let mut pad = test_pad();
    let row_1 = test_row_rev("1", &pad);
    let row_2 = test_row_rev("2", &pad);
    let row_3 = test_row_rev("3", &pad);

    let change = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":24},{"insert":"{\"id\":\"1\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
    );

    let change = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":90},{"insert":",{\"id\":\"2\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
    );

    let change = pad
      .add_row_rev(row_3.clone(), Some("2".to_string()))
      .unwrap()
      .unwrap();
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":157},{"insert":",{\"id\":\"3\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
    );

    assert_eq!(*pad.rows[0], row_1);
    assert_eq!(*pad.rows[1], row_2);
    assert_eq!(*pad.rows[2], row_3);
  }

  fn test_row_rev(id: &str, pad: &GridBlockRevisionPad) -> RowRevision {
    RowRevision {
      id: id.to_string(),
      block_id: pad.block_id.clone(),
      cells: Default::default(),
      height: 0,
      visibility: false,
    }
  }

  #[test]
  fn block_meta_insert_row2() {
    let mut pad = test_pad();
    let row_1 = test_row_rev("1", &pad);
    let row_2 = test_row_rev("2", &pad);
    let row_3 = test_row_rev("3", &pad);

    let _ = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
    let _ = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
    let _ = pad
      .add_row_rev(row_3.clone(), Some("1".to_string()))
      .unwrap()
      .unwrap();

    assert_eq!(*pad.rows[0], row_1);
    assert_eq!(*pad.rows[1], row_3);
    assert_eq!(*pad.rows[2], row_2);
  }

  #[test]
  fn block_meta_insert_row3() {
    let mut pad = test_pad();
    let row_1 = test_row_rev("1", &pad);
    let row_2 = test_row_rev("2", &pad);
    let row_3 = test_row_rev("3", &pad);

    let _ = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
    let _ = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
    let _ = pad
      .add_row_rev(row_3.clone(), Some("".to_string()))
      .unwrap()
      .unwrap();

    assert_eq!(*pad.rows[0], row_1);
    assert_eq!(*pad.rows[1], row_2);
    assert_eq!(*pad.rows[2], row_3);
  }

  #[test]
  fn block_meta_delete_row() {
    let mut pad = test_pad();
    let pre_json_str = pad.operations_json_str();
    let row = RowRevision {
      id: "1".to_string(),
      block_id: pad.block_id.clone(),
      cells: Default::default(),
      height: 0,
      visibility: false,
    };

    let _ = pad.add_row_rev(row.clone(), None).unwrap().unwrap();
    let change = pad
      .delete_rows(vec![Cow::Borrowed(&row.id)])
      .unwrap()
      .unwrap();
    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":24},{"delete":66},{"retain":2}]"#
    );

    assert_eq!(pad.operations_json_str(), pre_json_str);
  }

  #[test]
  fn block_meta_update_row() {
    let mut pad = test_pad();
    let row = RowRevision {
      id: "1".to_string(),
      block_id: pad.block_id.clone(),
      cells: Default::default(),
      height: 0,
      visibility: false,
    };

    let changeset = RowChangeset {
      row_id: row.id.clone(),
      height: Some(100),
      visibility: Some(true),
      cell_by_field_id: Default::default(),
    };

    let _ = pad.add_row_rev(row, None).unwrap().unwrap();
    let change = pad.update_row(changeset).unwrap().unwrap();

    assert_eq!(
      change.operations.json_str(),
      r#"[{"retain":69},{"insert":"10"},{"retain":15},{"insert":"tru"},{"delete":4},{"retain":4}]"#
    );

    assert_eq!(
      pad.revision_json().unwrap(),
      r#"{"block_id":"1","rows":[{"id":"1","block_id":"1","cells":[],"height":100,"visibility":true}]}"#
    );
  }

  fn test_pad() -> GridBlockRevisionPad {
    let operations =
      GridBlockOperations::from_json(r#"[{"insert":"{\"block_id\":\"1\",\"rows\":[]}"}]"#).unwrap();
    GridBlockRevisionPad::from_operations(operations).unwrap()
  }
}
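// A toy, self-contained model of the retain/insert/delete deltas asserted in
// the tests above. This is not the lib_ot implementation (which composes and
// transforms operations); it only applies one delta to a string.
mod delta_sketch {
  pub enum Op {
    Retain(usize),
    Insert(&'static str),
    Delete(usize),
  }

  // Apply ops left to right over the source characters; anything left over
  // after the last op is kept, mirroring an implicit trailing retain.
  pub fn apply(source: &str, ops: &[Op]) -> String {
    let mut out = String::new();
    let mut chars = source.chars();
    for op in ops {
      match op {
        Op::Retain(n) => out.extend(chars.by_ref().take(*n)),
        Op::Insert(s) => out.push_str(s),
        Op::Delete(n) => {
          chars.by_ref().take(*n).for_each(drop);
        },
      }
    }
    out.extend(chars);
    out
  }

  pub fn demo() {
    // Same shape as `[{"retain":2},{"insert":"XY"},{"delete":1}]`.
    let ops = [Op::Retain(2), Op::Insert("XY"), Op::Delete(1)];
    assert_eq!(apply("abcd", &ops), "abXYd");
  }
}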


@@ -1,70 +1,75 @@
use crate::errors::{SyncError, SyncResult};
use grid_model::{
BuildDatabaseContext, DatabaseBlockRevision, FieldRevision, GridBlockMetaRevision, RowRevision,
};
use std::sync::Arc;
pub struct DatabaseBuilder {
build_context: BuildDatabaseContext,
}
impl std::default::Default for DatabaseBuilder {
fn default() -> Self {
let mut build_context = BuildDatabaseContext::new();
let block_meta = GridBlockMetaRevision::new();
let block_meta_data = DatabaseBlockRevision {
block_id: block_meta.block_id.clone(),
rows: vec![],
};
build_context.block_metas.push(block_meta);
build_context.blocks.push(block_meta_data);
DatabaseBuilder { build_context }
}
}
impl DatabaseBuilder {
pub fn new() -> Self {
Self::default()
}
pub fn add_field(&mut self, field: FieldRevision) {
self.build_context.field_revs.push(Arc::new(field));
}
pub fn add_row(&mut self, row_rev: RowRevision) {
let block_meta_rev = self.build_context.block_metas.first_mut().unwrap();
let block_rev = self.build_context.blocks.first_mut().unwrap();
block_rev.rows.push(Arc::new(row_rev));
block_meta_rev.row_count += 1;
}
pub fn add_empty_row(&mut self) {
let row = RowRevision::new(self.block_id());
self.add_row(row);
}
pub fn field_revs(&self) -> &Vec<Arc<FieldRevision>> {
&self.build_context.field_revs
}
pub fn block_id(&self) -> &str {
&self.build_context.block_metas.first().unwrap().block_id
}
pub fn build(self) -> BuildDatabaseContext {
self.build_context
}
}
#[allow(dead_code)]
fn check_rows(fields: &[FieldRevision], rows: &[RowRevision]) -> SyncResult<()> {
let field_ids = fields
.iter()
.map(|field| &field.id)
.collect::<Vec<&String>>();
for row in rows {
let cell_field_ids = row.cells.keys().into_iter().collect::<Vec<&String>>();
if cell_field_ids != field_ids {
let msg = format!("{:?} contains invalid cells", row);
return Err(SyncError::internal().context(msg));
}
}
Ok(())
}
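// A standalone sketch of the invariant check_rows enforces: every row's cell
// keys must line up with the declared field ids. The ids are hypothetical,
// and this toy assumes field_ids is given in the map's key order.
mod check_rows_sketch {
  use std::collections::BTreeMap;

  pub fn rows_match_fields(field_ids: &[&str], rows: &[BTreeMap<String, String>]) -> bool {
    rows.iter().all(|row| {
      let cell_ids: Vec<&str> = row.keys().map(|k| k.as_str()).collect();
      cell_ids == field_ids
    })
  }

  pub fn demo() {
    let mut row = BTreeMap::new();
    row.insert("field_a".to_string(), "cell".to_string());
    assert!(rows_match_fields(&["field_a"], &[row]));
  }
}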


@@ -2,8 +2,8 @@ use crate::errors::{internal_sync_error, SyncError, SyncResult};
use crate::util::cal_diff;
use flowy_sync::util::make_operations_from_revisions;
use grid_model::{
gen_block_id, gen_grid_id, DatabaseRevision, FieldRevision, FieldTypeRevision,
GridBlockMetaRevision, GridBlockMetaRevisionChangeset,
};
use lib_infra::util::md5;
use lib_infra::util::move_vec_element;
@@ -17,264 +17,301 @@ pub type DatabaseOperationsBuilder = DeltaOperationBuilder<EmptyAttributes>;
#[derive(Clone)]
pub struct DatabaseRevisionPad {
grid_rev: Arc<DatabaseRevision>,
operations: DatabaseOperations,
}
pub trait JsonDeserializer {
fn deserialize(&self, type_option_data: Vec<u8>) -> SyncResult<String>;
}
impl DatabaseRevisionPad {
  pub fn grid_id(&self) -> String {
    self.grid_rev.grid_id.clone()
  }

  pub async fn duplicate_grid_block_meta(
    &self,
  ) -> (Vec<FieldRevision>, Vec<GridBlockMetaRevision>) {
    let fields = self
      .grid_rev
      .fields
      .iter()
      .map(|field_rev| field_rev.as_ref().clone())
      .collect();

    let blocks = self
      .grid_rev
      .blocks
      .iter()
      .map(|block| {
        let mut duplicated_block = (**block).clone();
        duplicated_block.block_id = gen_block_id();
        duplicated_block
      })
      .collect::<Vec<GridBlockMetaRevision>>();

    (fields, blocks)
  }

  pub fn from_operations(operations: DatabaseOperations) -> SyncResult<Self> {
    let content = operations.content()?;
    let grid: DatabaseRevision = serde_json::from_str(&content).map_err(|e| {
      let msg = format!("Deserialize operations to grid failed: {}", e);
      tracing::error!("{}", msg);
      SyncError::internal().context(msg)
    })?;

    Ok(Self {
      grid_rev: Arc::new(grid),
      operations,
    })
  }

  pub fn from_revisions(revisions: Vec<Revision>) -> SyncResult<Self> {
    let operations: DatabaseOperations = make_operations_from_revisions(revisions)?;
    Self::from_operations(operations)
  }

  #[tracing::instrument(level = "debug", skip_all, err)]
  pub fn create_field_rev(
    &mut self,
    new_field_rev: FieldRevision,
    start_field_id: Option<String>,
  ) -> SyncResult<Option<DatabaseRevisionChangeset>> {
    self.modify_grid(|grid_meta| {
      // Check if the field exists or not
      if grid_meta
        .fields
        .iter()
        .any(|field_rev| field_rev.id == new_field_rev.id)
      {
        tracing::error!("Duplicate grid field");
        return Ok(None);
      }

      let insert_index = match start_field_id {
        None => None,
        Some(start_field_id) => grid_meta
          .fields
          .iter()
          .position(|field| field.id == start_field_id),
      };
      let new_field_rev = Arc::new(new_field_rev);
      match insert_index {
        None => grid_meta.fields.push(new_field_rev),
        Some(index) => grid_meta.fields.insert(index, new_field_rev),
      }
      Ok(Some(()))
    })
  }

  pub fn delete_field_rev(
    &mut self,
    field_id: &str,
  ) -> SyncResult<Option<DatabaseRevisionChangeset>> {
    self.modify_grid(|grid_meta| {
      match grid_meta
        .fields
        .iter()
        .position(|field| field.id == field_id)
      {
        None => Ok(None),
        Some(index) => {
          if grid_meta.fields[index].is_primary {
            Err(SyncError::can_not_delete_primary_field())
          } else {
            grid_meta.fields.remove(index);
            Ok(Some(()))
          }
        },
      }
    })
  }

  pub fn duplicate_field_rev(
    &mut self,
    field_id: &str,
    duplicated_field_id: &str,
  ) -> SyncResult<Option<DatabaseRevisionChangeset>> {
    self.modify_grid(|grid_meta| {
      match grid_meta
        .fields
        .iter()
        .position(|field| field.id == field_id)
      {
        None => Ok(None),
        Some(index) => {
          let mut duplicate_field_rev = grid_meta.fields[index].as_ref().clone();
          duplicate_field_rev.id = duplicated_field_id.to_string();
          duplicate_field_rev.name = format!("{} (copy)", duplicate_field_rev.name);
          grid_meta
            .fields
            .insert(index + 1, Arc::new(duplicate_field_rev));
          Ok(Some(()))
        },
      }
    })
  }

  /// Modifies the current field type of the [FieldTypeRevision]
  ///
  /// # Arguments
  ///
  /// * `field_id`: the id of the field
  /// * `field_type`: the new field type of the field
  /// * `make_default_type_option`: create the new field type's default type-option data
  /// * `type_option_transform`: transform the old type-option data into the new field type's type-option data
  ///
  pub fn switch_to_field<DT, TT, T>(
    &mut self,
    field_id: &str,
    new_field_type: T,
    make_default_type_option: DT,
    type_option_transform: TT,
  ) -> SyncResult<Option<DatabaseRevisionChangeset>>
  where
    DT: FnOnce() -> String,
    TT: FnOnce(FieldTypeRevision, Option<String>, String) -> String,
    T: Into<FieldTypeRevision>,
  {
    let new_field_type = new_field_type.into();
    self.modify_grid(|grid_meta| {
      match grid_meta
        .fields
        .iter_mut()
        .find(|field_rev| field_rev.id == field_id)
      {
        None => {
          tracing::warn!("Can not find the field with id: {}", field_id);
          Ok(None)
        },
        Some(field_rev) => {
          let mut_field_rev = Arc::make_mut(field_rev);
          let old_field_type_rev = mut_field_rev.ty;
          let old_field_type_option = mut_field_rev
            .get_type_option_str(mut_field_rev.ty)
            .map(|value| value.to_owned());
          match mut_field_rev.get_type_option_str(new_field_type) {
            Some(new_field_type_option) => {
              let transformed_type_option = type_option_transform(
                old_field_type_rev,
                old_field_type_option,
                new_field_type_option.to_owned(),
              );
              mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
            },
            None => {
              // If the type-option data doesn't exist yet, create the default type-option data.
              let new_field_type_option = make_default_type_option();
              let transformed_type_option = type_option_transform(
                old_field_type_rev,
                old_field_type_option,
                new_field_type_option,
              );
              mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
            },
          }
          mut_field_rev.ty = new_field_type;
          Ok(Some(()))
        },
      }
    })
  }

  pub fn replace_field_rev(
    &mut self,
    field_rev: Arc<FieldRevision>,
  ) -> SyncResult<Option<DatabaseRevisionChangeset>> {
    self.modify_grid(|grid_meta| {
      match grid_meta
        .fields
        .iter()
        .position(|field| field.id == field_rev.id)
      {
        None => Ok(None),
        Some(index) => {
          grid_meta.fields.remove(index);
          grid_meta.fields.insert(index, field_rev);
          Ok(Some(()))
        },
      }
    })
  }

  pub fn move_field(
    &mut self,
    field_id: &str,
    from_index: usize,
    to_index: usize,
  ) -> SyncResult<Option<DatabaseRevisionChangeset>> {
    self.modify_grid(|grid_meta| {
      match move_vec_element(
        &mut grid_meta.fields,
        |field| field.id == field_id,
        from_index,
        to_index,
      )
      .map_err(internal_sync_error)?
      {
        true => Ok(Some(())),
        false => Ok(None),
      }
    })
  }

  pub fn contain_field(&self, field_id: &str) -> bool {
    self
      .grid_rev
      .fields
      .iter()
      .any(|field| field.id == field_id)
  }

  pub fn get_field_rev(&self, field_id: &str) -> Option<(usize, &Arc<FieldRevision>)> {
    self
      .grid_rev
      .fields
      .iter()
      .enumerate()
      .find(|(_, field)| field.id == field_id)
  }

  pub fn get_field_revs(
    &self,
    field_ids: Option<Vec<String>>,
  ) -> SyncResult<Vec<Arc<FieldRevision>>> {
    match field_ids {
      None => Ok(self.grid_rev.fields.clone()),
      Some(field_ids) => {
        let field_by_field_id = self
          .grid_rev
          .fields
          .iter()
          .map(|field| (&field.id, field))
          .collect::<HashMap<&String, &Arc<FieldRevision>>>();

        let fields = field_ids
          .iter()
          .flat_map(|field_id| match field_by_field_id.get(&field_id) {
            None => {
              tracing::error!("Can't find the field with id: {}", field_id);
              None
            },
            Some(field) => Some((*field).clone()),
          })
          .collect::<Vec<Arc<FieldRevision>>>();
        Ok(fields)
      },
    }
  }

  pub fn create_block_meta_rev(
    &mut self,
    block: GridBlockMetaRevision,
  ) -> SyncResult<Option<DatabaseRevisionChangeset>> {
    self.modify_grid(|grid_meta| {
      if grid_meta.blocks.iter().any(|b| b.block_id == block.block_id) {
        tracing::warn!("Duplicate grid block");
        Ok(None)
@@ -294,142 +331,158 @@ Ok(Some(()))
Ok(Some(()))
}
})
}
}
pub fn get_block_meta_revs(&self) -> Vec<Arc<GridBlockMetaRevision>> {
self.grid_rev.blocks.clone()
}
pub fn get_block_meta_revs(&self) -> Vec<Arc<GridBlockMetaRevision>> {
self.grid_rev.blocks.clone()
}
pub fn update_block_rev(
&mut self,
changeset: GridBlockMetaRevisionChangeset,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
let block_id = changeset.block_id.clone();
self.modify_block(&block_id, |block| {
let mut is_changed = None;
pub fn update_block_rev(
&mut self,
changeset: GridBlockMetaRevisionChangeset,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
let block_id = changeset.block_id.clone();
self.modify_block(&block_id, |block| {
let mut is_changed = None;
if let Some(row_count) = changeset.row_count {
block.row_count = row_count;
is_changed = Some(());
}
if let Some(row_count) = changeset.row_count {
block.row_count = row_count;
is_changed = Some(());
}
if let Some(start_row_index) = changeset.start_row_index {
block.start_row_index = start_row_index;
is_changed = Some(());
}
if let Some(start_row_index) = changeset.start_row_index {
block.start_row_index = start_row_index;
is_changed = Some(());
}
Ok(is_changed)
})
}
Ok(is_changed)
})
}
pub fn database_md5(&self) -> String {
md5(&self.operations.json_bytes())
}
pub fn database_md5(&self) -> String {
md5(&self.operations.json_bytes())
}
pub fn operations_json_str(&self) -> String {
self.operations.json_str()
}
pub fn operations_json_str(&self) -> String {
self.operations.json_str()
}
pub fn get_fields(&self) -> &[Arc<FieldRevision>] {
&self.grid_rev.fields
}
pub fn get_fields(&self) -> &[Arc<FieldRevision>] {
&self.grid_rev.fields
}
  fn modify_grid<F>(&mut self, f: F) -> SyncResult<Option<DatabaseRevisionChangeset>>
  where
    F: FnOnce(&mut DatabaseRevision) -> SyncResult<Option<()>>,
  {
    let cloned_grid = self.grid_rev.clone();
    match f(Arc::make_mut(&mut self.grid_rev))? {
      None => Ok(None),
      Some(_) => {
        let old = make_database_rev_json_str(&cloned_grid)?;
        let new = self.json_str()?;
        match cal_diff::<EmptyAttributes>(old, new) {
          None => Ok(None),
          Some(operations) => {
            self.operations = self.operations.compose(&operations)?;
            Ok(Some(DatabaseRevisionChangeset {
              operations,
              md5: self.database_md5(),
            }))
          },
        }
      },
    }
  }
  fn modify_block<F>(
    &mut self,
    block_id: &str,
    f: F,
  ) -> SyncResult<Option<DatabaseRevisionChangeset>>
  where
    F: FnOnce(&mut GridBlockMetaRevision) -> SyncResult<Option<()>>,
  {
    self.modify_grid(|grid_rev| {
      match grid_rev
        .blocks
        .iter()
        .position(|block| block.block_id == block_id)
      {
        None => {
          tracing::warn!("[GridMetaPad]: Can't find any block with id: {}", block_id);
          Ok(None)
        },
        Some(index) => {
          let block_rev = Arc::make_mut(&mut grid_rev.blocks[index]);
          f(block_rev)
        },
      }
    })
  }
  pub fn modify_field<F>(
    &mut self,
    field_id: &str,
    f: F,
  ) -> SyncResult<Option<DatabaseRevisionChangeset>>
  where
    F: FnOnce(&mut FieldRevision) -> SyncResult<Option<()>>,
  {
    self.modify_grid(|grid_rev| {
      match grid_rev
        .fields
        .iter()
        .position(|field| field.id == field_id)
      {
        None => {
          tracing::warn!("[GridMetaPad]: Can't find any field with id: {}", field_id);
          Ok(None)
        },
        Some(index) => {
          let mut_field_rev = Arc::make_mut(&mut grid_rev.fields[index]);
          f(mut_field_rev)
        },
      }
    })
  }
  pub fn json_str(&self) -> SyncResult<String> {
    make_database_rev_json_str(&self.grid_rev)
  }
}
pub fn make_database_rev_json_str(grid_revision: &DatabaseRevision) -> SyncResult<String> {
  let json = serde_json::to_string(grid_revision)
    .map_err(|err| internal_sync_error(format!("Serialize grid to json str failed. {:?}", err)))?;
  Ok(json)
}
pub struct DatabaseRevisionChangeset {
  pub operations: DatabaseOperations,
  /// md5: the md5 of the grid after applying the change.
  pub md5: String,
}
pub fn make_database_operations(grid_rev: &DatabaseRevision) -> DatabaseOperations {
  let json = serde_json::to_string(&grid_rev).unwrap();
  DatabaseOperationsBuilder::new().insert(&json).build()
}
pub fn make_database_revisions(_user_id: &str, grid_rev: &DatabaseRevision) -> Vec<Revision> {
  let operations = make_database_operations(grid_rev);
  let bytes = operations.json_bytes();
  let revision = Revision::initial_revision(&grid_rev.grid_id, bytes);
  vec![revision]
}
impl std::default::Default for DatabaseRevisionPad {
  fn default() -> Self {
    let grid = DatabaseRevision::new(&gen_grid_id());
    let operations = make_database_operations(&grid);
    DatabaseRevisionPad {
      grid_rev: Arc::new(grid),
      operations,
    }
  }
}
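
The pad applies an edit by mutating a clone of the revision, re-serializing it, and composing the JSON diff into its operation log. A minimal sketch of that flow, assuming for illustration a `GridBlockMetaRevision::new("block_1")` constructor that is not shown in this commit:

  // Sketch: adding a block yields a changeset whose operations are the JSON
  // diff and whose md5 fingerprints the composed operation log.
  let mut pad = DatabaseRevisionPad::default();
  let block = GridBlockMetaRevision::new("block_1"); // hypothetical constructor
  if let Some(changeset) = pad.create_block_meta_rev(block).unwrap() {
    assert_eq!(changeset.md5, pad.database_md5());
  }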

View File

@ -2,8 +2,8 @@ use crate::errors::{internal_sync_error, SyncError, SyncResult};
use crate::util::cal_diff;
use flowy_sync::util::make_operations_from_revisions;
use grid_model::{
  DatabaseViewRevision, FieldRevision, FieldTypeRevision, FilterRevision,
  GroupConfigurationRevision, LayoutRevision, SortRevision,
};
use lib_infra::util::md5;
use lib_ot::core::{DeltaBuilder, DeltaOperations, EmptyAttributes, OperationTransform};
@ -15,305 +15,334 @@ pub type GridViewOperationsBuilder = DeltaBuilder;
#[derive(Debug, Clone)]
pub struct GridViewRevisionPad {
  view: Arc<DatabaseViewRevision>,
  operations: GridViewOperations,
}
impl std::ops::Deref for GridViewRevisionPad {
  type Target = DatabaseViewRevision;

  fn deref(&self) -> &Self::Target {
    &self.view
  }
}
impl GridViewRevisionPad {
  // For the moment, the view_id is equal to the grid_id. The grid_id represents the database id.
  // A database can be referenced by multiple views.
  pub fn new(grid_id: String, view_id: String, layout: LayoutRevision) -> Self {
    let view = Arc::new(DatabaseViewRevision::new(grid_id, view_id, layout));
    let json = serde_json::to_string(&view).unwrap();
    let operations = GridViewOperationsBuilder::new().insert(&json).build();
    Self { view, operations }
  }
pub fn from_operations(view_id: &str, operations: GridViewOperations) -> SyncResult<Self> {
if operations.is_empty() {
return Ok(GridViewRevisionPad::new(
view_id.to_owned(),
view_id.to_owned(),
LayoutRevision::Grid,
));
}
let s = operations.content()?;
let view: DatabaseViewRevision = serde_json::from_str(&s).map_err(|e| {
let msg = format!("Deserialize operations to GridViewRevision failed: {}", e);
tracing::error!("parsing json: {}", s);
SyncError::internal().context(msg)
})?;
Ok(Self {
view: Arc::new(view),
operations,
})
}
pub fn from_revisions(view_id: &str, revisions: Vec<Revision>) -> SyncResult<Self> {
let operations: GridViewOperations = make_operations_from_revisions(revisions)?;
Self::from_operations(view_id, operations)
}
pub fn get_groups_by_field_revs(
&self,
field_revs: &[Arc<FieldRevision>],
) -> Vec<Arc<GroupConfigurationRevision>> {
self.groups.get_objects_by_field_revs(field_revs)
}
pub fn get_all_groups(&self) -> Vec<Arc<GroupConfigurationRevision>> {
self.groups.get_all_objects()
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub fn insert_or_update_group_configuration(
&mut self,
field_id: &str,
field_type: &FieldTypeRevision,
group_configuration_rev: GroupConfigurationRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
// Only save one group
view.groups.clear();
view
.groups
.add_object(field_id, field_type, group_configuration_rev);
Ok(Some(()))
})
}
#[tracing::instrument(level = "trace", skip_all)]
pub fn contains_group(&self, field_id: &str, field_type: &FieldTypeRevision) -> bool {
self.view.groups.get_objects(field_id, field_type).is_some()
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub fn with_mut_group<F: FnOnce(&mut GroupConfigurationRevision)>(
&mut self,
field_id: &str,
field_type: &FieldTypeRevision,
configuration_id: &str,
mut_configuration_fn: F,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(
|view| match view.groups.get_mut_objects(field_id, field_type) {
None => Ok(None),
Some(configurations_revs) => {
for configuration_rev in configurations_revs {
if configuration_rev.id == configuration_id {
mut_configuration_fn(Arc::make_mut(configuration_rev));
return Ok(Some(()));
}
}
Ok(None)
},
},
)
}
pub fn delete_group(
&mut self,
group_id: &str,
field_id: &str,
field_type: &FieldTypeRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
if let Some(groups) = view.groups.get_mut_objects(field_id, field_type) {
groups.retain(|group| group.id != group_id);
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn get_all_sorts(&self, _field_revs: &[Arc<FieldRevision>]) -> Vec<Arc<SortRevision>> {
self.sorts.get_all_objects()
}
  /// For the moment, a field type only has one sort.
pub fn get_sorts(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
) -> Vec<Arc<SortRevision>> {
self
.sorts
.get_objects(field_id, field_type_rev)
.unwrap_or_default()
}
pub fn get_sort(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
sort_id: &str,
) -> Option<Arc<SortRevision>> {
self
.sorts
.get_object(field_id, field_type_rev, |sort| sort.id == sort_id)
}
pub fn insert_sort(
&mut self,
field_id: &str,
sort_rev: SortRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
let field_type = sort_rev.field_type;
view.sorts.add_object(field_id, &field_type, sort_rev);
Ok(Some(()))
})
}
pub fn update_sort(
&mut self,
field_id: &str,
sort_rev: SortRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
if let Some(sort) = view
.sorts
.get_mut_object(field_id, &sort_rev.field_type, |sort| {
sort.id == sort_rev.id
})
{
let sort = Arc::make_mut(sort);
sort.condition = sort_rev.condition;
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn delete_sort<T: Into<FieldTypeRevision>>(
&mut self,
sort_id: &str,
field_id: &str,
field_type: T,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
let field_type = field_type.into();
self.modify(|view| {
if let Some(sorts) = view.sorts.get_mut_objects(field_id, &field_type) {
sorts.retain(|sort| sort.id != sort_id);
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn delete_all_sorts(&mut self) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
view.sorts.clear();
Ok(Some(()))
})
}
pub fn get_all_filters(&self, field_revs: &[Arc<FieldRevision>]) -> Vec<Arc<FilterRevision>> {
self.filters.get_objects_by_field_revs(field_revs)
}
  /// For the moment, a field type only has one filter.
pub fn get_filters(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
) -> Vec<Arc<FilterRevision>> {
self
.filters
.get_objects(field_id, field_type_rev)
.unwrap_or_default()
}
pub fn get_filter(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
filter_id: &str,
) -> Option<Arc<FilterRevision>> {
self
.filters
.get_object(field_id, field_type_rev, |filter| filter.id == filter_id)
}
pub fn insert_filter(
&mut self,
field_id: &str,
filter_rev: FilterRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
let field_type = filter_rev.field_type;
view.filters.add_object(field_id, &field_type, filter_rev);
Ok(Some(()))
})
}
pub fn update_filter(
&mut self,
field_id: &str,
filter_rev: FilterRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
if let Some(filter) =
view
.filters
.get_mut_object(field_id, &filter_rev.field_type, |filter| {
filter.id == filter_rev.id
})
{
let filter = Arc::make_mut(filter);
filter.condition = filter_rev.condition;
filter.content = filter_rev.content;
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn delete_filter<T: Into<FieldTypeRevision>>(
&mut self,
filter_id: &str,
field_id: &str,
field_type: T,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
let field_type = field_type.into();
self.modify(|view| {
if let Some(filters) = view.filters.get_mut_objects(field_id, &field_type) {
filters.retain(|filter| filter.id != filter_id);
Ok(Some(()))
} else {
Ok(None)
}
})
}
pub fn json_str(&self) -> SyncResult<String> {
make_grid_view_rev_json_str(&self.view)
}
pub fn layout(&self) -> LayoutRevision {
self.layout.clone()
}
fn modify<F>(&mut self, f: F) -> SyncResult<Option<GridViewRevisionChangeset>>
where
F: FnOnce(&mut DatabaseViewRevision) -> SyncResult<Option<()>>,
{
let cloned_view = self.view.clone();
match f(Arc::make_mut(&mut self.view))? {
None => Ok(None),
Some(_) => {
let old = make_grid_view_rev_json_str(&cloned_view)?;
let new = self.json_str()?;
match cal_diff::<EmptyAttributes>(old, new) {
None => Ok(None),
Some(operations) => {
self.operations = self.operations.compose(&operations)?;
let md5 = md5(&self.operations.json_bytes());
Ok(Some(GridViewRevisionChangeset { operations, md5 }))
},
}
},
}
}
}
#[derive(Debug)]
pub struct GridViewRevisionChangeset {
  pub operations: GridViewOperations,
  pub md5: String,
}
pub fn make_grid_view_rev_json_str(grid_revision: &DatabaseViewRevision) -> SyncResult<String> {
  let json = serde_json::to_string(grid_revision).map_err(|err| {
    internal_sync_error(format!("Serialize grid view to json str failed. {:?}", err))
  })?;
  Ok(json)
}
pub fn make_grid_view_operations(grid_view: &DatabaseViewRevision) -> GridViewOperations {
  let json = serde_json::to_string(grid_view).unwrap();
  GridViewOperationsBuilder::new().insert(&json).build()
}
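
Because `modify` derives the changeset by diffing the serialized view before and after the closure runs, an edit that leaves the JSON unchanged produces no changeset at all. A small sketch of that behaviour:

  // Clearing the sorts of a fresh view changes nothing, so cal_diff finds no
  // delta and delete_all_sorts reports None.
  let mut pad =
    GridViewRevisionPad::new("grid_1".to_string(), "grid_1".to_string(), LayoutRevision::Grid);
  assert!(pad.delete_all_sorts().unwrap().is_none());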

View File

@ -1,9 +1,9 @@
use crate::{
  client_document::{
    history::{History, UndoResult},
    view::{ViewExtensions, RECORD_THRESHOLD},
  },
  errors::SyncError,
};
use bytes::Bytes;
use lib_infra::util::md5;
@ -12,227 +12,252 @@ use lib_ot::{core::*, text_delta::DeltaTextOperations};
use tokio::sync::mpsc;
pub trait InitialDocument {
  fn json_str() -> String;
}
pub struct EmptyDocument();
impl InitialDocument for EmptyDocument {
  fn json_str() -> String {
    DeltaTextOperations::default().json_str()
  }
}
pub struct NewlineDocument();
impl InitialDocument for NewlineDocument {
  fn json_str() -> String {
    initial_delta_document_content()
  }
}
pub fn initial_delta_document_content() -> String {
  DeltaTextOperationBuilder::new()
    .insert("\n")
    .build()
    .json_str()
}
pub struct ClientDocument {
  operations: DeltaTextOperations,
  history: History,
  view: ViewExtensions,
  last_edit_time: usize,
  notify: Option<mpsc::UnboundedSender<()>>,
}
impl ClientDocument {
  pub fn new<C: InitialDocument>() -> Self {
    let content = C::json_str();
    Self::from_json(&content).unwrap()
  }

  pub fn from_operations(operations: DeltaTextOperations) -> Self {
    ClientDocument {
      operations,
      history: History::new(),
      view: ViewExtensions::new(),
      last_edit_time: 0,
      notify: None,
    }
  }

  pub fn from_json(json: &str) -> Result<Self, SyncError> {
    let operations = DeltaTextOperations::from_json(json)?;
    Ok(Self::from_operations(operations))
  }

  pub fn get_operations_json(&self) -> String {
    self.operations.json_str()
  }

  pub fn to_bytes(&self) -> Bytes {
    self.operations.json_bytes()
  }

  pub fn to_content(&self) -> String {
    self.operations.content().unwrap()
  }

  pub fn get_operations(&self) -> &DeltaTextOperations {
    &self.operations
  }

  pub fn document_md5(&self) -> String {
    let bytes = self.to_bytes();
    md5(&bytes)
  }

  pub fn set_notify(&mut self, notify: mpsc::UnboundedSender<()>) {
    self.notify = Some(notify);
  }

  pub fn set_operations(&mut self, operations: DeltaTextOperations) {
    tracing::trace!("document: {}", operations.json_str());
    self.operations = operations;

    match &self.notify {
      None => {},
      Some(notify) => {
        let _ = notify.send(());
      },
    }
  }

  pub fn compose_operations(&mut self, operations: DeltaTextOperations) -> Result<(), SyncError> {
    tracing::trace!(
      "{} compose {}",
      &self.operations.json_str(),
      operations.json_str()
    );
    let composed_operations = self.operations.compose(&operations)?;
    let mut undo_operations = operations.invert(&self.operations);

    let now = chrono::Utc::now().timestamp_millis() as usize;
    if now - self.last_edit_time < RECORD_THRESHOLD {
      if let Some(last_operation) = self.history.undo() {
        tracing::trace!("compose previous change");
        tracing::trace!("current = {}", undo_operations);
        tracing::trace!("previous = {}", last_operation);
        undo_operations = undo_operations.compose(&last_operation)?;
      }
    } else {
      self.last_edit_time = now;
    }

    if !undo_operations.is_empty() {
      tracing::trace!("add history operations: {}", undo_operations);
      self.history.record(undo_operations);
    }

    self.set_operations(composed_operations);
    Ok(())
  }

  pub fn insert<T: ToString>(
    &mut self,
    index: usize,
    data: T,
  ) -> Result<DeltaTextOperations, SyncError> {
    let text = data.to_string();
    let interval = Interval::new(index, index);
    validate_interval(&self.operations, &interval)?;

    let operations = self.view.insert(&self.operations, &text, interval)?;
    self.compose_operations(operations.clone())?;
    Ok(operations)
  }

  pub fn delete(&mut self, interval: Interval) -> Result<DeltaTextOperations, SyncError> {
    validate_interval(&self.operations, &interval)?;
    debug_assert!(!interval.is_empty());
    let operations = self.view.delete(&self.operations, interval)?;
    if !operations.is_empty() {
      self.compose_operations(operations.clone())?;
    }
    Ok(operations)
  }

  pub fn format(
    &mut self,
    interval: Interval,
    attribute: AttributeEntry,
  ) -> Result<DeltaTextOperations, SyncError> {
    validate_interval(&self.operations, &interval)?;
    tracing::trace!("format {} with {:?}", interval, attribute);
    let operations = self
      .view
      .format(&self.operations, attribute, interval)
      .unwrap();
    self.compose_operations(operations.clone())?;
    Ok(operations)
  }

  pub fn replace<T: ToString>(
    &mut self,
    interval: Interval,
    data: T,
  ) -> Result<DeltaTextOperations, SyncError> {
    validate_interval(&self.operations, &interval)?;
    let mut operations = DeltaTextOperations::default();
    let text = data.to_string();
    if !text.is_empty() {
      operations = self.view.insert(&self.operations, &text, interval)?;
      self.compose_operations(operations.clone())?;
    }

    if !interval.is_empty() {
      let delete = self.delete(interval)?;
      operations = operations.compose(&delete)?;
    }
    Ok(operations)
  }

  pub fn can_undo(&self) -> bool {
    self.history.can_undo()
  }

  pub fn can_redo(&self) -> bool {
    self.history.can_redo()
  }

  pub fn undo(&mut self) -> Result<UndoResult, SyncError> {
    match self.history.undo() {
      None => Err(SyncError::undo().context("Undo stack is empty")),
      Some(undo_operations) => {
        let (new_operations, inverted_operations) = self.invert(&undo_operations)?;
        self.set_operations(new_operations);
        self.history.add_redo(inverted_operations);
        Ok(UndoResult {
          operations: undo_operations,
        })
      },
    }
  }

  pub fn redo(&mut self) -> Result<UndoResult, SyncError> {
    match self.history.redo() {
      None => Err(SyncError::redo()),
      Some(redo_operations) => {
        let (new_operations, inverted_operations) = self.invert(&redo_operations)?;
        self.set_operations(new_operations);
        self.history.add_undo(inverted_operations);
        Ok(UndoResult {
          operations: redo_operations,
        })
      },
    }
  }

  pub fn is_empty(&self) -> bool {
    // The document is empty if its text is equal to the initial text.
    self.operations.json_str() == NewlineDocument::json_str()
  }
}

impl ClientDocument {
  fn invert(
    &self,
    operations: &DeltaTextOperations,
  ) -> Result<(DeltaTextOperations, DeltaTextOperations), SyncError> {
    // c = a.compose(b)
    // d = b.invert(a)
    // a = c.compose(d)
    let new_operations = self.operations.compose(operations)?;
    let inverted_operations = operations.invert(&self.operations);
    Ok((new_operations, inverted_operations))
  }
}

fn validate_interval(
  operations: &DeltaTextOperations,
  interval: &Interval,
) -> Result<(), SyncError> {
  if operations.utf16_target_len < interval.end {
    tracing::error!(
      "{:?} out of bounds. should 0..{}",
      interval,
      operations.utf16_target_len
    );
    return Err(SyncError::out_of_bound());
  }
  Ok(())
}
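
Taken together: `compose_operations` records the inverted delta in the history, and `undo`/`redo` replay those inversions through `invert` and `set_operations`. A rough usage sketch:

  // Sketch: insert, undo back to the initial "\n" document, then redo.
  let mut doc = ClientDocument::new::<NewlineDocument>();
  let _ops = doc.insert(0, "hello").unwrap();
  assert!(doc.can_undo());
  doc.undo().unwrap();
  assert!(doc.is_empty());
  doc.redo().unwrap();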

View File

@ -1,21 +1,21 @@
use crate::client_document::DeleteExt;
use lib_ot::{
  core::{DeltaOperationBuilder, Interval},
  text_delta::DeltaTextOperations,
};
pub struct DefaultDelete {}
impl DeleteExt for DefaultDelete {
  fn ext_name(&self) -> &str {
    "DefaultDelete"
  }

  fn apply(&self, _delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations> {
    Some(
      DeltaOperationBuilder::new()
        .retain(interval.start)
        .delete(interval.size())
        .build(),
    )
  }
}
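
The fallback extension simply retains everything before the interval and deletes the interval itself. Sketch:

  // Deleting Interval(2..5) produces retain(2) followed by delete(3).
  let ext = DefaultDelete {};
  let delta = DeltaTextOperations::default();
  let deleted = ext.apply(&delta, Interval::new(2, 5));
  assert!(deleted.is_some());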

View File

@ -1,62 +1,65 @@
use crate::{client_document::DeleteExt, util::is_newline};
use lib_ot::{
  core::{
    DeltaOperationBuilder, Interval, OperationAttributes, OperationIterator, Utf16CodeUnitMetric,
    NEW_LINE,
  },
  text_delta::{empty_attributes, DeltaTextOperations},
};
pub struct PreserveLineFormatOnMerge {}
impl DeleteExt for PreserveLineFormatOnMerge {
  fn ext_name(&self) -> &str {
    "PreserveLineFormatOnMerge"
  }

  fn apply(&self, delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations> {
    if interval.is_empty() {
      return None;
    }

    // Seek to the interval's start position, e.g. the position where backspace was pressed.
    let mut iter = OperationIterator::from_offset(delta, interval.start);

    // The op will be the "\n".
    let newline_op = iter.next_op_with_len(1)?;
    if !is_newline(newline_op.get_data()) {
      return None;
    }

    iter.seek::<Utf16CodeUnitMetric>(interval.size() - 1);
    let mut new_delta = DeltaOperationBuilder::new()
      .retain(interval.start)
      .delete(interval.size())
      .build();

    while iter.has_next() {
      match iter.next() {
        None => tracing::error!("op must not be None when has_next() returns true"),
        Some(op) => {
          match op.get_data().find(NEW_LINE) {
            None => {
              new_delta.retain(op.len(), empty_attributes());
              continue;
            },
            Some(line_break) => {
              let mut attributes = op.get_attributes();
              attributes.remove_all_value();

              if newline_op.has_attribute() {
                attributes.extend(newline_op.get_attributes());
              }

              new_delta.retain(line_break, empty_attributes());
              new_delta.retain(1, attributes);
              break;
            },
          }
        },
      }
    }

    Some(new_delta)
}
}
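
In other words, when a delete merges two lines, the surviving newline keeps the merged line's format. A hedged sketch, with illustrative document content:

  // "line1\nline2\n": deleting the first "\n" (interval 5..6) merges the two
  // lines; the extension re-applies the line format to the remaining newline.
  let delta = DeltaTextOperations::from_json(r#"[{"insert":"line1\nline2\n"}]"#).unwrap();
  let ext = PreserveLineFormatOnMerge {};
  let merged = ext.apply(&delta, Interval::new(5, 6));
  assert!(merged.is_some());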

View File

@ -1,61 +1,63 @@
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::is_block;
use lib_ot::{
  core::{DeltaOperationBuilder, Interval, OperationIterator},
  text_delta::{empty_attributes, AttributeScope, DeltaTextOperations},
};
use crate::{
  client_document::{extensions::helper::line_break, FormatExt},
  util::find_newline,
};
pub struct ResolveBlockFormat {}
impl FormatExt for ResolveBlockFormat {
  fn ext_name(&self) -> &str {
    "ResolveBlockFormat"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    interval: Interval,
    attribute: &AttributeEntry,
  ) -> Option<DeltaTextOperations> {
    if !is_block(&attribute.key) {
      return None;
    }

    let mut new_delta = DeltaOperationBuilder::new().retain(interval.start).build();
    let mut iter = OperationIterator::from_offset(delta, interval.start);
    let mut start = 0;
    let end = interval.size();
    while start < end && iter.has_next() {
      let next_op = iter.next_op_with_len(end - start).unwrap();
      match find_newline(next_op.get_data()) {
        None => new_delta.retain(next_op.len(), empty_attributes()),
        Some(_) => {
          let tmp_delta = line_break(&next_op, attribute, AttributeScope::Block);
          new_delta.extend(tmp_delta);
        },
      }
      start += next_op.len();
    }

    while iter.has_next() {
      let op = iter
        .next_op()
        .expect("Unexpected None, iter.has_next() must return op");
      match find_newline(op.get_data()) {
        None => new_delta.retain(op.len(), empty_attributes()),
        Some(line_break) => {
          new_delta.retain(line_break, empty_attributes());
          new_delta.retain(1, attribute.clone().into());
          break;
        },
      }
    }

    Some(new_delta)
}
}

View File

@ -1,48 +1,48 @@
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::is_inline;
use lib_ot::{
  core::{DeltaOperationBuilder, Interval, OperationIterator},
  text_delta::{AttributeScope, DeltaTextOperations},
};
use crate::{
  client_document::{extensions::helper::line_break, FormatExt},
  util::find_newline,
};
pub struct ResolveInlineFormat {}
impl FormatExt for ResolveInlineFormat {
  fn ext_name(&self) -> &str {
    "ResolveInlineFormat"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    interval: Interval,
    attribute: &AttributeEntry,
  ) -> Option<DeltaTextOperations> {
    if !is_inline(&attribute.key) {
      return None;
    }
    let mut new_delta = DeltaOperationBuilder::new().retain(interval.start).build();
    let mut iter = OperationIterator::from_offset(delta, interval.start);
    let mut start = 0;
    let end = interval.size();

    while start < end && iter.has_next() {
      let next_op = iter.next_op_with_len(end - start).unwrap();
      match find_newline(next_op.get_data()) {
        None => new_delta.retain(next_op.len(), attribute.clone().into()),
        Some(_) => {
          let tmp_delta = line_break(&next_op, attribute, AttributeScope::Inline);
          new_delta.extend(tmp_delta);
        },
      }
      start += next_op.len();
    }

    Some(new_delta)
  }
}

View File

@ -1,42 +1,44 @@
use crate::util::find_newline;
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::{
  empty_attributes, AttributeScope, DeltaTextOperation, DeltaTextOperations,
};
pub(crate) fn line_break(
  op: &DeltaTextOperation,
  attribute: &AttributeEntry,
  scope: AttributeScope,
) -> DeltaTextOperations {
  let mut new_delta = DeltaTextOperations::new();
  let mut start = 0;
  let end = op.len();
  let mut s = op.get_data();

  while let Some(line_break) = find_newline(s) {
    match scope {
      AttributeScope::Inline => {
        new_delta.retain(line_break - start, attribute.clone().into());
        new_delta.retain(1, empty_attributes());
      },
      AttributeScope::Block => {
        new_delta.retain(line_break - start, empty_attributes());
        new_delta.retain(1, attribute.clone().into());
      },
      _ => {
        tracing::error!("Unsupported parser line break for {:?}", scope);
      },
    }
    start = line_break + 1;
    s = &s[start..s.len()];
  }

  if start < end {
    match scope {
      AttributeScope::Inline => new_delta.retain(end - start, attribute.clone().into()),
      AttributeScope::Block => new_delta.retain(end - start, empty_attributes()),
      _ => tracing::error!("Unsupported parser line break for {:?}", scope),
    }
  }

  new_delta
}
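
The helper walks one operation's text and splits the formatting around each newline: inline scopes format the text runs, block scopes format the newlines. A worked trace for an op containing "ab\ncd" with an inline attribute, assuming for illustration an `attr: AttributeEntry` binding and a `DeltaTextOperation::insert` constructor (neither shown in this commit):

  // line_break(&op, &attr, AttributeScope::Inline) over "ab\ncd" emits:
  //   retain(2) with attr   -- "ab"
  //   retain(1) plain       -- "\n"
  //   retain(2) with attr   -- "cd"
  let op = DeltaTextOperation::insert("ab\ncd"); // hypothetical constructor
  let _delta = line_break(&op, &attr, AttributeScope::Inline);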

View File

@ -5,56 +5,56 @@ use lib_ot::text_delta::{attributes_except_header, BuildInTextAttributeKey, Delt
pub struct AutoExitBlock {}
impl InsertExt for AutoExitBlock {
  fn ext_name(&self) -> &str {
    "AutoExitBlock"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    // Auto-exiting a block is triggered by entering two consecutive newlines.
    if !is_newline(text) {
      return None;
    }

    if !is_empty_line_at_index(delta, index) {
      return None;
    }

    let mut iter = OperationIterator::from_offset(delta, index);
    let next = iter.next_op()?;
    let mut attributes = next.get_attributes();

    let block_attributes = attributes_except_header(&next);
    if block_attributes.is_empty() {
      return None;
    }

    if next.len() > 1 {
      return None;
    }

    match iter.next_op_with_newline() {
      None => {},
      Some((newline_op, _)) => {
        let newline_attributes = attributes_except_header(&newline_op);
        if block_attributes == newline_attributes {
          return None;
        }
      },
    }

    attributes.retain_values(&[BuildInTextAttributeKey::Header.as_ref()]);

    Some(
      DeltaOperationBuilder::new()
        .retain(index + replace_len)
        .retain_with_attributes(1, attributes)
        .build(),
    )
  }
}
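
The net effect: pressing enter on an empty, block-formatted line clears the block attributes (keeping only the header key), which is how two consecutive newlines exit a quote or list. A hedged sketch; the "quote" attribute key is assumed, not taken from this commit:

  let delta =
    DeltaTextOperations::from_json(r#"[{"insert":"\n","attributes":{"quote":true}}]"#).unwrap();
  let ext = AutoExitBlock {};
  // Inserting "\n" at the empty quoted line should retain 1 with the block
  // attributes stripped down to the header value only.
  let _maybe_exit = ext.apply(&delta, 0, "\n", 0);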

View File

@ -1,94 +1,94 @@
use crate::{client_document::InsertExt, util::is_whitespace};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
  core::{count_utf16_code_units, DeltaOperationBuilder, OperationIterator},
  text_delta::{empty_attributes, BuildInTextAttribute, DeltaTextOperations},
};
use std::cmp::min;
use url::Url;
pub struct AutoFormatExt {}
impl InsertExt for AutoFormatExt {
  fn ext_name(&self) -> &str {
    "AutoFormatExt"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    // Entering whitespace triggers the auto format.
    if !is_whitespace(text) {
      return None;
    }
    let mut iter = OperationIterator::new(delta);
    if let Some(prev) = iter.next_op_with_len(index) {
      match AutoFormat::parse(prev.get_data()) {
        None => {},
        Some(formatter) => {
          let mut new_attributes = prev.get_attributes();

          // format_len should not be greater than index. The url crate appends "/" to
          // the end of the input string, which can make format_len greater than the
          // input string's length.
          let format_len = min(index, formatter.format_len());

          let format_attributes = formatter.to_attributes();
          format_attributes.iter().for_each(|(k, v)| {
            if !new_attributes.contains_key(k) {
              new_attributes.insert(k.clone(), v.clone());
            }
          });

          let next_attributes = match iter.next_op() {
            None => empty_attributes(),
            Some(op) => op.get_attributes(),
          };

          return Some(
            DeltaOperationBuilder::new()
              .retain(index + replace_len - min(index, format_len))
              .retain_with_attributes(format_len, format_attributes)
              .insert_with_attributes(text, next_attributes)
              .build(),
          );
        },
      }
    }

    None
  }
}
pub enum AutoFormatter {
  Url(Url),
}
impl AutoFormatter {
  pub fn to_attributes(&self) -> AttributeHashMap {
    match self {
      AutoFormatter::Url(url) => BuildInTextAttribute::Link(url.as_str()).into(),
    }
  }

  pub fn format_len(&self) -> usize {
    let s = match self {
      AutoFormatter::Url(url) => url.to_string(),
    };

    count_utf16_code_units(&s)
  }
}
pub struct AutoFormat {}
impl AutoFormat {
  fn parse(s: &str) -> Option<AutoFormatter> {
    if let Ok(url) = Url::parse(s) {
      return Some(AutoFormatter::Url(url));
    }

    None
  }
}
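
Since parsing is just `Url::parse`, any string the url crate accepts becomes a link once whitespace is typed after it, and `format_len` counts UTF-16 code units to match the editor's indexing. An in-crate sketch:

  // The url crate normalizes "https://appflowy.io" to "https://appflowy.io/",
  // which is why format_len is clamped by `index` at the call site.
  if let Some(formatter) = AutoFormat::parse("https://appflowy.io") {
    let _attrs = formatter.to_attributes(); // the link attribute
    assert!(formatter.format_len() >= 19);
  }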

View File

@ -1,50 +1,50 @@
use crate::client_document::InsertExt;
use lib_ot::core::AttributeHashMap;
use lib_ot::{
  core::{DeltaOperationBuilder, OperationAttributes, OperationIterator, NEW_LINE},
  text_delta::{BuildInTextAttributeKey, DeltaTextOperations},
};
pub struct DefaultInsertAttribute {}
impl InsertExt for DefaultInsertAttribute {
  fn ext_name(&self) -> &str {
    "DefaultInsertAttribute"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    let iter = OperationIterator::new(delta);
    let mut attributes = AttributeHashMap::new();

    // Ensure that each line split by "\n" keeps the block attributes. For example,
    // inserting "\n" into "123456" at index 3 yields:
    //
    // [{"insert":"123"},{"insert":"\n","attributes":{"header":1}},
    // {"insert":"456"},{"insert":"\n","attributes":{"header":1}}]
    if text.ends_with(NEW_LINE) {
      match iter.last() {
        None => {},
        Some(op) => {
          if op
            .get_attributes()
            .contains_key(BuildInTextAttributeKey::Header.as_ref())
          {
            attributes.extend(op.get_attributes());
          }
        },
      }
    }

    Some(
      DeltaOperationBuilder::new()
        .retain(index + replace_len)
        .insert_with_attributes(text, attributes)
        .build(),
    )
  }
}
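
This is why pressing enter at the end of a header line yields another header line: the inserted "\n" inherits the last operation's header attribute. A sketch reusing the JSON from the comment above:

  let delta = DeltaTextOperations::from_json(
    r#"[{"insert":"123"},{"insert":"\n","attributes":{"header":1}}]"#,
  )
  .unwrap();
  let ext = DefaultInsertAttribute {};
  // The trailing "\n" being inserted picks up {"header":1}.
  let out = ext.apply(&delta, 0, "\n", 4);
  assert!(out.is_some());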

View File

@ -16,34 +16,34 @@ mod reset_format_on_new_line;
pub struct InsertEmbedsExt {}
impl InsertExt for InsertEmbedsExt {
  fn ext_name(&self) -> &str {
    "InsertEmbedsExt"
  }

  fn apply(
    &self,
    _delta: &DeltaTextOperations,
    _replace_len: usize,
    _text: &str,
    _index: usize,
  ) -> Option<DeltaTextOperations> {
    None
  }
}
pub struct ForceNewlineForInsertsAroundEmbedExt {}
impl InsertExt for ForceNewlineForInsertsAroundEmbedExt {
  fn ext_name(&self) -> &str {
    "ForceNewlineForInsertsAroundEmbedExt"
  }

  fn apply(
    &self,
    _delta: &DeltaTextOperations,
    _replace_len: usize,
    _text: &str,
    _index: usize,
  ) -> Option<DeltaTextOperations> {
    None
  }
}

View File

@ -1,68 +1,72 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
  core::{DeltaOperationBuilder, OperationIterator, NEW_LINE},
  text_delta::{
    attributes_except_header, empty_attributes, BuildInTextAttributeKey, DeltaTextOperations,
  },
};
pub struct PreserveBlockFormatOnInsert {}
impl InsertExt for PreserveBlockFormatOnInsert {
  fn ext_name(&self) -> &str {
    "PreserveBlockFormatOnInsert"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    if !is_newline(text) {
      return None;
    }

    let mut iter = OperationIterator::from_offset(delta, index);
    match iter.next_op_with_newline() {
      None => {},
      Some((newline_op, offset)) => {
        let newline_attributes = newline_op.get_attributes();
        let block_attributes = attributes_except_header(&newline_op);
        if block_attributes.is_empty() {
          return None;
        }

        let mut reset_attribute = AttributeHashMap::new();
        if newline_attributes.contains_key(BuildInTextAttributeKey::Header.as_ref()) {
          reset_attribute.insert(BuildInTextAttributeKey::Header, 1);
        }

        let lines: Vec<_> = text.split(NEW_LINE).collect();
        let mut new_delta = DeltaOperationBuilder::new()
          .retain(index + replace_len)
          .build();
        lines.iter().enumerate().for_each(|(i, line)| {
          if !line.is_empty() {
            new_delta.insert(line, empty_attributes());
          }

          if i == 0 {
            new_delta.insert(NEW_LINE, newline_attributes.clone());
          } else if i < lines.len() - 1 {
            new_delta.insert(NEW_LINE, block_attributes.clone());
          } else {
            // do nothing
          }
        });

        if !reset_attribute.is_empty() {
          new_delta.retain(offset, empty_attributes());
          let len = newline_op.get_data().find(NEW_LINE).unwrap();
          new_delta.retain(len, empty_attributes());
          new_delta.retain(1, reset_attribute);
        }
        return Some(new_delta);
      },
    }

    None
  }
}
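
So splitting inside a block-formatted line keeps the block attributes on every newline the split produces, and a header line is additionally reset to level 1. A hedged sketch; the "quote" attribute key is assumed, not taken from this commit:

  let delta = DeltaTextOperations::from_json(
    r#"[{"insert":"hello"},{"insert":"\n","attributes":{"quote":true}}]"#,
  )
  .unwrap();
  let ext = PreserveBlockFormatOnInsert {};
  // Splitting "hello" at index 2 keeps both halves quoted.
  let out = ext.apply(&delta, 0, "\n", 2);
  assert!(out.is_some());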

View File

@ -1,109 +1,109 @@
use crate::{
  client_document::InsertExt,
  util::{contain_newline, is_newline},
};
use lib_ot::{
  core::{DeltaOperationBuilder, OpNewline, OperationIterator, NEW_LINE},
  text_delta::{empty_attributes, BuildInTextAttributeKey, DeltaTextOperations},
};
pub struct PreserveInlineFormat {}
impl InsertExt for PreserveInlineFormat {
  fn ext_name(&self) -> &str {
    "PreserveInlineFormat"
  }

  fn apply(
    &self,
    delta: &DeltaTextOperations,
    replace_len: usize,
    text: &str,
    index: usize,
  ) -> Option<DeltaTextOperations> {
    if contain_newline(text) {
      return None;
    }

    let mut iter = OperationIterator::new(delta);
    let prev = iter.next_op_with_len(index)?;
    if OpNewline::parse(&prev).is_contain() {
      return None;
    }

    let mut attributes = prev.get_attributes();
    if attributes.is_empty() || !attributes.contains_key(BuildInTextAttributeKey::Link.as_ref()) {
      return Some(
        DeltaOperationBuilder::new()
          .retain(index + replace_len)
          .insert_with_attributes(text, attributes)
          .build(),
      );
    }

    let next = iter.next_op();
    match &next {
      None => attributes = empty_attributes(),
      Some(next) => {
        if OpNewline::parse(next).is_equal() {
          attributes = empty_attributes();
        }
      },
    }

    let new_delta = DeltaOperationBuilder::new()
      .retain(index + replace_len)
      .insert_with_attributes(text, attributes)
      .build();
    Some(new_delta)
}
}
pub struct PreserveLineFormatOnSplit {}
impl InsertExt for PreserveLineFormatOnSplit {
fn ext_name(&self) -> &str {
"PreserveLineFormatOnSplit"
fn ext_name(&self) -> &str {
"PreserveLineFormatOnSplit"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
if !is_newline(text) {
return None;
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
if !is_newline(text) {
return None;
}
let mut iter = OperationIterator::new(delta);
let prev = iter.next_op_with_len(index)?;
if OpNewline::parse(&prev).is_end() {
return None;
}
let next = iter.next_op()?;
let newline_status = OpNewline::parse(&next);
if newline_status.is_end() {
return None;
}
let mut new_delta = DeltaTextOperations::new();
new_delta.retain(index + replace_len, empty_attributes());
if newline_status.is_contain() {
debug_assert!(!next.has_attribute());
new_delta.insert(NEW_LINE, empty_attributes());
return Some(new_delta);
}
match iter.next_op_with_newline() {
None => {}
Some((newline_op, _)) => {
new_delta.insert(NEW_LINE, newline_op.get_attributes());
}
}
Some(new_delta)
let mut iter = OperationIterator::new(delta);
let prev = iter.next_op_with_len(index)?;
if OpNewline::parse(&prev).is_end() {
return None;
}
let next = iter.next_op()?;
let newline_status = OpNewline::parse(&next);
if newline_status.is_end() {
return None;
}
let mut new_delta = DeltaTextOperations::new();
new_delta.retain(index + replace_len, empty_attributes());
if newline_status.is_contain() {
debug_assert!(!next.has_attribute());
new_delta.insert(NEW_LINE, empty_attributes());
return Some(new_delta);
}
match iter.next_op_with_newline() {
None => {},
Some((newline_op, _)) => {
new_delta.insert(NEW_LINE, newline_op.get_attributes());
},
}
Some(new_delta)
}
}

View File

@ -1,50 +1,50 @@
use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
core::{DeltaOperationBuilder, OperationIterator, Utf16CodeUnitMetric, NEW_LINE},
text_delta::{BuildInTextAttributeKey, DeltaTextOperations},
core::{DeltaOperationBuilder, OperationIterator, Utf16CodeUnitMetric, NEW_LINE},
text_delta::{BuildInTextAttributeKey, DeltaTextOperations},
};
pub struct ResetLineFormatOnNewLine {}
impl InsertExt for ResetLineFormatOnNewLine {
fn ext_name(&self) -> &str {
"ResetLineFormatOnNewLine"
fn ext_name(&self) -> &str {
"ResetLineFormatOnNewLine"
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
if !is_newline(text) {
return None;
}
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations> {
if !is_newline(text) {
return None;
}
let mut iter = OperationIterator::new(delta);
iter.seek::<Utf16CodeUnitMetric>(index);
let next_op = iter.next_op()?;
if !next_op.get_data().starts_with(NEW_LINE) {
return None;
}
let mut reset_attribute = AttributeHashMap::new();
if next_op
.get_attributes()
.contains_key(BuildInTextAttributeKey::Header.as_ref())
{
reset_attribute.remove_value(BuildInTextAttributeKey::Header);
}
let len = index + replace_len;
Some(
DeltaOperationBuilder::new()
.retain(len)
.insert_with_attributes(NEW_LINE, next_op.get_attributes())
.retain_with_attributes(1, reset_attribute)
.trim()
.build(),
)
let mut iter = OperationIterator::new(delta);
iter.seek::<Utf16CodeUnitMetric>(index);
let next_op = iter.next_op()?;
if !next_op.get_data().starts_with(NEW_LINE) {
return None;
}
let mut reset_attribute = AttributeHashMap::new();
if next_op
.get_attributes()
.contains_key(BuildInTextAttributeKey::Header.as_ref())
{
reset_attribute.remove_value(BuildInTextAttributeKey::Header);
}
let len = index + replace_len;
Some(
DeltaOperationBuilder::new()
.retain(len)
.insert_with_attributes(NEW_LINE, next_op.get_attributes())
.retain_with_attributes(1, reset_attribute)
.trim()
.build(),
)
}
}

View File

@ -14,27 +14,27 @@ pub type FormatExtension = Box<dyn FormatExt + Send + Sync>;
pub type DeleteExtension = Box<dyn DeleteExt + Send + Sync>;
pub trait InsertExt {
fn ext_name(&self) -> &str;
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations>;
fn ext_name(&self) -> &str;
fn apply(
&self,
delta: &DeltaTextOperations,
replace_len: usize,
text: &str,
index: usize,
) -> Option<DeltaTextOperations>;
}
pub trait FormatExt {
fn ext_name(&self) -> &str;
fn apply(
&self,
delta: &DeltaTextOperations,
interval: Interval,
attribute: &AttributeEntry,
) -> Option<DeltaTextOperations>;
fn ext_name(&self) -> &str;
fn apply(
&self,
delta: &DeltaTextOperations,
interval: Interval,
attribute: &AttributeEntry,
) -> Option<DeltaTextOperations>;
}
pub trait DeleteExt {
fn ext_name(&self) -> &str;
fn apply(&self, delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations>;
fn ext_name(&self) -> &str;
fn apply(&self, delta: &DeltaTextOperations, interval: Interval) -> Option<DeltaTextOperations>;
}

View File

@ -4,77 +4,77 @@ const MAX_UNDOES: usize = 20;
#[derive(Debug, Clone)]
pub struct UndoResult {
pub operations: DeltaTextOperations,
pub operations: DeltaTextOperations,
}
#[derive(Debug, Clone)]
pub struct History {
#[allow(dead_code)]
cur_undo: usize,
undoes: Vec<DeltaTextOperations>,
redoes: Vec<DeltaTextOperations>,
capacity: usize,
#[allow(dead_code)]
cur_undo: usize,
undoes: Vec<DeltaTextOperations>,
redoes: Vec<DeltaTextOperations>,
capacity: usize,
}
impl std::default::Default for History {
fn default() -> Self {
History {
cur_undo: 1,
undoes: Vec::new(),
redoes: Vec::new(),
capacity: MAX_UNDOES,
}
fn default() -> Self {
History {
cur_undo: 1,
undoes: Vec::new(),
redoes: Vec::new(),
capacity: MAX_UNDOES,
}
}
}
impl History {
  pub fn new() -> Self {
    History::default()
  }

  pub fn can_undo(&self) -> bool {
    !self.undoes.is_empty()
  }

  pub fn can_redo(&self) -> bool {
    !self.redoes.is_empty()
  }

  pub fn add_undo(&mut self, delta: DeltaTextOperations) {
    self.undoes.push(delta);
  }

  pub fn add_redo(&mut self, delta: DeltaTextOperations) {
    self.redoes.push(delta);
  }

  pub fn record(&mut self, delta: DeltaTextOperations) {
    if delta.ops.is_empty() {
      return;
    }
    self.redoes.clear();
    self.add_undo(delta);
    if self.undoes.len() > self.capacity {
      self.undoes.remove(0);
    }
  }

  pub fn undo(&mut self) -> Option<DeltaTextOperations> {
    if !self.can_undo() {
      return None;
    }
    let delta = self.undoes.pop().unwrap();
    Some(delta)
  }

  pub fn redo(&mut self) -> Option<DeltaTextOperations> {
    if !self.can_redo() {
      return None;
    }
    let delta = self.redoes.pop().unwrap();
    Some(delta)
  }
}
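
The record/undo/redo contract is easiest to see on a plain value type. A minimal sketch with `String` standing in for `DeltaTextOperations`, keeping the same capacity bound and redo-invalidation rule as above:

struct History {
  undoes: Vec<String>,
  redoes: Vec<String>,
  capacity: usize,
}

impl History {
  fn record(&mut self, delta: String) {
    if delta.is_empty() {
      return;
    }
    // A new edit invalidates the redo stack, exactly as above.
    self.redoes.clear();
    self.undoes.push(delta);
    if self.undoes.len() > self.capacity {
      self.undoes.remove(0); // drop the oldest entry once over capacity
    }
  }

  fn undo(&mut self) -> Option<String> {
    self.undoes.pop()
  }
}

fn main() {
  let mut history = History { undoes: vec![], redoes: vec![], capacity: 20 };
  history.record("insert 'a'".to_string());
  history.record("insert 'b'".to_string());
  assert_eq!(history.undo().as_deref(), Some("insert 'b'"));
}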

View File

@ -1,116 +1,119 @@
use crate::client_document::*;
use lib_ot::core::AttributeEntry;
use lib_ot::{
core::{trim, Interval},
errors::{ErrorBuilder, OTError, OTErrorCode},
text_delta::DeltaTextOperations,
core::{trim, Interval},
errors::{ErrorBuilder, OTError, OTErrorCode},
text_delta::DeltaTextOperations,
};
pub const RECORD_THRESHOLD: usize = 400; // in milliseconds
pub struct ViewExtensions {
insert_exts: Vec<InsertExtension>,
format_exts: Vec<FormatExtension>,
delete_exts: Vec<DeleteExtension>,
insert_exts: Vec<InsertExtension>,
format_exts: Vec<FormatExtension>,
delete_exts: Vec<DeleteExtension>,
}
impl ViewExtensions {
  pub(crate) fn new() -> Self {
    Self {
      insert_exts: construct_insert_exts(),
      format_exts: construct_format_exts(),
      delete_exts: construct_delete_exts(),
    }
  }

  pub(crate) fn insert(
    &self,
    operations: &DeltaTextOperations,
    text: &str,
    interval: Interval,
  ) -> Result<DeltaTextOperations, OTError> {
    let mut new_operations = None;
    for ext in &self.insert_exts {
      if let Some(mut operations) = ext.apply(operations, interval.size(), text, interval.start) {
        trim(&mut operations);
        tracing::trace!("[{}] applied, delta: {}", ext.ext_name(), operations);
        new_operations = Some(operations);
        break;
      }
    }
    match new_operations {
      None => Err(ErrorBuilder::new(OTErrorCode::ApplyInsertFail).build()),
      Some(new_operations) => Ok(new_operations),
    }
  }

  pub(crate) fn delete(
    &self,
    delta: &DeltaTextOperations,
    interval: Interval,
  ) -> Result<DeltaTextOperations, OTError> {
    let mut new_delta = None;
    for ext in &self.delete_exts {
      if let Some(mut delta) = ext.apply(delta, interval) {
        trim(&mut delta);
        tracing::trace!("[{}] applied, delta: {}", ext.ext_name(), delta);
        new_delta = Some(delta);
        break;
      }
    }
    match new_delta {
      None => Err(ErrorBuilder::new(OTErrorCode::ApplyDeleteFail).build()),
      Some(new_delta) => Ok(new_delta),
    }
  }

  pub(crate) fn format(
    &self,
    operations: &DeltaTextOperations,
    attribute: AttributeEntry,
    interval: Interval,
  ) -> Result<DeltaTextOperations, OTError> {
    let mut new_operations = None;
    for ext in &self.format_exts {
      if let Some(mut operations) = ext.apply(operations, interval, &attribute) {
        trim(&mut operations);
        tracing::trace!("[{}] applied, delta: {}", ext.ext_name(), operations);
        new_operations = Some(operations);
        break;
      }
    }
    match new_operations {
      None => Err(ErrorBuilder::new(OTErrorCode::ApplyFormatFail).build()),
      Some(new_operations) => Ok(new_operations),
    }
  }
}
fn construct_insert_exts() -> Vec<InsertExtension> {
vec![
Box::new(InsertEmbedsExt {}),
Box::new(ForceNewlineForInsertsAroundEmbedExt {}),
Box::new(AutoExitBlock {}),
Box::new(PreserveBlockFormatOnInsert {}),
Box::new(PreserveLineFormatOnSplit {}),
Box::new(ResetLineFormatOnNewLine {}),
Box::new(AutoFormatExt {}),
Box::new(PreserveInlineFormat {}),
Box::new(DefaultInsertAttribute {}),
]
vec![
Box::new(InsertEmbedsExt {}),
Box::new(ForceNewlineForInsertsAroundEmbedExt {}),
Box::new(AutoExitBlock {}),
Box::new(PreserveBlockFormatOnInsert {}),
Box::new(PreserveLineFormatOnSplit {}),
Box::new(ResetLineFormatOnNewLine {}),
Box::new(AutoFormatExt {}),
Box::new(PreserveInlineFormat {}),
Box::new(DefaultInsertAttribute {}),
]
}
fn construct_format_exts() -> Vec<FormatExtension> {
vec![
// Box::new(FormatLinkAtCaretPositionExt {}),
Box::new(ResolveBlockFormat {}),
Box::new(ResolveInlineFormat {}),
]
vec![
// Box::new(FormatLinkAtCaretPositionExt {}),
Box::new(ResolveBlockFormat {}),
Box::new(ResolveInlineFormat {}),
]
}
fn construct_delete_exts() -> Vec<DeleteExtension> {
vec![Box::new(PreserveLineFormatOnMerge {}), Box::new(DefaultDelete {})]
vec![
Box::new(PreserveLineFormatOnMerge {}),
Box::new(DefaultDelete {}),
]
}
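
All three entry points share one shape: walk a prioritized list of boxed extensions and take the first `Some`, so the `Vec` order in `construct_insert_exts` is the priority order and the always-applicable `DefaultInsertAttribute` must come last. A minimal standalone version of that first-match dispatch (the trait and extension names here are stand-ins, not the crate's):

trait Ext: Send + Sync {
  fn name(&self) -> &str;
  fn apply(&self, input: &str) -> Option<String>;
}

struct Uppercase;
impl Ext for Uppercase {
  fn name(&self) -> &str { "Uppercase" }
  fn apply(&self, input: &str) -> Option<String> {
    // Only applies to input it recognizes; otherwise defers to later extensions.
    input.starts_with('!').then(|| input.to_uppercase())
  }
}

struct Fallback;
impl Ext for Fallback {
  fn name(&self) -> &str { "Fallback" }
  fn apply(&self, input: &str) -> Option<String> {
    Some(input.to_string()) // always applies, so it must come last
  }
}

fn dispatch(exts: &[Box<dyn Ext>], input: &str) -> Result<String, String> {
  for ext in exts {
    if let Some(out) = ext.apply(input) {
      // First match wins; ordering of the slice is the priority order.
      return Ok(out);
    }
  }
  Err("no extension applied".to_string())
}

fn main() {
  let exts: Vec<Box<dyn Ext>> = vec![Box::new(Uppercase), Box::new(Fallback)];
  assert_eq!(dispatch(&exts, "!hi").unwrap(), "!HI");
  assert_eq!(dispatch(&exts, "hi").unwrap(), "hi");
}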

View File

@ -1,7 +1,7 @@
use crate::client_folder::FolderOperations;
use crate::{
client_folder::{default_folder_operations, FolderPad},
errors::SyncResult,
client_folder::{default_folder_operations, FolderPad},
errors::SyncResult,
};
use flowy_sync::util::make_operations_from_revisions;
use folder_model::{TrashRevision, WorkspaceRevision};
@ -10,40 +10,40 @@ use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
pub(crate) struct FolderPadBuilder {
workspaces: Vec<WorkspaceRevision>,
trash: Vec<TrashRevision>,
workspaces: Vec<WorkspaceRevision>,
trash: Vec<TrashRevision>,
}
impl FolderPadBuilder {
pub(crate) fn new() -> Self {
Self {
workspaces: vec![],
trash: vec![],
}
pub(crate) fn new() -> Self {
Self {
workspaces: vec![],
trash: vec![],
}
}
#[allow(dead_code)]
pub(crate) fn with_workspace(mut self, workspaces: Vec<WorkspaceRevision>) -> Self {
self.workspaces = workspaces;
self
}
#[allow(dead_code)]
pub(crate) fn with_workspace(mut self, workspaces: Vec<WorkspaceRevision>) -> Self {
self.workspaces = workspaces;
self
}
#[allow(dead_code)]
pub(crate) fn with_trash(mut self, trash: Vec<TrashRevision>) -> Self {
self.trash = trash;
self
}
#[allow(dead_code)]
pub(crate) fn with_trash(mut self, trash: Vec<TrashRevision>) -> Self {
self.trash = trash;
self
}
pub(crate) fn build_with_revisions(self, revisions: Vec<Revision>) -> SyncResult<FolderPad> {
let mut operations: FolderOperations = make_operations_from_revisions(revisions)?;
if operations.is_empty() {
operations = default_folder_operations();
}
FolderPad::from_operations(operations)
pub(crate) fn build_with_revisions(self, revisions: Vec<Revision>) -> SyncResult<FolderPad> {
let mut operations: FolderOperations = make_operations_from_revisions(revisions)?;
if operations.is_empty() {
operations = default_folder_operations();
}
FolderPad::from_operations(operations)
}
#[allow(dead_code)]
pub(crate) fn build(self) -> SyncResult<FolderPad> {
FolderPad::new(self.workspaces, self.trash)
}
#[allow(dead_code)]
pub(crate) fn build(self) -> SyncResult<FolderPad> {
FolderPad::new(self.workspaces, self.trash)
}
}

View File

@ -10,133 +10,138 @@ use std::sync::Arc;
pub type AtomicNodeTree = RwLock<NodeTree>;
pub struct FolderNodePad {
pub tree: Arc<AtomicNodeTree>,
pub node_id: NodeId,
pub workspaces: WorkspaceList,
pub trash: TrashList,
pub tree: Arc<AtomicNodeTree>,
pub node_id: NodeId,
pub workspaces: WorkspaceList,
pub trash: TrashList,
}
#[derive(Clone, Node)]
#[node_type = "workspaces"]
pub struct WorkspaceList {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(child_name = "workspace")]
inner: Vec<WorkspaceNode>,
#[node(child_name = "workspace")]
inner: Vec<WorkspaceNode>,
}
impl std::ops::Deref for WorkspaceList {
type Target = Vec<WorkspaceNode>;
type Target = Vec<WorkspaceNode>;
fn deref(&self) -> &Self::Target {
&self.inner
}
fn deref(&self) -> &Self::Target {
&self.inner
}
}
impl std::ops::DerefMut for WorkspaceList {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.inner
}
}
#[derive(Clone, Node)]
#[node_type = "trash"]
pub struct TrashList {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(child_name = "trash")]
inner: Vec<TrashNode>,
#[node(child_name = "trash")]
inner: Vec<TrashNode>,
}
impl FolderNodePad {
pub fn new() -> Self {
Self::default()
}
pub fn new() -> Self {
Self::default()
}
pub fn get_workspace(&self, workspace_id: &str) -> Option<&WorkspaceNode> {
self.workspaces.iter().find(|workspace| workspace.id == workspace_id)
}
pub fn get_workspace(&self, workspace_id: &str) -> Option<&WorkspaceNode> {
self
.workspaces
.iter()
.find(|workspace| workspace.id == workspace_id)
}
pub fn get_mut_workspace(&mut self, workspace_id: &str) -> Option<&mut WorkspaceNode> {
self.workspaces
.iter_mut()
.find(|workspace| workspace.id == workspace_id)
}
pub fn get_mut_workspace(&mut self, workspace_id: &str) -> Option<&mut WorkspaceNode> {
self
.workspaces
.iter_mut()
.find(|workspace| workspace.id == workspace_id)
}
pub fn add_workspace(&mut self, mut workspace: WorkspaceNode) {
let path = workspaces_path().clone_with(self.workspaces.len());
let op = NodeOperation::Insert {
path: path.clone(),
nodes: vec![workspace.to_node_data()],
};
self.tree.write().apply_op(op).unwrap();
pub fn add_workspace(&mut self, mut workspace: WorkspaceNode) {
let path = workspaces_path().clone_with(self.workspaces.len());
let op = NodeOperation::Insert {
path: path.clone(),
nodes: vec![workspace.to_node_data()],
};
self.tree.write().apply_op(op).unwrap();
let node_id = self.tree.read().node_id_at_path(path).unwrap();
workspace.node_id = Some(node_id);
self.workspaces.push(workspace);
}
let node_id = self.tree.read().node_id_at_path(path).unwrap();
workspace.node_id = Some(node_id);
self.workspaces.push(workspace);
}
pub fn to_json(&self, pretty: bool) -> SyncResult<String> {
self.tree
.read()
.to_json(pretty)
.map_err(|e| SyncError::serde().context(e))
}
pub fn to_json(&self, pretty: bool) -> SyncResult<String> {
self
.tree
.read()
.to_json(pretty)
.map_err(|e| SyncError::serde().context(e))
}
}
impl std::default::Default for FolderNodePad {
fn default() -> Self {
let tree = Arc::new(RwLock::new(NodeTree::default()));
fn default() -> Self {
let tree = Arc::new(RwLock::new(NodeTree::default()));
// Workspace
let mut workspaces = WorkspaceList {
tree: tree.clone(),
node_id: None,
inner: vec![],
};
let workspace_node = workspaces.to_node_data();
// Workspace
let mut workspaces = WorkspaceList {
tree: tree.clone(),
node_id: None,
inner: vec![],
};
let workspace_node = workspaces.to_node_data();
// Trash
let mut trash = TrashList {
tree: tree.clone(),
node_id: None,
inner: vec![],
};
let trash_node = trash.to_node_data();
// Trash
let mut trash = TrashList {
tree: tree.clone(),
node_id: None,
inner: vec![],
};
let trash_node = trash.to_node_data();
let folder_node = NodeDataBuilder::new("folder")
.add_node_data(workspace_node)
.add_node_data(trash_node)
.build();
let folder_node = NodeDataBuilder::new("folder")
.add_node_data(workspace_node)
.add_node_data(trash_node)
.build();
let operation = NodeOperation::Insert {
path: folder_path(),
nodes: vec![folder_node],
};
tree.write().apply_op(operation).unwrap();
let node_id = tree.read().node_id_at_path(folder_path()).unwrap();
workspaces.node_id = Some(tree.read().node_id_at_path(workspaces_path()).unwrap());
trash.node_id = Some(tree.read().node_id_at_path(trash_path()).unwrap());
let operation = NodeOperation::Insert {
path: folder_path(),
nodes: vec![folder_node],
};
tree.write().apply_op(operation).unwrap();
let node_id = tree.read().node_id_at_path(folder_path()).unwrap();
workspaces.node_id = Some(tree.read().node_id_at_path(workspaces_path()).unwrap());
trash.node_id = Some(tree.read().node_id_at_path(trash_path()).unwrap());
Self {
tree,
node_id,
workspaces,
trash,
}
Self {
tree,
node_id,
workspaces,
trash,
}
}
}
fn folder_path() -> Path {
vec![0].into()
vec![0].into()
}
fn workspaces_path() -> Path {
folder_path().clone_with(0)
folder_path().clone_with(0)
}
fn trash_path() -> Path {
folder_path().clone_with(1)
folder_path().clone_with(1)
}
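
These helpers pin the tree layout: the folder node lives at [0], its workspaces child at [0, 0], trash at [0, 1], and the n-th workspace at [0, 0, n], which is why later tests assert paths like vec![0, 0, 1]. Assuming `clone_with` simply appends one more index (consistent with its use above), the arithmetic is:

// Stand-in for lib_ot's Path: a list of child indexes from the root.
fn clone_with(base: &[usize], index: usize) -> Vec<usize> {
  let mut path = base.to_vec();
  path.push(index);
  path
}

fn main() {
  let folder = vec![0];                       // folder_path()
  let workspaces = clone_with(&folder, 0);    // workspaces_path() == [0, 0]
  let trash = clone_with(&folder, 1);         // trash_path()      == [0, 1]
  let second_ws = clone_with(&workspaces, 1); // second workspace  == [0, 0, 1]
  assert_eq!(second_ws, vec![0, 0, 1]);
  println!("{:?} {:?}", workspaces, trash);
}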

View File

@ -7,14 +7,14 @@ use std::sync::Arc;
#[derive(Clone, Node)]
#[node_type = "trash"]
pub struct TrashNode {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub id: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub id: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub name: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub name: String,
}

View File

@ -3,52 +3,70 @@ use crate::errors::SyncResult;
use lib_ot::core::{AttributeHashMap, AttributeValue, Changeset, NodeId, NodeOperation};
use std::sync::Arc;
pub fn get_attributes_str_value(tree: Arc<AtomicNodeTree>, node_id: &NodeId, key: &str) -> Option<String> {
tree.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
.and_then(|value| value.str_value())
pub fn get_attributes_str_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
) -> Option<String> {
tree
.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
.and_then(|value| value.str_value())
}
pub fn set_attributes_str_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
value: String,
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
value: String,
) -> SyncResult<()> {
let old_attributes = match get_attributes(tree.clone(), node_id) {
None => AttributeHashMap::new(),
Some(attributes) => attributes,
};
let mut new_attributes = old_attributes.clone();
new_attributes.insert(key, value);
let path = tree.read().path_from_node_id(*node_id);
let update_operation = NodeOperation::Update {
path,
changeset: Changeset::Attributes {
new: new_attributes,
old: old_attributes,
},
};
tree.write().apply_op(update_operation)?;
Ok(())
let old_attributes = match get_attributes(tree.clone(), node_id) {
None => AttributeHashMap::new(),
Some(attributes) => attributes,
};
let mut new_attributes = old_attributes.clone();
new_attributes.insert(key, value);
let path = tree.read().path_from_node_id(*node_id);
let update_operation = NodeOperation::Update {
path,
changeset: Changeset::Attributes {
new: new_attributes,
old: old_attributes,
},
};
tree.write().apply_op(update_operation)?;
Ok(())
}
#[allow(dead_code)]
pub fn get_attributes_int_value(tree: Arc<AtomicNodeTree>, node_id: &NodeId, key: &str) -> Option<i64> {
tree.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
.and_then(|value| value.int_value())
pub fn get_attributes_int_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
) -> Option<i64> {
tree
.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
.and_then(|value| value.int_value())
}
pub fn get_attributes(tree: Arc<AtomicNodeTree>, node_id: &NodeId) -> Option<AttributeHashMap> {
tree.read().get_node(*node_id).map(|node| node.attributes.clone())
tree
.read()
.get_node(*node_id)
.map(|node| node.attributes.clone())
}
#[allow(dead_code)]
pub fn get_attributes_value(tree: Arc<AtomicNodeTree>, node_id: &NodeId, key: &str) -> Option<AttributeValue> {
tree.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
pub fn get_attributes_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
) -> Option<AttributeValue> {
tree
.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
}
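
Note that `set_attributes_str_value` never mutates the node in place: it snapshots the old attribute map, builds the new one, and applies an update operation carrying both, which is what keeps the change invertible. A standalone sketch of that old/new changeset shape using plain `HashMap`s (all names here are illustrative):

use std::collections::HashMap;

type Attrs = HashMap<String, String>;

// Mirrors Changeset::Attributes { new, old }: both sides are kept so the
// operation can be undone by swapping them.
struct AttributeChangeset {
  old: Attrs,
  new: Attrs,
}

impl AttributeChangeset {
  fn invert(self) -> Self {
    Self { old: self.new, new: self.old }
  }
}

fn set_attribute(current: &Attrs, key: &str, value: &str) -> AttributeChangeset {
  let old = current.clone();
  let mut new = old.clone();
  new.insert(key.to_string(), value.to_string());
  AttributeChangeset { old, new }
}

fn main() {
  let mut node: Attrs = HashMap::new();
  node.insert("name".into(), "workspace".into());
  let changeset = set_attribute(&node, "name", "renamed");
  node = changeset.new.clone();
  assert_eq!(node["name"], "renamed");
  // Undo is just the inverted changeset applied the same way.
  node = changeset.invert().new;
  assert_eq!(node["name"], "workspace");
}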

View File

@ -7,55 +7,55 @@ use std::sync::Arc;
#[derive(Clone, Node)]
#[node_type = "workspace"]
pub struct WorkspaceNode {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub id: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub id: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub name: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub name: String,
#[node(child_name = "app")]
pub apps: Vec<AppNode>,
#[node(child_name = "app")]
pub apps: Vec<AppNode>,
}
impl WorkspaceNode {
pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
Self {
tree,
node_id: None,
id,
name,
apps: vec![],
}
pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
Self {
tree,
node_id: None,
id,
name,
apps: vec![],
}
}
}
#[derive(Clone, Node)]
#[node_type = "app"]
pub struct AppNode {
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
pub tree: Arc<AtomicNodeTree>,
pub node_id: Option<NodeId>,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub id: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub id: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub name: String,
#[node(get_value_with = "get_attributes_str_value")]
#[node(set_value_with = "set_attributes_str_value")]
pub name: String,
}
impl AppNode {
pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
Self {
tree,
node_id: None,
id,
name,
}
pub fn new(tree: Arc<AtomicNodeTree>, id: String, name: String) -> Self {
Self {
tree,
node_id: None,
id,
name,
}
}
}

View File

@ -2,7 +2,7 @@ pub mod client_database;
pub mod client_document;
pub mod client_folder;
pub mod errors {
pub use flowy_sync::errors::*;
pub use flowy_sync::errors::*;
}
pub mod util;

View File

@ -3,127 +3,127 @@ use dissimilar::Chunk;
use document_model::document::DocumentInfo;
use lib_ot::core::{DeltaOperationBuilder, OTString, OperationAttributes};
use lib_ot::{
core::{DeltaOperations, OperationTransform, NEW_LINE, WHITESPACE},
text_delta::DeltaTextOperations,
core::{DeltaOperations, OperationTransform, NEW_LINE, WHITESPACE},
text_delta::DeltaTextOperations,
};
use revision_model::Revision;
use serde::de::DeserializeOwned;
#[inline]
pub fn find_newline(s: &str) -> Option<usize> {
s.find(NEW_LINE)
s.find(NEW_LINE)
}
#[inline]
pub fn is_newline(s: &str) -> bool {
s == NEW_LINE
s == NEW_LINE
}
#[inline]
pub fn is_whitespace(s: &str) -> bool {
s == WHITESPACE
s == WHITESPACE
}
#[inline]
pub fn contain_newline(s: &str) -> bool {
s.contains(NEW_LINE)
s.contains(NEW_LINE)
}
pub fn recover_operation_from_revisions<T>(
revisions: Vec<Revision>,
validator: impl Fn(&DeltaOperations<T>) -> bool,
revisions: Vec<Revision>,
validator: impl Fn(&DeltaOperations<T>) -> bool,
) -> Option<(DeltaOperations<T>, i64)>
where
T: OperationAttributes + DeserializeOwned + OperationAttributes,
T: OperationAttributes + DeserializeOwned + OperationAttributes,
{
let mut new_operations = DeltaOperations::<T>::new();
  let mut rev_id = 0;
  for revision in revisions {
    if let Ok(operations) = DeltaOperations::<T>::from_bytes(revision.bytes) {
      match new_operations.compose(&operations) {
        Ok(composed_operations) => {
          if validator(&composed_operations) {
            rev_id = revision.rev_id;
            new_operations = composed_operations;
          } else {
            break;
          }
        },
        Err(_) => break,
      }
    } else {
      break;
    }
  }

  if new_operations.is_empty() {
    None
  } else {
    Some((new_operations, rev_id))
  }
}
#[inline]
pub fn make_document_info_from_revisions(
doc_id: &str,
revisions: Vec<Revision>,
doc_id: &str,
revisions: Vec<Revision>,
) -> Result<Option<DocumentInfo>, SyncError> {
if revisions.is_empty() {
    return Ok(None);
  }

  let mut delta = DeltaTextOperations::new();
  let mut base_rev_id = 0;
  let mut rev_id = 0;
  for revision in revisions {
    base_rev_id = revision.base_rev_id;
    rev_id = revision.rev_id;

    if revision.bytes.is_empty() {
      tracing::warn!("revision delta_data is empty");
    }

    let new_delta = DeltaTextOperations::from_bytes(revision.bytes)?;
    delta = delta.compose(&new_delta)?;
  }

  Ok(Some(DocumentInfo {
    doc_id: doc_id.to_owned(),
    data: delta.json_bytes().to_vec(),
    rev_id,
    base_rev_id,
  }))
}
#[inline]
pub fn rev_id_from_str(s: &str) -> Result<i64, SyncError> {
let rev_id = s
.to_owned()
.parse::<i64>()
.map_err(|e| SyncError::internal().context(format!("Parse rev_id from {} failed. {}", s, e)))?;
Ok(rev_id)
let rev_id = s
.to_owned()
.parse::<i64>()
.map_err(|e| SyncError::internal().context(format!("Parse rev_id from {} failed. {}", s, e)))?;
Ok(rev_id)
}
pub fn cal_diff<T: OperationAttributes>(old: String, new: String) -> Option<DeltaOperations<T>> {
let chunks = dissimilar::diff(&old, &new);
let mut delta_builder = DeltaOperationBuilder::<T>::new();
for chunk in &chunks {
match chunk {
Chunk::Equal(s) => {
delta_builder = delta_builder.retain(OTString::from(*s).utf16_len());
}
Chunk::Delete(s) => {
delta_builder = delta_builder.delete(OTString::from(*s).utf16_len());
}
Chunk::Insert(s) => {
delta_builder = delta_builder.insert(s);
}
}
let chunks = dissimilar::diff(&old, &new);
let mut delta_builder = DeltaOperationBuilder::<T>::new();
for chunk in &chunks {
match chunk {
Chunk::Equal(s) => {
delta_builder = delta_builder.retain(OTString::from(*s).utf16_len());
},
Chunk::Delete(s) => {
delta_builder = delta_builder.delete(OTString::from(*s).utf16_len());
},
Chunk::Insert(s) => {
delta_builder = delta_builder.insert(s);
},
}
}
let delta = delta_builder.build();
if delta.is_empty() {
None
} else {
Some(delta)
}
let delta = delta_builder.build();
if delta.is_empty() {
None
} else {
Some(delta)
}
}
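
`cal_diff` counts lengths in UTF-16 code units because the delta indexes must agree with the string offsets the Dart side sees. A self-contained check of both ingredients, using the real `dissimilar::diff` API (add `dissimilar` to Cargo.toml to run it):

use dissimilar::Chunk;

fn main() {
  // Rust chars vs UTF-16 code units: '𝄞' is one char but two code units.
  let s = "a𝄞";
  assert_eq!(s.chars().count(), 2);
  assert_eq!(s.encode_utf16().count(), 3);

  // The same chunk walk cal_diff performs, printed instead of built into a delta.
  for chunk in dissimilar::diff("hello world", "hallo world!") {
    match chunk {
      Chunk::Equal(s) => println!("retain {} units", s.encode_utf16().count()),
      Chunk::Delete(s) => println!("delete {} units", s.encode_utf16().count()),
      Chunk::Insert(s) => println!("insert {:?}", s),
    }
  }
}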

View File

@ -2,57 +2,74 @@ use flowy_client_sync::client_folder::{FolderNodePad, WorkspaceNode};
#[test]
fn client_folder_create_default_folder_test() {
let folder_pad = FolderNodePad::new();
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
json,
r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
);
let folder_pad = FolderNodePad::new();
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
json,
r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
);
}
#[test]
fn client_folder_create_default_folder_with_workspace_test() {
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
folder_pad.workspaces.add_workspace(workspace).unwrap();
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
json,
r#"{"type":"folder","children":[{"type":"workspaces","children":[{"type":"workspace","attributes":{"id":"1","name":"workspace name"}}]},{"type":"trash"}]}"#
);
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
json,
r#"{"type":"folder","children":[{"type":"workspaces","children":[{"type":"workspace","attributes":{"id":"1","name":"workspace name"}}]},{"type":"trash"}]}"#
);
assert_eq!(
folder_pad.get_workspace("1").unwrap().get_name().unwrap(),
"workspace name"
);
assert_eq!(
folder_pad.get_workspace("1").unwrap().get_name().unwrap(),
"workspace name"
);
}
#[test]
fn client_folder_delete_workspace_test() {
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
folder_pad.workspaces.add_workspace(workspace).unwrap();
folder_pad.workspaces.remove_workspace("1");
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
json,
r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
);
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
folder_pad.workspaces.remove_workspace("1");
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
json,
r#"{"type":"folder","children":[{"type":"workspaces"},{"type":"trash"}]}"#
);
}
#[test]
fn client_folder_update_workspace_name_test() {
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
folder_pad.workspaces.add_workspace(workspace).unwrap();
folder_pad
.workspaces
.get_mut_workspace("1")
.unwrap()
.set_name("my first workspace".to_string());
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
folder_pad
.workspaces
.get_mut_workspace("1")
.unwrap()
.set_name("my first workspace".to_string());
assert_eq!(
folder_pad.workspaces.get_workspace("1").unwrap().get_name().unwrap(),
"my first workspace"
);
assert_eq!(
folder_pad
.workspaces
.get_workspace("1")
.unwrap()
.get_name()
.unwrap(),
"my first workspace"
);
}
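
The JSON these assertions compare against is the node tree serialized type-first: each node carries a `type`, optional `attributes`, and optional `children`. Rebuilding the expected value with `serde_json` (an illustration of the shape, not how the crate serializes) makes that explicit:

use serde_json::json;

fn main() {
  // The tree asserted in client_folder_create_default_folder_with_workspace_test.
  let expected = json!({
    "type": "folder",
    "children": [
      {
        "type": "workspaces",
        "children": [
          { "type": "workspace", "attributes": { "id": "1", "name": "workspace name" } }
        ]
      },
      { "type": "trash" }
    ]
  });
  println!("{}", expected);
}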

View File

@ -3,87 +3,115 @@ use folder_model::AppRevision;
use lib_ot::core::Path;
pub enum FolderNodePadScript {
CreateWorkspace { id: String, name: String },
DeleteWorkspace { id: String },
AssertPathOfWorkspace { id: String, expected_path: Path },
AssertNumberOfWorkspace { expected: usize },
CreateApp { id: String, name: String },
DeleteApp { id: String },
UpdateApp { id: String, name: String },
AssertApp { id: String, expected: Option<AppRevision> },
AssertAppContent { id: String, name: String },
// AssertNumberOfApps { expected: usize },
CreateWorkspace {
id: String,
name: String,
},
DeleteWorkspace {
id: String,
},
AssertPathOfWorkspace {
id: String,
expected_path: Path,
},
AssertNumberOfWorkspace {
expected: usize,
},
CreateApp {
id: String,
name: String,
},
DeleteApp {
id: String,
},
UpdateApp {
id: String,
name: String,
},
AssertApp {
id: String,
expected: Option<AppRevision>,
},
AssertAppContent {
id: String,
name: String,
},
// AssertNumberOfApps { expected: usize },
}
pub struct FolderNodePadTest {
folder_pad: FolderNodePad,
folder_pad: FolderNodePad,
}
impl FolderNodePadTest {
pub fn new() -> FolderNodePadTest {
let mut folder_pad = FolderNodePad::default();
let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
folder_pad.workspaces.add_workspace(workspace).unwrap();
Self { folder_pad }
}
pub fn new() -> FolderNodePadTest {
let mut folder_pad = FolderNodePad::default();
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
Self { folder_pad }
}
pub fn run_scripts(&mut self, scripts: Vec<FolderNodePadScript>) {
for script in scripts {
self.run_script(script);
}
pub fn run_scripts(&mut self, scripts: Vec<FolderNodePadScript>) {
for script in scripts {
self.run_script(script);
}
}
pub fn run_script(&mut self, script: FolderNodePadScript) {
match script {
FolderNodePadScript::CreateWorkspace { id, name } => {
let workspace = WorkspaceNode::new(self.folder_pad.tree.clone(), id, name);
self.folder_pad.workspaces.add_workspace(workspace).unwrap();
}
FolderNodePadScript::DeleteWorkspace { id } => {
self.folder_pad.workspaces.remove_workspace(id);
}
FolderNodePadScript::AssertPathOfWorkspace { id, expected_path } => {
let workspace_node: &WorkspaceNode = self.folder_pad.workspaces.get_workspace(id).unwrap();
let node_id = workspace_node.node_id.unwrap();
let path = self.folder_pad.tree.read().path_from_node_id(node_id);
assert_eq!(path, expected_path);
}
FolderNodePadScript::AssertNumberOfWorkspace { expected } => {
assert_eq!(self.folder_pad.workspaces.len(), expected);
}
FolderNodePadScript::CreateApp { id, name } => {
let app_node = AppNode::new(self.folder_pad.tree.clone(), id, name);
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.add_app(app_node).unwrap();
}
FolderNodePadScript::DeleteApp { id } => {
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.remove_app(&id);
}
FolderNodePadScript::UpdateApp { id, name } => {
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.get_mut_app(&id).unwrap().set_name(name);
}
FolderNodePadScript::AssertApp { id, expected } => {
let workspace_node = self.folder_pad.get_workspace("1").unwrap();
let app = workspace_node.get_app(&id);
match expected {
None => assert!(app.is_none()),
Some(expected_app) => {
let app_node = app.unwrap();
assert_eq!(expected_app.name, app_node.get_name().unwrap());
assert_eq!(expected_app.id, app_node.get_id().unwrap());
}
}
}
FolderNodePadScript::AssertAppContent { id, name } => {
let workspace_node = self.folder_pad.get_workspace("1").unwrap();
let app = workspace_node.get_app(&id).unwrap();
assert_eq!(app.get_name().unwrap(), name)
} // FolderNodePadScript::AssertNumberOfApps { expected } => {
// let workspace_node = self.folder_pad.get_workspace("1").unwrap();
// assert_eq!(workspace_node.apps.len(), expected);
// }
pub fn run_script(&mut self, script: FolderNodePadScript) {
match script {
FolderNodePadScript::CreateWorkspace { id, name } => {
let workspace = WorkspaceNode::new(self.folder_pad.tree.clone(), id, name);
self.folder_pad.workspaces.add_workspace(workspace).unwrap();
},
FolderNodePadScript::DeleteWorkspace { id } => {
self.folder_pad.workspaces.remove_workspace(id);
},
FolderNodePadScript::AssertPathOfWorkspace { id, expected_path } => {
let workspace_node: &WorkspaceNode = self.folder_pad.workspaces.get_workspace(id).unwrap();
let node_id = workspace_node.node_id.unwrap();
let path = self.folder_pad.tree.read().path_from_node_id(node_id);
assert_eq!(path, expected_path);
},
FolderNodePadScript::AssertNumberOfWorkspace { expected } => {
assert_eq!(self.folder_pad.workspaces.len(), expected);
},
FolderNodePadScript::CreateApp { id, name } => {
let app_node = AppNode::new(self.folder_pad.tree.clone(), id, name);
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.add_app(app_node).unwrap();
},
FolderNodePadScript::DeleteApp { id } => {
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.remove_app(&id);
},
FolderNodePadScript::UpdateApp { id, name } => {
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.get_mut_app(&id).unwrap().set_name(name);
},
FolderNodePadScript::AssertApp { id, expected } => {
let workspace_node = self.folder_pad.get_workspace("1").unwrap();
let app = workspace_node.get_app(&id);
match expected {
None => assert!(app.is_none()),
Some(expected_app) => {
let app_node = app.unwrap();
assert_eq!(expected_app.name, app_node.get_name().unwrap());
assert_eq!(expected_app.id, app_node.get_id().unwrap());
},
}
},
FolderNodePadScript::AssertAppContent { id, name } => {
let workspace_node = self.folder_pad.get_workspace("1").unwrap();
let app = workspace_node.get_app(&id).unwrap();
assert_eq!(app.get_name().unwrap(), name)
}, // FolderNodePadScript::AssertNumberOfApps { expected } => {
// let workspace_node = self.folder_pad.get_workspace("1").unwrap();
// assert_eq!(workspace_node.apps.len(), expected);
// }
}
}
}
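
This fixture is script-driven: each test is a `Vec` of enum commands interpreted against one stateful pad, so a new assertion is data rather than new test plumbing. The same pattern in miniature, on a toy stack:

enum Script {
  Push { value: i32 },
  AssertLen { expected: usize },
  AssertTop { expected: Option<i32> },
}

struct StackTest {
  stack: Vec<i32>,
}

impl StackTest {
  fn run_scripts(&mut self, scripts: Vec<Script>) {
    for script in scripts {
      self.run_script(script);
    }
  }

  fn run_script(&mut self, script: Script) {
    match script {
      Script::Push { value } => self.stack.push(value),
      Script::AssertLen { expected } => assert_eq!(self.stack.len(), expected),
      Script::AssertTop { expected } => assert_eq!(self.stack.last().copied(), expected),
    }
  }
}

fn main() {
  let mut test = StackTest { stack: vec![] };
  test.run_scripts(vec![
    Script::Push { value: 1 },
    Script::Push { value: 2 },
    Script::AssertLen { expected: 2 },
    Script::AssertTop { expected: Some(2) },
  ]);
}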

View File

@ -3,84 +3,88 @@ use crate::client_folder::script::FolderNodePadTest;
#[test]
fn client_folder_create_multi_workspaces_test() {
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
AssertPathOfWorkspace {
id: "1".to_string(),
expected_path: vec![0, 0, 0].into(),
},
CreateWorkspace {
id: "a".to_string(),
name: "workspace a".to_string(),
},
AssertPathOfWorkspace {
id: "a".to_string(),
expected_path: vec![0, 0, 1].into(),
},
CreateWorkspace {
id: "b".to_string(),
name: "workspace b".to_string(),
},
AssertPathOfWorkspace {
id: "b".to_string(),
expected_path: vec![0, 0, 2].into(),
},
AssertNumberOfWorkspace { expected: 3 },
// The path of the workspace 'b' will be changed after deleting the 'a' workspace.
DeleteWorkspace { id: "a".to_string() },
AssertPathOfWorkspace {
id: "b".to_string(),
expected_path: vec![0, 0, 1].into(),
},
]);
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
AssertPathOfWorkspace {
id: "1".to_string(),
expected_path: vec![0, 0, 0].into(),
},
CreateWorkspace {
id: "a".to_string(),
name: "workspace a".to_string(),
},
AssertPathOfWorkspace {
id: "a".to_string(),
expected_path: vec![0, 0, 1].into(),
},
CreateWorkspace {
id: "b".to_string(),
name: "workspace b".to_string(),
},
AssertPathOfWorkspace {
id: "b".to_string(),
expected_path: vec![0, 0, 2].into(),
},
AssertNumberOfWorkspace { expected: 3 },
// The path of the workspace 'b' will be changed after deleting the 'a' workspace.
DeleteWorkspace {
id: "a".to_string(),
},
AssertPathOfWorkspace {
id: "b".to_string(),
expected_path: vec![0, 0, 1].into(),
},
]);
}
#[test]
fn client_folder_create_app_test() {
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
CreateApp {
id: "1".to_string(),
name: "my first app".to_string(),
},
AssertAppContent {
id: "1".to_string(),
name: "my first app".to_string(),
},
]);
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
CreateApp {
id: "1".to_string(),
name: "my first app".to_string(),
},
AssertAppContent {
id: "1".to_string(),
name: "my first app".to_string(),
},
]);
}
#[test]
fn client_folder_delete_app_test() {
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
CreateApp {
id: "1".to_string(),
name: "my first app".to_string(),
},
DeleteApp { id: "1".to_string() },
AssertApp {
id: "1".to_string(),
expected: None,
},
]);
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
CreateApp {
id: "1".to_string(),
name: "my first app".to_string(),
},
DeleteApp {
id: "1".to_string(),
},
AssertApp {
id: "1".to_string(),
expected: None,
},
]);
}
#[test]
fn client_folder_update_app_test() {
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
CreateApp {
id: "1".to_string(),
name: "my first app".to_string(),
},
UpdateApp {
id: "1".to_string(),
name: "TODO".to_string(),
},
AssertAppContent {
id: "1".to_string(),
name: "TODO".to_string(),
},
]);
let mut test = FolderNodePadTest::new();
test.run_scripts(vec![
CreateApp {
id: "1".to_string(),
name: "my first app".to_string(),
},
UpdateApp {
id: "1".to_string(),
name: "TODO".to_string(),
},
AssertAppContent {
id: "1".to_string(),
name: "TODO".to_string(),
},
]);
}

View File

@ -3,39 +3,39 @@ use quote::format_ident;
#[allow(dead_code)]
pub struct EventASTContext {
pub event: syn::Ident,
pub event_ty: syn::Ident,
pub event_request_struct: syn::Ident,
pub event_input: Option<syn::Path>,
pub event_output: Option<syn::Path>,
pub event_error: String,
pub event: syn::Ident,
pub event_ty: syn::Ident,
pub event_request_struct: syn::Ident,
pub event_input: Option<syn::Path>,
pub event_output: Option<syn::Path>,
pub event_error: String,
}
impl EventASTContext {
#[allow(dead_code)]
pub fn from(enum_attrs: &EventEnumAttrs) -> EventASTContext {
let command_name = enum_attrs.enum_item_name.clone();
if command_name.is_empty() {
panic!("Invalid command name: {}", enum_attrs.enum_item_name);
}
let event = format_ident!("{}", &command_name);
let splits = command_name.split('_').collect::<Vec<&str>>();
let event_ty = format_ident!("{}", enum_attrs.enum_name);
let event_request_struct = format_ident!("{}Event", &splits.join(""));
let event_input = enum_attrs.event_input();
let event_output = enum_attrs.event_output();
let event_error = enum_attrs.event_error();
EventASTContext {
event,
event_ty,
event_request_struct,
event_input,
event_output,
event_error,
}
#[allow(dead_code)]
pub fn from(enum_attrs: &EventEnumAttrs) -> EventASTContext {
let command_name = enum_attrs.enum_item_name.clone();
if command_name.is_empty() {
panic!("Invalid command name: {}", enum_attrs.enum_item_name);
}
let event = format_ident!("{}", &command_name);
let splits = command_name.split('_').collect::<Vec<&str>>();
let event_ty = format_ident!("{}", enum_attrs.enum_name);
let event_request_struct = format_ident!("{}Event", &splits.join(""));
let event_input = enum_attrs.event_input();
let event_output = enum_attrs.event_output();
let event_error = enum_attrs.event_error();
EventASTContext {
event,
event_ty,
event_request_struct,
event_input,
event_output,
event_error,
}
}
}
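
Stripped of `syn` and `quote`, `from` is string-to-identifier bookkeeping: the underscore segments of the variant name are joined and suffixed with `Event` to name the request struct. A plain-string sketch of that rule (the variant names below are hypothetical):

fn request_struct_name(enum_name: &str, command_name: &str) -> (String, String) {
  // Mirrors the format_ident! usage above: "{}Event" from the joined segments.
  let splits = command_name.split('_').collect::<Vec<&str>>();
  let event_request_struct = format!("{}Event", splits.join(""));
  (enum_name.to_string(), event_request_struct)
}

fn main() {
  let (ty, request) = request_struct_name("FolderEvent", "Create_Workspace");
  assert_eq!(ty, "FolderEvent");
  assert_eq!(request, "CreateWorkspaceEvent");
  // Names without underscores pass through unchanged plus the suffix.
  assert_eq!(request_struct_name("FolderEvent", "CreateApp").1, "CreateAppEvent");
}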

View File

@ -10,63 +10,71 @@ use syn::Item;
use walkdir::WalkDir;
pub fn gen(crate_name: &str) {
if std::env::var("CARGO_MAKE_WORKING_DIRECTORY").is_err() {
log::warn!("CARGO_MAKE_WORKING_DIRECTORY was not set, skip generate dart pb");
return;
}
if std::env::var("CARGO_MAKE_WORKING_DIRECTORY").is_err() {
log::warn!("CARGO_MAKE_WORKING_DIRECTORY was not set, skip generate dart pb");
return;
}
if std::env::var("FLUTTER_FLOWY_SDK_PATH").is_err() {
log::warn!("FLUTTER_FLOWY_SDK_PATH was not set, skip generate dart pb");
return;
}
if std::env::var("FLUTTER_FLOWY_SDK_PATH").is_err() {
log::warn!("FLUTTER_FLOWY_SDK_PATH was not set, skip generate dart pb");
return;
}
let crate_path = std::fs::canonicalize(".")
    .unwrap()
    .as_path()
    .display()
    .to_string();
  let event_crates = parse_dart_event_files(vec![crate_path]);
  let event_ast = event_crates
    .iter()
    .flat_map(parse_event_crate)
    .collect::<Vec<_>>();

  let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
  let mut render_result = DART_IMPORTED.to_owned();
  for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
    let mut event_template = EventTemplate::new();
    if let Some(content) = event_template.render(render_ctx, index) {
      render_result.push_str(content.as_ref())
    }
  }

  let dart_event_folder: PathBuf = [
    &std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap(),
    &std::env::var("FLUTTER_FLOWY_SDK_PATH").unwrap(),
    "lib",
    "dispatch",
    "dart_event",
    crate_name,
  ]
  .iter()
  .collect();

  if !dart_event_folder.as_path().exists() {
    std::fs::create_dir_all(dart_event_folder.as_path()).unwrap();
  }

  let dart_event_file_path =
    path_string_with_component(&dart_event_folder, vec!["dart_event.dart"]);
  println!("cargo:rerun-if-changed={}", dart_event_file_path);

  match std::fs::OpenOptions::new()
    .create(true)
    .write(true)
    .append(false)
    .truncate(true)
    .open(&dart_event_file_path)
  {
    Ok(ref mut file) => {
      file.write_all(render_result.as_bytes()).unwrap();
      File::flush(file).unwrap();
    },
    Err(err) => {
      panic!("Failed to open file: {}, {:?}", dart_event_file_path, err);
    },
  }
}
const DART_IMPORTED: &str = r#"
@ -76,90 +84,93 @@ part of '../../dispatch.dart';
#[derive(Debug)]
pub struct DartEventCrate {
crate_path: PathBuf,
event_files: Vec<String>,
crate_path: PathBuf,
event_files: Vec<String>,
}
impl DartEventCrate {
pub fn from_config(config: &CrateConfig) -> Self {
DartEventCrate {
crate_path: config.crate_path.clone(),
event_files: config.flowy_config.event_files.clone(),
}
pub fn from_config(config: &CrateConfig) -> Self {
DartEventCrate {
crate_path: config.crate_path.clone(),
event_files: config.flowy_config.event_files.clone(),
}
}
}
pub fn parse_dart_event_files(crate_paths: Vec<String>) -> Vec<DartEventCrate> {
let mut dart_event_crates: Vec<DartEventCrate> = vec![];
crate_paths.iter().for_each(|path| {
let crates = WalkDir::new(path)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(is_crate_dir)
.flat_map(|e| parse_crate_config_from(&e))
.map(|crate_config| DartEventCrate::from_config(&crate_config))
.collect::<Vec<DartEventCrate>>();
dart_event_crates.extend(crates);
});
dart_event_crates
let mut dart_event_crates: Vec<DartEventCrate> = vec![];
crate_paths.iter().for_each(|path| {
let crates = WalkDir::new(path)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(is_crate_dir)
.flat_map(|e| parse_crate_config_from(&e))
.map(|crate_config| DartEventCrate::from_config(&crate_config))
.collect::<Vec<DartEventCrate>>();
dart_event_crates.extend(crates);
});
dart_event_crates
}
pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
event_crate
.event_files
.iter()
.flat_map(|event_file| {
let file_path = path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
event_crate
.event_files
.iter()
.flat_map(|event_file| {
let file_path =
path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
let file_content = read_file(file_path.as_ref()).unwrap();
let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
ast.items
.iter()
.flat_map(|item| match item {
Item::Enum(item_enum) => {
let ast_result = ASTResult::new();
let attrs = flowy_ast::enum_from_ast(
&ast_result,
&item_enum.ident,
&item_enum.variants,
&item_enum.attrs,
);
ast_result.check().unwrap();
attrs
.iter()
.filter(|attr| !attr.attrs.event_attrs.ignore)
.enumerate()
.map(|(_index, variant)| EventASTContext::from(&variant.attrs))
.collect::<Vec<_>>()
}
_ => vec![],
})
.collect::<Vec<_>>()
let file_content = read_file(file_path.as_ref()).unwrap();
let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
ast
.items
.iter()
.flat_map(|item| match item {
Item::Enum(item_enum) => {
let ast_result = ASTResult::new();
let attrs = flowy_ast::enum_from_ast(
&ast_result,
&item_enum.ident,
&item_enum.variants,
&item_enum.attrs,
);
ast_result.check().unwrap();
attrs
.iter()
.filter(|attr| !attr.attrs.event_attrs.ignore)
.enumerate()
.map(|(_index, variant)| EventASTContext::from(&variant.attrs))
.collect::<Vec<_>>()
},
_ => vec![],
})
.collect::<Vec<_>>()
})
.collect::<Vec<EventASTContext>>()
}
pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
ast.iter()
.map(|event_ast| {
let input_deserializer = event_ast
.event_input
.as_ref()
.map(|event_input| event_input.get_ident().unwrap().to_string());
ast
.iter()
.map(|event_ast| {
let input_deserializer = event_ast
.event_input
.as_ref()
.map(|event_input| event_input.get_ident().unwrap().to_string());
let output_deserializer = event_ast
.event_output
.as_ref()
.map(|event_output| event_output.get_ident().unwrap().to_string());
let output_deserializer = event_ast
.event_output
.as_ref()
.map(|event_output| event_output.get_ident().unwrap().to_string());
EventRenderContext {
input_deserializer,
output_deserializer,
error_deserializer: event_ast.event_error.clone(),
event: event_ast.event.to_string(),
event_ty: event_ast.event_ty.to_string(),
}
})
.collect::<Vec<EventRenderContext>>()
EventRenderContext {
input_deserializer,
output_deserializer,
error_deserializer: event_ast.event_error.clone(),
event: event_ast.event.to_string(),
event_ty: event_ast.event_ty.to_string(),
}
})
.collect::<Vec<EventRenderContext>>()
}

View File

@ -2,60 +2,64 @@ use crate::util::get_tera;
use tera::Context;
pub struct EventTemplate {
tera_context: Context,
tera_context: Context,
}
pub struct EventRenderContext {
pub input_deserializer: Option<String>,
pub output_deserializer: Option<String>,
pub error_deserializer: String,
pub event: String,
pub event_ty: String,
pub input_deserializer: Option<String>,
pub output_deserializer: Option<String>,
pub error_deserializer: String,
pub event: String,
pub event_ty: String,
}
#[allow(dead_code)]
impl EventTemplate {
pub fn new() -> Self {
EventTemplate {
tera_context: Context::new(),
}
pub fn new() -> Self {
EventTemplate {
tera_context: Context::new(),
}
}
pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
    self.tera_context.insert("index", &index);
    let dart_class_name = format!("{}{}", ctx.event_ty, ctx.event);
    let event = format!("{}.{}", ctx.event_ty, ctx.event);
    self.tera_context.insert("event_class", &dart_class_name);
    self.tera_context.insert("event", &event);

    self
      .tera_context
      .insert("has_input", &ctx.input_deserializer.is_some());
    match ctx.input_deserializer {
      None => self.tera_context.insert("input_deserializer", "Unit"),
      Some(ref input) => self.tera_context.insert("input_deserializer", input),
    }

    // eprintln!(
    //   "😁 {:?} / {:?}",
    //   &ctx.input_deserializer, &ctx.output_deserializer
    // );
    let has_output = ctx.output_deserializer.is_some();
    self.tera_context.insert("has_output", &has_output);

    match ctx.output_deserializer {
      None => self.tera_context.insert("output_deserializer", "Unit"),
      Some(ref output) => self.tera_context.insert("output_deserializer", output),
    }

    self
      .tera_context
      .insert("error_deserializer", &ctx.error_deserializer);

    let tera = get_tera("dart_event");
    match tera.render("event_template.tera", &self.tera_context) {
      Ok(r) => Some(r),
      Err(e) => {
        log::error!("{:?}", e);
        None
      },
    }
  }
}
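For reference, a minimal sketch of how this Dart template is driven; the field names come from EventRenderContext above, while the event and type names are hypothetical:

  let ctx = EventRenderContext {
    input_deserializer: Some("CreateDocPayloadPB".to_string()), // hypothetical PB type
    output_deserializer: None, // rendered as "Unit" by the match above
    error_deserializer: "FlowyError".to_string(), // hypothetical error type
    event: "CreateDoc".to_string(),
    event_ty: "DocEvent".to_string(),
  };
  let mut template = EventTemplate::new();
  let dart_snippet: Option<String> = template.render(ctx, 0); // None if Tera fails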


@ -3,57 +3,62 @@ use std::path::{Path, PathBuf};
#[derive(serde::Deserialize, Clone, Debug)]
pub struct FlowyConfig {
#[serde(default)]
pub event_files: Vec<String>,
#[serde(default)]
pub event_files: Vec<String>,
// Collect AST from the file or directory specified by proto_input to generate the proto files.
#[serde(default)]
pub proto_input: Vec<String>,
// Collect AST from the file or directory specified by proto_input to generate the proto files.
#[serde(default)]
pub proto_input: Vec<String>,
// Output path for the generated proto files. The default value is default_proto_output()
#[serde(default = "default_proto_output")]
pub proto_output: String,
// Output path for the generated proto files. The default value is default_proto_output()
#[serde(default = "default_proto_output")]
pub proto_output: String,
// Create a crate that stores the generated protobuf Rust structures. The default value is default_protobuf_crate()
#[serde(default = "default_protobuf_crate")]
pub protobuf_crate_path: String,
// Create a crate that stores the generated protobuf Rust structures. The default value is default_protobuf_crate()
#[serde(default = "default_protobuf_crate")]
pub protobuf_crate_path: String,
}
fn default_proto_output() -> String {
"resources/proto".to_owned()
"resources/proto".to_owned()
}
fn default_protobuf_crate() -> String {
"src/protobuf".to_owned()
"src/protobuf".to_owned()
}
impl FlowyConfig {
pub fn from_toml_file(path: &Path) -> Self {
let content = fs::read_to_string(path).unwrap();
let config: FlowyConfig = toml::from_str(content.as_ref()).unwrap();
config
}
pub fn from_toml_file(path: &Path) -> Self {
let content = fs::read_to_string(path).unwrap();
let config: FlowyConfig = toml::from_str(content.as_ref()).unwrap();
config
}
}
pub struct CrateConfig {
pub crate_path: PathBuf,
pub crate_folder: String,
pub flowy_config: FlowyConfig,
pub crate_path: PathBuf,
pub crate_folder: String,
pub flowy_config: FlowyConfig,
}
pub fn parse_crate_config_from(entry: &walkdir::DirEntry) -> Option<CrateConfig> {
let mut config_path = entry.path().parent().unwrap().to_path_buf();
config_path.push("Flowy.toml");
if !config_path.as_path().exists() {
return None;
}
let crate_path = entry.path().parent().unwrap().to_path_buf();
let flowy_config = FlowyConfig::from_toml_file(config_path.as_path());
let crate_folder = crate_path.file_stem().unwrap().to_str().unwrap().to_string();
let mut config_path = entry.path().parent().unwrap().to_path_buf();
config_path.push("Flowy.toml");
if !config_path.as_path().exists() {
return None;
}
let crate_path = entry.path().parent().unwrap().to_path_buf();
let flowy_config = FlowyConfig::from_toml_file(config_path.as_path());
let crate_folder = crate_path
.file_stem()
.unwrap()
.to_str()
.unwrap()
.to_string();
Some(CrateConfig {
crate_path,
crate_folder,
flowy_config,
})
Some(CrateConfig {
crate_path,
crate_folder,
flowy_config,
})
}
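As a sketch, the Flowy.toml shape this deserializes looks like the following (paths made up); any omitted key falls back to the serde defaults above:

  let toml_src = r#"
    event_files = ["src/event_map.rs"]
    proto_input = ["src/entities", "src/event_map.rs"]
  "#;
  let config: FlowyConfig = toml::from_str(toml_src).unwrap();
  assert_eq!(config.proto_output, "resources/proto"); // default_proto_output()
  assert_eq!(config.protobuf_crate_path, "src/protobuf"); // default_protobuf_crate()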


@ -16,6 +16,6 @@ pub mod util;
#[derive(serde::Serialize, serde::Deserialize)]
pub struct ProtoCache {
pub structs: Vec<String>,
pub enums: Vec<String>,
pub structs: Vec<String>,
pub enums: Vec<String>,
}


@ -14,151 +14,161 @@ use syn::Item;
use walkdir::WalkDir;
pub fn parse_protobuf_context_from(crate_paths: Vec<String>) -> Vec<ProtobufCrateContext> {
let crate_infos = parse_crate_info_from_path(crate_paths);
crate_infos
.into_iter()
.map(|crate_info| {
let proto_output_path = crate_info.proto_output_path();
let files = crate_info
.proto_input_paths()
.iter()
.flat_map(|proto_crate_path| parse_files_protobuf(proto_crate_path, &proto_output_path))
.collect::<Vec<ProtoFile>>();
let crate_infos = parse_crate_info_from_path(crate_paths);
crate_infos
.into_iter()
.map(|crate_info| {
let proto_output_path = crate_info.proto_output_path();
let files = crate_info
.proto_input_paths()
.iter()
.flat_map(|proto_crate_path| parse_files_protobuf(proto_crate_path, &proto_output_path))
.collect::<Vec<ProtoFile>>();
ProtobufCrateContext::from_crate_info(crate_info, files)
})
.collect::<Vec<ProtobufCrateContext>>()
ProtobufCrateContext::from_crate_info(crate_info, files)
})
.collect::<Vec<ProtobufCrateContext>>()
}
fn parse_files_protobuf(proto_crate_path: &Path, proto_output_path: &Path) -> Vec<ProtoFile> {
let mut gen_proto_vec: Vec<ProtoFile> = vec![];
// file_stem https://doc.rust-lang.org/std/path/struct.Path.html#method.file_stem
for (path, file_name) in WalkDir::new(proto_crate_path)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(|e| !e.file_type().is_dir())
.map(|e| {
let path = e.path().to_str().unwrap().to_string();
let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
(path, file_name)
})
{
if file_name == "mod" {
continue;
}
// https://docs.rs/syn/1.0.54/syn/struct.File.html
let ast = syn::parse_file(read_file(&path).unwrap().as_ref())
.unwrap_or_else(|_| panic!("Unable to parse file at {}", path));
let structs = get_ast_structs(&ast);
let proto_file = format!("{}.proto", &file_name);
let proto_file_path = path_string_with_component(proto_output_path, vec![&proto_file]);
let proto_syntax = find_proto_syntax(proto_file_path.as_ref());
let mut proto_content = String::new();
// The types that are not defined in the current file.
let mut ref_types: Vec<String> = vec![];
structs.iter().for_each(|s| {
let mut struct_template = StructTemplate::new();
struct_template.set_message_struct_name(&s.name);
s.fields
.iter()
.filter(|field| field.pb_attrs.pb_index().is_some())
.for_each(|field| {
ref_types.push(field.ty_as_str());
struct_template.set_field(field);
});
let s = struct_template.render().unwrap();
proto_content.push_str(s.as_ref());
proto_content.push('\n');
});
let enums = get_ast_enums(&ast);
enums.iter().for_each(|e| {
let mut enum_template = EnumTemplate::new();
enum_template.set_message_enum(e);
let s = enum_template.render().unwrap();
proto_content.push_str(s.as_ref());
ref_types.push(e.name.clone());
proto_content.push('\n');
});
if !enums.is_empty() || !structs.is_empty() {
let structs: Vec<String> = structs.iter().map(|s| s.name.clone()).collect();
let enums: Vec<String> = enums.iter().map(|e| e.name.clone()).collect();
ref_types.retain(|s| !structs.contains(s));
ref_types.retain(|s| !enums.contains(s));
let info = ProtoFile {
file_path: path.clone(),
file_name: file_name.clone(),
ref_types,
structs,
enums,
syntax: proto_syntax,
content: proto_content,
};
gen_proto_vec.push(info);
}
let mut gen_proto_vec: Vec<ProtoFile> = vec![];
// file_stem https://doc.rust-lang.org/std/path/struct.Path.html#method.file_stem
for (path, file_name) in WalkDir::new(proto_crate_path)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(|e| !e.file_type().is_dir())
.map(|e| {
let path = e.path().to_str().unwrap().to_string();
let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
(path, file_name)
})
{
if file_name == "mod" {
continue;
}
gen_proto_vec
// https://docs.rs/syn/1.0.54/syn/struct.File.html
let ast = syn::parse_file(read_file(&path).unwrap().as_ref())
.unwrap_or_else(|_| panic!("Unable to parse file at {}", path));
let structs = get_ast_structs(&ast);
let proto_file = format!("{}.proto", &file_name);
let proto_file_path = path_string_with_component(proto_output_path, vec![&proto_file]);
let proto_syntax = find_proto_syntax(proto_file_path.as_ref());
let mut proto_content = String::new();
// The types that are not defined in the current file.
let mut ref_types: Vec<String> = vec![];
structs.iter().for_each(|s| {
let mut struct_template = StructTemplate::new();
struct_template.set_message_struct_name(&s.name);
s.fields
.iter()
.filter(|field| field.pb_attrs.pb_index().is_some())
.for_each(|field| {
ref_types.push(field.ty_as_str());
struct_template.set_field(field);
});
let s = struct_template.render().unwrap();
proto_content.push_str(s.as_ref());
proto_content.push('\n');
});
let enums = get_ast_enums(&ast);
enums.iter().for_each(|e| {
let mut enum_template = EnumTemplate::new();
enum_template.set_message_enum(e);
let s = enum_template.render().unwrap();
proto_content.push_str(s.as_ref());
ref_types.push(e.name.clone());
proto_content.push('\n');
});
if !enums.is_empty() || !structs.is_empty() {
let structs: Vec<String> = structs.iter().map(|s| s.name.clone()).collect();
let enums: Vec<String> = enums.iter().map(|e| e.name.clone()).collect();
ref_types.retain(|s| !structs.contains(s));
ref_types.retain(|s| !enums.contains(s));
let info = ProtoFile {
file_path: path.clone(),
file_name: file_name.clone(),
ref_types,
structs,
enums,
syntax: proto_syntax,
content: proto_content,
};
gen_proto_vec.push(info);
}
}
gen_proto_vec
}
pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
// let mut content = format!("{:#?}", &ast);
// let mut file = File::create("./foo.txt").unwrap();
// file.write_all(content.as_bytes()).unwrap();
let ast_result = ASTResult::new();
let mut proto_structs: Vec<Struct> = vec![];
ast.items.iter().for_each(|item| {
if let Item::Struct(item_struct) = item {
let (_, fields) = struct_from_ast(&ast_result, &item_struct.fields);
// let mut content = format!("{:#?}", &ast);
// let mut file = File::create("./foo.txt").unwrap();
// file.write_all(content.as_bytes()).unwrap();
let ast_result = ASTResult::new();
let mut proto_structs: Vec<Struct> = vec![];
ast.items.iter().for_each(|item| {
if let Item::Struct(item_struct) = item {
let (_, fields) = struct_from_ast(&ast_result, &item_struct.fields);
if fields.iter().filter(|f| f.pb_attrs.pb_index().is_some()).count() > 0 {
proto_structs.push(Struct {
name: item_struct.ident.to_string(),
fields,
});
}
}
});
ast_result.check().unwrap();
proto_structs
if fields
.iter()
.filter(|f| f.pb_attrs.pb_index().is_some())
.count()
> 0
{
proto_structs.push(Struct {
name: item_struct.ident.to_string(),
fields,
});
}
}
});
ast_result.check().unwrap();
proto_structs
}
pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
let mut flowy_enums: Vec<FlowyEnum> = vec![];
let ast_result = ASTResult::new();
let mut flowy_enums: Vec<FlowyEnum> = vec![];
let ast_result = ASTResult::new();
ast.items.iter().for_each(|item| {
// https://docs.rs/syn/1.0.54/syn/enum.Item.html
if let Item::Enum(item_enum) = item {
let attrs = flowy_ast::enum_from_ast(&ast_result, &item_enum.ident, &item_enum.variants, &ast.attrs);
flowy_enums.push(FlowyEnum {
name: item_enum.ident.to_string(),
attrs,
});
}
});
ast_result.check().unwrap();
flowy_enums
ast.items.iter().for_each(|item| {
// https://docs.rs/syn/1.0.54/syn/enum.Item.html
if let Item::Enum(item_enum) = item {
let attrs = flowy_ast::enum_from_ast(
&ast_result,
&item_enum.ident,
&item_enum.variants,
&ast.attrs,
);
flowy_enums.push(FlowyEnum {
name: item_enum.ident.to_string(),
attrs,
});
}
});
ast_result.check().unwrap();
flowy_enums
}
pub struct FlowyEnum<'a> {
pub name: String,
pub attrs: Vec<ASTEnumVariant<'a>>,
pub name: String,
pub attrs: Vec<ASTEnumVariant<'a>>,
}
pub struct Struct<'a> {
pub name: String,
pub fields: Vec<ASTField<'a>>,
pub name: String,
pub fields: Vec<ASTField<'a>>,
}
lazy_static! {
@ -167,27 +177,27 @@ lazy_static! {
}
fn find_proto_syntax(path: &str) -> String {
if !Path::new(path).exists() {
return String::from("syntax = \"proto3\";\n");
if !Path::new(path).exists() {
return String::from("syntax = \"proto3\";\n");
}
let mut result = String::new();
let mut file = File::open(path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
content.lines().for_each(|line| {
////Result<Option<Match<'t>>>
if let Ok(Some(m)) = SYNTAX_REGEX.find(line) {
result.push_str(m.as_str());
}
let mut result = String::new();
let mut file = File::open(path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
// if let Ok(Some(m)) = IMPORT_REGEX.find(line) {
// result.push_str(m.as_str());
// result.push('\n');
// }
});
content.lines().for_each(|line| {
////Result<Option<Match<'t>>>
if let Ok(Some(m)) = SYNTAX_REGEX.find(line) {
result.push_str(m.as_str());
}
// if let Ok(Some(m)) = IMPORT_REGEX.find(line) {
// result.push_str(m.as_str());
// result.push('\n');
// }
});
result.push('\n');
result
result.push('\n');
result
}
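To illustrate what parse_files_protobuf picks up, assuming the #[pb(index = N)] field attribute that pb_attrs.pb_index() reads (the attribute syntax and type names here are assumptions, not taken from this commit):

  // Collected by get_ast_structs: at least one field carries a pb index.
  pub struct WorkspacePB {
    #[pb(index = 1)]
    pub id: String,
    #[pb(index = 2)]
    pub name: String,
  }

  // Skipped: no field carries a pb index, so the filter count is 0.
  pub struct InternalState {
    pub dirty: bool,
  }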


@ -18,255 +18,274 @@ use std::process::Command;
use walkdir::WalkDir;
pub fn gen(crate_name: &str) {
let crate_path = std::fs::canonicalize(".").unwrap().as_path().display().to_string();
let crate_path = std::fs::canonicalize(".")
.unwrap()
.as_path()
.display()
.to_string();
// 1. generate the proto files to proto_file_dir
#[cfg(feature = "proto_gen")]
let proto_crates = gen_proto_files(crate_name, &crate_path);
// 1. generate the proto files to proto_file_dir
#[cfg(feature = "proto_gen")]
let proto_crates = gen_proto_files(crate_name, &crate_path);
for proto_crate in proto_crates {
let mut proto_file_paths = vec![];
let mut file_names = vec![];
let proto_file_output_path = proto_crate.proto_output_path().to_str().unwrap().to_string();
let protobuf_output_path = proto_crate.protobuf_crate_path().to_str().unwrap().to_string();
for proto_crate in proto_crates {
let mut proto_file_paths = vec![];
let mut file_names = vec![];
let proto_file_output_path = proto_crate
.proto_output_path()
.to_str()
.unwrap()
.to_string();
let protobuf_output_path = proto_crate
.protobuf_crate_path()
.to_str()
.unwrap()
.to_string();
for (path, file_name) in WalkDir::new(&proto_file_output_path)
.into_iter()
.filter_map(|e| e.ok())
.map(|e| {
let path = e.path().to_str().unwrap().to_string();
let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
(path, file_name)
})
{
if path.ends_with(".proto") {
// https://stackoverflow.com/questions/49077147/how-can-i-force-build-rs-to-run-again-without-cleaning-my-whole-project
println!("cargo:rerun-if-changed={}", path);
proto_file_paths.push(path);
file_names.push(file_name);
}
}
let protoc_bin_path = protoc_bin_vendored::protoc_bin_path().unwrap();
// 2. generate the protobuf files(Dart)
#[cfg(feature = "dart")]
generate_dart_protobuf_files(
crate_name,
&proto_file_output_path,
&proto_file_paths,
&file_names,
&protoc_bin_path,
);
#[cfg(feature = "ts")]
generate_ts_protobuf_files(
crate_name,
&proto_file_output_path,
&proto_file_paths,
&file_names,
&protoc_bin_path,
);
// 3. generate the protobuf files(Rust)
generate_rust_protobuf_files(
&protoc_bin_path,
&proto_file_paths,
&proto_file_output_path,
&protobuf_output_path,
);
for (path, file_name) in WalkDir::new(&proto_file_output_path)
.into_iter()
.filter_map(|e| e.ok())
.map(|e| {
let path = e.path().to_str().unwrap().to_string();
let file_name = e.path().file_stem().unwrap().to_str().unwrap().to_string();
(path, file_name)
})
{
if path.ends_with(".proto") {
// https://stackoverflow.com/questions/49077147/how-can-i-force-build-rs-to-run-again-without-cleaning-my-whole-project
println!("cargo:rerun-if-changed={}", path);
proto_file_paths.push(path);
file_names.push(file_name);
}
}
let protoc_bin_path = protoc_bin_vendored::protoc_bin_path().unwrap();
// 2. generate the protobuf files(Dart)
#[cfg(feature = "dart")]
generate_dart_protobuf_files(
crate_name,
&proto_file_output_path,
&proto_file_paths,
&file_names,
&protoc_bin_path,
);
#[cfg(feature = "ts")]
generate_ts_protobuf_files(
crate_name,
&proto_file_output_path,
&proto_file_paths,
&file_names,
&protoc_bin_path,
);
// 3. generate the protobuf files(Rust)
generate_rust_protobuf_files(
&protoc_bin_path,
&proto_file_paths,
&proto_file_output_path,
&protobuf_output_path,
);
}
}
fn generate_rust_protobuf_files(
protoc_bin_path: &Path,
proto_file_paths: &[String],
proto_file_output_path: &str,
protobuf_output_path: &str,
protoc_bin_path: &Path,
proto_file_paths: &[String],
proto_file_output_path: &str,
protobuf_output_path: &str,
) {
protoc_rust::Codegen::new()
.out_dir(protobuf_output_path)
.protoc_path(protoc_bin_path)
.inputs(proto_file_paths)
.include(proto_file_output_path)
.run()
.expect("Running rust protoc failed.");
protoc_rust::Codegen::new()
.out_dir(protobuf_output_path)
.protoc_path(protoc_bin_path)
.inputs(proto_file_paths)
.include(proto_file_output_path)
.run()
.expect("Running rust protoc failed.");
}
#[cfg(feature = "ts")]
fn generate_ts_protobuf_files(
name: &str,
proto_file_output_path: &str,
paths: &[String],
file_names: &Vec<String>,
protoc_bin_path: &Path,
name: &str,
proto_file_output_path: &str,
paths: &[String],
file_names: &Vec<String>,
protoc_bin_path: &Path,
) {
let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
let tauri_backend_service_path =
std::env::var("TAURI_BACKEND_SERVICE_PATH").unwrap_or("appflowy_tauri/src/services/backend".to_string());
let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
let tauri_backend_service_path = std::env::var("TAURI_BACKEND_SERVICE_PATH")
.unwrap_or("appflowy_tauri/src/services/backend".to_string());
let mut output = PathBuf::new();
output.push(root);
output.push(tauri_backend_service_path);
output.push("classes");
output.push(name);
let mut output = PathBuf::new();
output.push(root);
output.push(tauri_backend_service_path);
output.push("classes");
output.push(name);
if !output.as_path().exists() {
std::fs::create_dir_all(&output).unwrap();
}
let protoc_bin_path = protoc_bin_path.to_str().unwrap().to_owned();
paths.iter().for_each(|path| {
let result = cmd_lib::run_cmd! {
${protoc_bin_path} --ts_out=${output} --proto_path=${proto_file_output_path} ${path}
};
if !output.as_path().exists() {
std::fs::create_dir_all(&output).unwrap();
}
let protoc_bin_path = protoc_bin_path.to_str().unwrap().to_owned();
paths.iter().for_each(|path| {
let result = cmd_lib::run_cmd! {
${protoc_bin_path} --ts_out=${output} --proto_path=${proto_file_output_path} ${path}
};
if result.is_err() {
panic!("Generate dart pb file failed with: {}, {:?}", path, result)
};
});
if result.is_err() {
panic!("Generate dart pb file failed with: {}, {:?}", path, result)
};
});
let ts_index = path_string_with_component(&output, vec!["index.ts"]);
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&ts_index)
{
Ok(ref mut file) => {
let mut export = String::new();
export.push_str("// Auto-generated, do not edit \n");
for file_name in file_names {
let c = format!("export * from \"./{}\";\n", file_name);
export.push_str(c.as_ref());
}
let ts_index = path_string_with_component(&output, vec!["index.ts"]);
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&ts_index)
{
Ok(ref mut file) => {
let mut export = String::new();
export.push_str("// Auto-generated, do not edit \n");
for file_name in file_names {
let c = format!("export * from \"./{}\";\n", file_name);
export.push_str(c.as_ref());
}
file.write_all(export.as_bytes()).unwrap();
File::flush(file).unwrap();
}
Err(err) => {
panic!("Failed to open file: {}", err);
}
}
file.write_all(export.as_bytes()).unwrap();
File::flush(file).unwrap();
},
Err(err) => {
panic!("Failed to open file: {}", err);
},
}
}
#[cfg(feature = "dart")]
fn generate_dart_protobuf_files(
name: &str,
proto_file_output_path: &str,
paths: &[String],
file_names: &Vec<String>,
protoc_bin_path: &Path,
name: &str,
proto_file_output_path: &str,
paths: &[String],
file_names: &Vec<String>,
protoc_bin_path: &Path,
) {
if std::env::var("CARGO_MAKE_WORKING_DIRECTORY").is_err() {
log::error!("CARGO_MAKE_WORKING_DIRECTORY was not set, skip generate dart pb");
return;
}
if std::env::var("CARGO_MAKE_WORKING_DIRECTORY").is_err() {
log::error!("CARGO_MAKE_WORKING_DIRECTORY was not set, skip generate dart pb");
return;
}
if std::env::var("FLUTTER_FLOWY_SDK_PATH").is_err() {
log::error!("FLUTTER_FLOWY_SDK_PATH was not set, skip generate dart pb");
return;
}
if std::env::var("FLUTTER_FLOWY_SDK_PATH").is_err() {
log::error!("FLUTTER_FLOWY_SDK_PATH was not set, skip generate dart pb");
return;
}
let mut output = PathBuf::new();
output.push(std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap());
output.push(std::env::var("FLUTTER_FLOWY_SDK_PATH").unwrap());
output.push("lib");
output.push("protobuf");
output.push(name);
let mut output = PathBuf::new();
output.push(std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap());
output.push(std::env::var("FLUTTER_FLOWY_SDK_PATH").unwrap());
output.push("lib");
output.push("protobuf");
output.push(name);
if !output.as_path().exists() {
std::fs::create_dir_all(&output).unwrap();
}
check_pb_dart_plugin();
let protoc_bin_path = protoc_bin_path.to_str().unwrap().to_owned();
paths.iter().for_each(|path| {
let result = cmd_lib::run_cmd! {
${protoc_bin_path} --dart_out=${output} --proto_path=${proto_file_output_path} ${path}
};
if !output.as_path().exists() {
std::fs::create_dir_all(&output).unwrap();
}
check_pb_dart_plugin();
let protoc_bin_path = protoc_bin_path.to_str().unwrap().to_owned();
paths.iter().for_each(|path| {
let result = cmd_lib::run_cmd! {
${protoc_bin_path} --dart_out=${output} --proto_path=${proto_file_output_path} ${path}
};
if result.is_err() {
panic!("Generate dart pb file failed with: {}, {:?}", path, result)
};
});
if result.is_err() {
panic!("Generate dart pb file failed with: {}, {:?}", path, result)
};
});
let protobuf_dart = path_string_with_component(&output, vec!["protobuf.dart"]);
let protobuf_dart = path_string_with_component(&output, vec!["protobuf.dart"]);
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&protobuf_dart)
{
Ok(ref mut file) => {
let mut export = String::new();
export.push_str("// Auto-generated, do not edit \n");
for file_name in file_names {
let c = format!("export './{}.pb.dart';\n", file_name);
export.push_str(c.as_ref());
}
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&protobuf_dart)
{
Ok(ref mut file) => {
let mut export = String::new();
export.push_str("// Auto-generated, do not edit \n");
for file_name in file_names {
let c = format!("export './{}.pb.dart';\n", file_name);
export.push_str(c.as_ref());
}
file.write_all(export.as_bytes()).unwrap();
File::flush(file).unwrap();
}
Err(err) => {
panic!("Failed to open file: {}", err);
}
}
file.write_all(export.as_bytes()).unwrap();
File::flush(file).unwrap();
},
Err(err) => {
panic!("Failed to open file: {}", err);
},
}
}
pub fn check_pb_dart_plugin() {
if cfg!(target_os = "windows") {
//Command::new("cmd")
// .arg("/C")
// .arg(cmd)
// .status()
// .expect("failed to execute process");
//panic!("{}", format!("\n❌ The protoc-gen-dart was not installed correctly."))
} else {
let exit_result = Command::new("sh")
.arg("-c")
.arg("command -v protoc-gen-dart")
.status()
.expect("failed to execute process");
if cfg!(target_os = "windows") {
//Command::new("cmd")
// .arg("/C")
// .arg(cmd)
// .status()
// .expect("failed to execute process");
//panic!("{}", format!("\n❌ The protoc-gen-dart was not installed correctly."))
} else {
let exit_result = Command::new("sh")
.arg("-c")
.arg("command -v protoc-gen-dart")
.status()
.expect("failed to execute process");
if !exit_result.success() {
let mut msg = "\n❌ Can't find protoc-gen-dart in $PATH:\n".to_string();
let output = Command::new("sh").arg("-c").arg("echo $PATH").output();
let paths = String::from_utf8(output.unwrap().stdout)
.unwrap()
.split(':')
.map(|s| s.to_string())
.collect::<Vec<String>>();
if !exit_result.success() {
let mut msg = "\n❌ Can't find protoc-gen-dart in $PATH:\n".to_string();
let output = Command::new("sh").arg("-c").arg("echo $PATH").output();
let paths = String::from_utf8(output.unwrap().stdout)
.unwrap()
.split(':')
.map(|s| s.to_string())
.collect::<Vec<String>>();
paths.iter().for_each(|s| msg.push_str(&format!("{}\n", s)));
paths.iter().for_each(|s| msg.push_str(&format!("{}\n", s)));
if let Ok(output) = Command::new("sh").arg("-c").arg("which protoc-gen-dart").output() {
msg.push_str(&format!(
"Installed protoc-gen-dart path: {:?}\n",
String::from_utf8(output.stdout).unwrap()
));
}
if let Ok(output) = Command::new("sh")
.arg("-c")
.arg("which protoc-gen-dart")
.output()
{
msg.push_str(&format!(
"Installed protoc-gen-dart path: {:?}\n",
String::from_utf8(output.stdout).unwrap()
));
}
msg.push_str("✅ You can fix that by adding:");
msg.push_str("\n\texport PATH=\"$PATH\":\"$HOME/.pub-cache/bin\"\n");
msg.push_str("to your shell's config file.(.bashrc, .bash, .profile, .zshrc etc.)");
panic!("{}", msg)
}
msg.push_str("✅ You can fix that by adding:");
msg.push_str("\n\texport PATH=\"$PATH\":\"$HOME/.pub-cache/bin\"\n");
msg.push_str("to your shell's config file.(.bashrc, .bash, .profile, .zshrc etc.)");
panic!("{}", msg)
}
}
}
#[cfg(feature = "proto_gen")]
fn gen_proto_files(crate_name: &str, crate_path: &str) -> Vec<ProtobufCrate> {
let crate_context = ProtoGenerator::gen(crate_name, crate_path);
let proto_crates = crate_context
.iter()
.map(|info| info.protobuf_crate.clone())
.collect::<Vec<_>>();
let crate_context = ProtoGenerator::gen(crate_name, crate_path);
let proto_crates = crate_context
.iter()
.map(|info| info.protobuf_crate.clone())
.collect::<Vec<_>>();
crate_context.into_iter().flat_map(|info| info.files).for_each(|file| {
println!("cargo:rerun-if-changed={}", file.file_path);
crate_context
.into_iter()
.flat_map(|info| info.files)
.for_each(|file| {
println!("cargo:rerun-if-changed={}", file.file_path);
});
proto_crates
proto_crates
}
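A sketch of how gen() might be driven from a crate's build script; the Dart branch above is skipped unless the two environment variables it checks are present, and cargo-make normally exports them (the paths and crate name below are made up):

  // build.rs (sketch)
  fn main() {
    // Normally exported by cargo-make; set here only for illustration.
    std::env::set_var("CARGO_MAKE_WORKING_DIRECTORY", "/path/to/AppFlowy/frontend");
    std::env::set_var("FLUTTER_FLOWY_SDK_PATH", "app_flowy/packages/flowy_sdk");
    gen("flowy-folder");
  }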


@ -14,148 +14,158 @@ use std::{fs::OpenOptions, io::Write};
pub struct ProtoGenerator();
impl ProtoGenerator {
pub fn gen(crate_name: &str, crate_path: &str) -> Vec<ProtobufCrateContext> {
let crate_contexts = parse_protobuf_context_from(vec![crate_path.to_owned()]);
write_proto_files(&crate_contexts);
write_rust_crate_mod_file(&crate_contexts);
pub fn gen(crate_name: &str, crate_path: &str) -> Vec<ProtobufCrateContext> {
let crate_contexts = parse_protobuf_context_from(vec![crate_path.to_owned()]);
write_proto_files(&crate_contexts);
write_rust_crate_mod_file(&crate_contexts);
let proto_cache = ProtoCache::from_crate_contexts(&crate_contexts);
let proto_cache_str = serde_json::to_string(&proto_cache).unwrap();
let proto_cache = ProtoCache::from_crate_contexts(&crate_contexts);
let proto_cache_str = serde_json::to_string(&proto_cache).unwrap();
let crate_cache_dir = path_buf_with_component(&cache_dir(), vec![crate_name]);
if !crate_cache_dir.as_path().exists() {
std::fs::create_dir_all(&crate_cache_dir).unwrap();
}
let protobuf_cache_path = path_string_with_component(&crate_cache_dir, vec!["proto_cache"]);
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&protobuf_cache_path)
{
Ok(ref mut file) => {
file.write_all(proto_cache_str.as_bytes()).unwrap();
File::flush(file).unwrap();
}
Err(_err) => {
panic!("Failed to open file: {}", protobuf_cache_path);
}
}
crate_contexts
let crate_cache_dir = path_buf_with_component(&cache_dir(), vec![crate_name]);
if !crate_cache_dir.as_path().exists() {
std::fs::create_dir_all(&crate_cache_dir).unwrap();
}
let protobuf_cache_path = path_string_with_component(&crate_cache_dir, vec!["proto_cache"]);
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&protobuf_cache_path)
{
Ok(ref mut file) => {
file.write_all(proto_cache_str.as_bytes()).unwrap();
File::flush(file).unwrap();
},
Err(_err) => {
panic!("Failed to open file: {}", protobuf_cache_path);
},
}
crate_contexts
}
}
fn write_proto_files(crate_contexts: &[ProtobufCrateContext]) {
let file_path_content_map = crate_contexts
let file_path_content_map = crate_contexts
.iter()
.flat_map(|ctx| {
ctx
.files
.iter()
.flat_map(|ctx| {
ctx.files
.iter()
.map(|file| {
(
file.file_path.clone(),
ProtoFileSymbol {
file_name: file.file_name.clone(),
symbols: file.symbols(),
},
)
})
.collect::<HashMap<String, ProtoFileSymbol>>()
.map(|file| {
(
file.file_path.clone(),
ProtoFileSymbol {
file_name: file.file_name.clone(),
symbols: file.symbols(),
},
)
})
.collect::<HashMap<String, ProtoFileSymbol>>();
.collect::<HashMap<String, ProtoFileSymbol>>()
})
.collect::<HashMap<String, ProtoFileSymbol>>();
for context in crate_contexts {
let dir = context.protobuf_crate.proto_output_path();
context.files.iter().for_each(|file| {
// syntax
let mut file_content = file.syntax.clone();
for context in crate_contexts {
let dir = context.protobuf_crate.proto_output_path();
context.files.iter().for_each(|file| {
// syntax
let mut file_content = file.syntax.clone();
// import
file_content.push_str(&gen_import_content(file, &file_path_content_map));
// import
file_content.push_str(&gen_import_content(file, &file_path_content_map));
// content
file_content.push_str(&file.content);
// content
file_content.push_str(&file.content);
let proto_file = format!("{}.proto", &file.file_name);
let proto_file_path = path_string_with_component(&dir, vec![&proto_file]);
save_content_to_file_with_diff_prompt(&file_content, proto_file_path.as_ref());
});
}
let proto_file = format!("{}.proto", &file.file_name);
let proto_file_path = path_string_with_component(&dir, vec![&proto_file]);
save_content_to_file_with_diff_prompt(&file_content, proto_file_path.as_ref());
});
}
}
fn gen_import_content(current_file: &ProtoFile, file_path_symbols_map: &HashMap<String, ProtoFileSymbol>) -> String {
let mut import_files: Vec<String> = vec![];
file_path_symbols_map
.iter()
.for_each(|(file_path, proto_file_symbols)| {
if file_path != &current_file.file_path {
current_file.ref_types.iter().for_each(|ref_type| {
if proto_file_symbols.symbols.contains(ref_type) {
let import_file = format!("import \"{}.proto\";", proto_file_symbols.file_name);
if !import_files.contains(&import_file) {
import_files.push(import_file);
}
}
});
fn gen_import_content(
current_file: &ProtoFile,
file_path_symbols_map: &HashMap<String, ProtoFileSymbol>,
) -> String {
let mut import_files: Vec<String> = vec![];
file_path_symbols_map
.iter()
.for_each(|(file_path, proto_file_symbols)| {
if file_path != &current_file.file_path {
current_file.ref_types.iter().for_each(|ref_type| {
if proto_file_symbols.symbols.contains(ref_type) {
let import_file = format!("import \"{}.proto\";", proto_file_symbols.file_name);
if !import_files.contains(&import_file) {
import_files.push(import_file);
}
}
});
if import_files.len() == 1 {
format!("{}\n", import_files.pop().unwrap())
} else {
import_files.join("\n")
}
}
});
if import_files.len() == 1 {
format!("{}\n", import_files.pop().unwrap())
} else {
import_files.join("\n")
}
}
struct ProtoFileSymbol {
file_name: String,
symbols: Vec<String>,
file_name: String,
symbols: Vec<String>,
}
fn write_rust_crate_mod_file(crate_contexts: &[ProtobufCrateContext]) {
for context in crate_contexts {
let mod_path = context.protobuf_crate.proto_model_mod_file();
match OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&mod_path)
{
Ok(ref mut file) => {
let mut mod_file_content = String::new();
for context in crate_contexts {
let mod_path = context.protobuf_crate.proto_model_mod_file();
match OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&mod_path)
{
Ok(ref mut file) => {
let mut mod_file_content = String::new();
mod_file_content.push_str("#![cfg_attr(rustfmt, rustfmt::skip)]\n");
mod_file_content.push_str("// Auto-generated, do not edit\n");
walk_dir(
context.protobuf_crate.proto_output_path(),
|e| !e.file_type().is_dir(),
|_, name| {
let c = format!("\nmod {};\npub use {}::*;\n", &name, &name);
mod_file_content.push_str(c.as_ref());
},
);
file.write_all(mod_file_content.as_bytes()).unwrap();
}
Err(err) => {
panic!("Failed to open file: {}", err);
}
}
mod_file_content.push_str("#![cfg_attr(rustfmt, rustfmt::skip)]\n");
mod_file_content.push_str("// Auto-generated, do not edit\n");
walk_dir(
context.protobuf_crate.proto_output_path(),
|e| !e.file_type().is_dir(),
|_, name| {
let c = format!("\nmod {};\npub use {}::*;\n", &name, &name);
mod_file_content.push_str(c.as_ref());
},
);
file.write_all(mod_file_content.as_bytes()).unwrap();
},
Err(err) => {
panic!("Failed to open file: {}", err);
},
}
}
}
impl ProtoCache {
fn from_crate_contexts(crate_contexts: &[ProtobufCrateContext]) -> Self {
let proto_files = crate_contexts
.iter()
.flat_map(|crate_info| &crate_info.files)
.collect::<Vec<&ProtoFile>>();
fn from_crate_contexts(crate_contexts: &[ProtobufCrateContext]) -> Self {
let proto_files = crate_contexts
.iter()
.flat_map(|crate_info| &crate_info.files)
.collect::<Vec<&ProtoFile>>();
let structs: Vec<String> = proto_files.iter().flat_map(|info| info.structs.clone()).collect();
let enums: Vec<String> = proto_files.iter().flat_map(|info| info.enums.clone()).collect();
Self { structs, enums }
}
let structs: Vec<String> = proto_files
.iter()
.flat_map(|info| info.structs.clone())
.collect();
let enums: Vec<String> = proto_files
.iter()
.flat_map(|info| info.enums.clone())
.collect();
Self { structs, enums }
}
}
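Putting the pieces together, a sketch of one generator run and the cache it leaves behind (crate name, path, and symbol names hypothetical):

  let crate_contexts = ProtoGenerator::gen("flowy-folder", "./frontend/rust-lib/flowy-folder");
  // Side effects: .proto files under each crate's proto_output, a mod.rs for the
  // generated Rust structures, and a JSON ProtoCache written to
  // <cache_dir()>/flowy-folder/proto_cache, shaped roughly like:
  //   {"structs":["WorkspacePB","AppPB"],"enums":["ViewLayoutTypePB"]}
  let _ = crate_contexts;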


@ -9,135 +9,140 @@ use walkdir::WalkDir;
#[derive(Debug)]
pub struct ProtobufCrateContext {
pub files: Vec<ProtoFile>,
pub protobuf_crate: ProtobufCrate,
pub files: Vec<ProtoFile>,
pub protobuf_crate: ProtobufCrate,
}
impl ProtobufCrateContext {
pub fn from_crate_info(inner: ProtobufCrate, files: Vec<ProtoFile>) -> Self {
Self {
files,
protobuf_crate: inner,
}
pub fn from_crate_info(inner: ProtobufCrate, files: Vec<ProtoFile>) -> Self {
Self {
files,
protobuf_crate: inner,
}
}
pub fn create_crate_mod_file(&self) {
// mod model;
// pub use model::*;
let mod_file_path = path_string_with_component(&self.protobuf_crate.protobuf_crate_path(), vec!["mod.rs"]);
let mut content = "#![cfg_attr(rustfmt, rustfmt::skip)]\n".to_owned();
content.push_str("// Auto-generated, do not edit\n");
content.push_str("mod model;\npub use model::*;");
match OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&mod_file_path)
{
Ok(ref mut file) => {
file.write_all(content.as_bytes()).unwrap();
}
Err(err) => {
panic!("Failed to open protobuf mod file: {}", err);
}
}
pub fn create_crate_mod_file(&self) {
// mod model;
// pub use model::*;
let mod_file_path =
path_string_with_component(&self.protobuf_crate.protobuf_crate_path(), vec!["mod.rs"]);
let mut content = "#![cfg_attr(rustfmt, rustfmt::skip)]\n".to_owned();
content.push_str("// Auto-generated, do not edit\n");
content.push_str("mod model;\npub use model::*;");
match OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&mod_file_path)
{
Ok(ref mut file) => {
file.write_all(content.as_bytes()).unwrap();
},
Err(err) => {
panic!("Failed to open protobuf mod file: {}", err);
},
}
}
#[allow(dead_code)]
pub fn flutter_mod_dir(&self, root: &str) -> String {
let crate_module_dir = format!("{}/{}", root, self.protobuf_crate.crate_folder);
crate_module_dir
}
#[allow(dead_code)]
pub fn flutter_mod_dir(&self, root: &str) -> String {
let crate_module_dir = format!("{}/{}", root, self.protobuf_crate.crate_folder);
crate_module_dir
}
#[allow(dead_code)]
pub fn flutter_mod_file(&self, root: &str) -> String {
let crate_module_dir = format!("{}/{}/protobuf.dart", root, self.protobuf_crate.crate_folder);
crate_module_dir
}
#[allow(dead_code)]
pub fn flutter_mod_file(&self, root: &str) -> String {
let crate_module_dir = format!(
"{}/{}/protobuf.dart",
root, self.protobuf_crate.crate_folder
);
crate_module_dir
}
}
#[derive(Clone, Debug)]
pub struct ProtobufCrate {
pub crate_folder: String,
pub crate_path: PathBuf,
flowy_config: FlowyConfig,
pub crate_folder: String,
pub crate_path: PathBuf,
flowy_config: FlowyConfig,
}
impl ProtobufCrate {
pub fn from_config(config: CrateConfig) -> Self {
ProtobufCrate {
crate_path: config.crate_path,
crate_folder: config.crate_folder,
flowy_config: config.flowy_config,
}
pub fn from_config(config: CrateConfig) -> Self {
ProtobufCrate {
crate_path: config.crate_path,
crate_folder: config.crate_folder,
flowy_config: config.flowy_config,
}
}
// Return the file paths for each rust file that used to generate the proto file.
pub fn proto_input_paths(&self) -> Vec<PathBuf> {
self.flowy_config
.proto_input
.iter()
.map(|name| path_buf_with_component(&self.crate_path, vec![name]))
.collect::<Vec<PathBuf>>()
}
// Return the file paths for each rust file that used to generate the proto file.
pub fn proto_input_paths(&self) -> Vec<PathBuf> {
self
.flowy_config
.proto_input
.iter()
.map(|name| path_buf_with_component(&self.crate_path, vec![name]))
.collect::<Vec<PathBuf>>()
}
// The protobuf_crate_path is used to store the generated protobuf Rust structures.
pub fn protobuf_crate_path(&self) -> PathBuf {
let crate_path = PathBuf::from(&self.flowy_config.protobuf_crate_path);
create_dir_if_not_exist(&crate_path);
crate_path
}
// The protobuf_crate_path is used to store the generated protobuf Rust structures.
pub fn protobuf_crate_path(&self) -> PathBuf {
let crate_path = PathBuf::from(&self.flowy_config.protobuf_crate_path);
create_dir_if_not_exist(&crate_path);
crate_path
}
// The proto_output_path is used to store the proto files
pub fn proto_output_path(&self) -> PathBuf {
let output_dir = PathBuf::from(&self.flowy_config.proto_output);
create_dir_if_not_exist(&output_dir);
output_dir
}
// The proto_output_path is used to store the proto files
pub fn proto_output_path(&self) -> PathBuf {
let output_dir = PathBuf::from(&self.flowy_config.proto_output);
create_dir_if_not_exist(&output_dir);
output_dir
}
pub fn proto_model_mod_file(&self) -> String {
path_string_with_component(&self.protobuf_crate_path(), vec!["mod.rs"])
}
pub fn proto_model_mod_file(&self) -> String {
path_string_with_component(&self.protobuf_crate_path(), vec!["mod.rs"])
}
}
#[derive(Debug)]
pub struct ProtoFile {
pub file_path: String,
pub file_name: String,
pub structs: Vec<String>,
// store the type of current file using
pub ref_types: Vec<String>,
pub file_path: String,
pub file_name: String,
pub structs: Vec<String>,
// store the type of current file using
pub ref_types: Vec<String>,
pub enums: Vec<String>,
// proto syntax. "proto3" or "proto2"
pub syntax: String,
pub enums: Vec<String>,
// proto syntax. "proto3" or "proto2"
pub syntax: String,
// proto message content
pub content: String,
// proto message content
pub content: String,
}
impl ProtoFile {
pub fn symbols(&self) -> Vec<String> {
let mut symbols = self.structs.clone();
let mut enum_symbols = self.enums.clone();
symbols.append(&mut enum_symbols);
symbols
}
pub fn symbols(&self) -> Vec<String> {
let mut symbols = self.structs.clone();
let mut enum_symbols = self.enums.clone();
symbols.append(&mut enum_symbols);
symbols
}
}
pub fn parse_crate_info_from_path(roots: Vec<String>) -> Vec<ProtobufCrate> {
let mut protobuf_crates: Vec<ProtobufCrate> = vec![];
roots.iter().for_each(|root| {
let crates = WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(is_crate_dir)
.flat_map(|e| parse_crate_config_from(&e))
.map(ProtobufCrate::from_config)
.collect::<Vec<ProtobufCrate>>();
protobuf_crates.extend(crates);
});
protobuf_crates
let mut protobuf_crates: Vec<ProtobufCrate> = vec![];
roots.iter().for_each(|root| {
let crates = WalkDir::new(root)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(is_crate_dir)
.flat_map(|e| parse_crate_config_from(&e))
.map(ProtobufCrate::from_config)
.collect::<Vec<ProtobufCrate>>();
protobuf_crates.extend(crates);
});
protobuf_crates
}
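Crate discovery is just a directory walk looking for Flowy.toml markers; for example (the root path is made up):

  let crates = parse_crate_info_from_path(vec!["./frontend/rust-lib".to_string()]);
  for c in &crates {
    // Prints one "<crate_folder> -> <proto_output>" line per discovered crate;
    // note proto_output_path() also creates the directory if it is missing.
    println!("{} -> {:?}", c.crate_folder, c.proto_output_path());
  }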


@ -3,33 +3,33 @@ use itertools::Itertools;
use tera::Context;
pub struct ProtobufDeriveMeta {
context: Context,
structs: Vec<String>,
enums: Vec<String>,
context: Context,
structs: Vec<String>,
enums: Vec<String>,
}
#[allow(dead_code)]
impl ProtobufDeriveMeta {
pub fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
let enums: Vec<_> = enums.into_iter().unique().collect();
ProtobufDeriveMeta {
context: Context::new(),
structs,
enums,
}
pub fn new(structs: Vec<String>, enums: Vec<String>) -> Self {
let enums: Vec<_> = enums.into_iter().unique().collect();
ProtobufDeriveMeta {
context: Context::new(),
structs,
enums,
}
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("names", &self.structs);
self.context.insert("enums", &self.enums);
pub fn render(&mut self) -> Option<String> {
self.context.insert("names", &self.structs);
self.context.insert("enums", &self.enums);
let tera = get_tera("protobuf_file/template/derive_meta");
match tera.render("derive_meta.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
}
}
let tera = get_tera("protobuf_file/template/derive_meta");
match tera.render("derive_meta.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
},
}
}
}
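A usage sketch; note that new() deduplicates the enum list via unique(), so repeated names collapse (the names themselves are hypothetical):

  let mut meta = ProtobufDeriveMeta::new(
    vec!["WorkspacePB".to_string(), "AppPB".to_string()],
    vec!["ViewLayout".to_string(), "ViewLayout".to_string()], // collapses to one entry
  );
  let derive_src: Option<String> = meta.render(); // feeds derive_meta.tera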


@ -3,36 +3,38 @@ use crate::util::get_tera;
use tera::Context;
pub struct EnumTemplate {
context: Context,
items: Vec<String>,
context: Context,
items: Vec<String>,
}
#[allow(dead_code)]
impl EnumTemplate {
pub fn new() -> Self {
EnumTemplate {
context: Context::new(),
items: vec![],
}
pub fn new() -> Self {
EnumTemplate {
context: Context::new(),
items: vec![],
}
}
pub fn set_message_enum(&mut self, flowy_enum: &FlowyEnum) {
self.context.insert("enum_name", &flowy_enum.name);
flowy_enum.attrs.iter().for_each(|item| {
self.items
.push(format!("{} = {};", item.attrs.enum_item_name, item.attrs.value))
})
}
pub fn set_message_enum(&mut self, flowy_enum: &FlowyEnum) {
self.context.insert("enum_name", &flowy_enum.name);
flowy_enum.attrs.iter().for_each(|item| {
self.items.push(format!(
"{} = {};",
item.attrs.enum_item_name, item.attrs.value
))
})
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("items", &self.items);
let tera = get_tera("protobuf_file/template/proto_file");
match tera.render("enum.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
}
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("items", &self.items);
let tera = get_tera("protobuf_file/template/proto_file");
match tera.render("enum.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
},
}
}
}
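Concretely, set_message_enum() flattens each variant's (enum_item_name, value) pair into one proto item line, so a two-variant enum comes out roughly as "enum ViewLayout { Document = 0; Grid = 1; }" (enum and variant names hypothetical). A small wrapper sketch, reusing the types in this file:

  fn render_proto_enum(flowy_enum: &FlowyEnum) -> Option<String> {
    let mut tpl = EnumTemplate::new();
    tpl.set_message_enum(flowy_enum); // items become ["Document = 0;", "Grid = 1;"]
    tpl.render() // None if enum.tera fails to render
  }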


@ -16,91 +16,95 @@ pub static RUST_TYPE_MAP: phf::Map<&'static str, &'static str> = phf_map! {
};
pub struct StructTemplate {
context: Context,
fields: Vec<String>,
context: Context,
fields: Vec<String>,
}
#[allow(dead_code)]
impl StructTemplate {
pub fn new() -> Self {
StructTemplate {
context: Context::new(),
fields: vec![],
}
pub fn new() -> Self {
StructTemplate {
context: Context::new(),
fields: vec![],
}
}
pub fn set_message_struct_name(&mut self, name: &str) {
self.context.insert("struct_name", name);
}
pub fn set_field(&mut self, field: &ASTField) {
// {{ field_type }} {{ field_name }} = {{index}};
let name = field.name().unwrap().to_string();
let index = field.pb_attrs.pb_index().unwrap();
let ty: &str = &field.ty_as_str();
let mut mapped_ty: &str = ty;
if RUST_TYPE_MAP.contains_key(ty) {
mapped_ty = RUST_TYPE_MAP[ty];
}
pub fn set_message_struct_name(&mut self, name: &str) {
self.context.insert("struct_name", name);
if let Some(ref category) = field.bracket_category {
match category {
BracketCategory::Opt => match &field.bracket_inner_ty {
None => {},
Some(inner_ty) => match inner_ty.to_string().as_str() {
//TODO: support hashmap or something else wrapped by Option
"Vec" => {
self.fields.push(format!(
"oneof one_of_{} {{ bytes {} = {}; }};",
name, name, index
));
},
_ => {
self.fields.push(format!(
"oneof one_of_{} {{ {} {} = {}; }};",
name, mapped_ty, name, index
));
},
},
},
BracketCategory::Map((k, v)) => {
let key: &str = k;
let value: &str = v;
self.fields.push(format!(
// map<string, string> attrs = 1;
"map<{}, {}> {} = {};",
RUST_TYPE_MAP.get(key).unwrap_or(&key),
RUST_TYPE_MAP.get(value).unwrap_or(&value),
name,
index
));
},
BracketCategory::Vec => {
let bracket_ty: &str = &field.bracket_ty.as_ref().unwrap().to_string();
// Vec<u8>
if mapped_ty == "u8" && bracket_ty == "Vec" {
self.fields.push(format!("bytes {} = {};", name, index))
} else {
self.fields.push(format!(
"{} {} {} = {};",
RUST_TYPE_MAP[bracket_ty], mapped_ty, name, index
))
}
},
BracketCategory::Other => self
.fields
.push(format!("{} {} = {};", mapped_ty, name, index)),
}
}
}
pub fn set_field(&mut self, field: &ASTField) {
// {{ field_type }} {{ field_name }} = {{index}};
let name = field.name().unwrap().to_string();
let index = field.pb_attrs.pb_index().unwrap();
let ty: &str = &field.ty_as_str();
let mut mapped_ty: &str = ty;
if RUST_TYPE_MAP.contains_key(ty) {
mapped_ty = RUST_TYPE_MAP[ty];
}
if let Some(ref category) = field.bracket_category {
match category {
BracketCategory::Opt => match &field.bracket_inner_ty {
None => {}
Some(inner_ty) => match inner_ty.to_string().as_str() {
//TODO: support hashmap or something else wrapped by Option
"Vec" => {
self.fields
.push(format!("oneof one_of_{} {{ bytes {} = {}; }};", name, name, index));
}
_ => {
self.fields.push(format!(
"oneof one_of_{} {{ {} {} = {}; }};",
name, mapped_ty, name, index
));
}
},
},
BracketCategory::Map((k, v)) => {
let key: &str = k;
let value: &str = v;
self.fields.push(format!(
// map<string, string> attrs = 1;
"map<{}, {}> {} = {};",
RUST_TYPE_MAP.get(key).unwrap_or(&key),
RUST_TYPE_MAP.get(value).unwrap_or(&value),
name,
index
));
}
BracketCategory::Vec => {
let bracket_ty: &str = &field.bracket_ty.as_ref().unwrap().to_string();
// Vec<u8>
if mapped_ty == "u8" && bracket_ty == "Vec" {
self.fields.push(format!("bytes {} = {};", name, index))
} else {
self.fields.push(format!(
"{} {} {} = {};",
RUST_TYPE_MAP[bracket_ty], mapped_ty, name, index
))
}
}
BracketCategory::Other => self.fields.push(format!("{} {} = {};", mapped_ty, name, index)),
}
}
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("fields", &self.fields);
let tera = get_tera("protobuf_file/template/proto_file");
match tera.render("struct.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
}
}
pub fn render(&mut self) -> Option<String> {
self.context.insert("fields", &self.fields);
let tera = get_tera("protobuf_file/template/proto_file");
match tera.render("struct.tera", &self.context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
},
}
}
}
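Summarizing the branches above, the field-to-proto mapping comes out roughly as follows; this assumes RUST_TYPE_MAP (elided above) sends String to string and Vec to repeated, and the #[pb(index = N)] attribute syntax is likewise an assumption:

  // #[pb(index = 1)] name: String                   => string name = 1;
  // #[pb(index = 2)] desc: Option<String>           => oneof one_of_desc { string desc = 2; };
  // #[pb(index = 3)] payload: Vec<u8>               => bytes payload = 3;
  // #[pb(index = 4)] items: Vec<ItemPB>             => repeated ItemPB items = 4;
  // #[pb(index = 5)] attrs: HashMap<String, String> => map<string, string> attrs = 5;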


@ -2,64 +2,69 @@ use crate::util::get_tera;
use tera::Context;
pub struct EventTemplate {
tera_context: Context,
tera_context: Context,
}
pub struct EventRenderContext {
pub input_deserializer: Option<String>,
pub output_deserializer: Option<String>,
pub error_deserializer: String,
pub event: String,
pub event_ty: String,
pub prefix: String,
pub input_deserializer: Option<String>,
pub output_deserializer: Option<String>,
pub error_deserializer: String,
pub event: String,
pub event_ty: String,
pub prefix: String,
}
#[allow(dead_code)]
impl EventTemplate {
pub fn new() -> Self {
EventTemplate {
tera_context: Context::new(),
}
pub fn new() -> Self {
EventTemplate {
tera_context: Context::new(),
}
}
pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
self.tera_context.insert("index", &index);
let event_func_name = format!("{}{}", ctx.event_ty, ctx.event);
self
.tera_context
.insert("event_func_name", &event_func_name);
self
.tera_context
.insert("event_name", &format!("{}.{}", ctx.prefix, ctx.event_ty));
self.tera_context.insert("event", &ctx.event);
self
.tera_context
.insert("has_input", &ctx.input_deserializer.is_some());
match ctx.input_deserializer {
None => {},
Some(ref input) => self
.tera_context
.insert("input_deserializer", &format!("{}.{}", ctx.prefix, input)),
}
pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
self.tera_context.insert("index", &index);
let event_func_name = format!("{}{}", ctx.event_ty, ctx.event);
self.tera_context.insert("event_func_name", &event_func_name);
self.tera_context
.insert("event_name", &format!("{}.{}", ctx.prefix, ctx.event_ty));
self.tera_context.insert("event", &ctx.event);
let has_output = ctx.output_deserializer.is_some();
self.tera_context.insert("has_output", &has_output);
self.tera_context.insert("has_input", &ctx.input_deserializer.is_some());
match ctx.input_deserializer {
None => {}
Some(ref input) => self
.tera_context
.insert("input_deserializer", &format!("{}.{}", ctx.prefix, input)),
}
let has_output = ctx.output_deserializer.is_some();
self.tera_context.insert("has_output", &has_output);
match ctx.output_deserializer {
None => self.tera_context.insert("output_deserializer", "void"),
Some(ref output) => self
.tera_context
.insert("output_deserializer", &format!("{}.{}", ctx.prefix, output)),
}
self.tera_context.insert(
"error_deserializer",
&format!("{}.{}", ctx.prefix, ctx.error_deserializer),
);
let tera = get_tera("ts_event");
match tera.render("event_template.tera", &self.tera_context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
}
}
match ctx.output_deserializer {
None => self.tera_context.insert("output_deserializer", "void"),
Some(ref output) => self
.tera_context
.insert("output_deserializer", &format!("{}.{}", ctx.prefix, output)),
}
self.tera_context.insert(
"error_deserializer",
&format!("{}.{}", ctx.prefix, ctx.error_deserializer),
);
let tera = get_tera("ts_event");
match tera.render("event_template.tera", &self.tera_context) {
Ok(r) => Some(r),
Err(e) => {
log::error!("{:?}", e);
None
},
}
}
}
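The driving sketch mirrors the Dart template, with the TS-specific differences visible above: names get qualified with ctx.prefix (set to "pb" by ast_to_event_render_ctx below) and a missing output renders as "void" instead of "Unit" (type names hypothetical):

  let ctx = EventRenderContext {
    input_deserializer: Some("CreateDocPayloadPB".to_string()), // hypothetical
    output_deserializer: None, // becomes "void" in the generated TS
    error_deserializer: "FlowyError".to_string(), // hypothetical
    event: "CreateDoc".to_string(),
    event_ty: "DocEvent".to_string(),
    prefix: "pb".to_string(),
  };
  let mut template = EventTemplate::new();
  let ts_snippet = template.render(ctx, 0);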


@ -13,175 +13,187 @@ use syn::Item;
use walkdir::WalkDir;
pub fn gen(crate_name: &str) {
let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
let tauri_backend_service_path =
std::env::var("TAURI_BACKEND_SERVICE_PATH").unwrap_or("appflowy_tauri/src/services/backend".to_string());
let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
let tauri_backend_service_path = std::env::var("TAURI_BACKEND_SERVICE_PATH")
.unwrap_or("appflowy_tauri/src/services/backend".to_string());
let crate_path = std::fs::canonicalize(".").unwrap().as_path().display().to_string();
let event_crates = parse_ts_event_files(vec![crate_path]);
let event_ast = event_crates.iter().flat_map(parse_event_crate).collect::<Vec<_>>();
let crate_path = std::fs::canonicalize(".")
.unwrap()
.as_path()
.display()
.to_string();
let event_crates = parse_ts_event_files(vec![crate_path]);
let event_ast = event_crates
.iter()
.flat_map(parse_event_crate)
.collect::<Vec<_>>();
let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
let mut render_result = TS_HEADER.to_string();
for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
let mut event_template = EventTemplate::new();
let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
let mut render_result = TS_HEADER.to_string();
for (index, render_ctx) in event_render_ctx.into_iter().enumerate() {
let mut event_template = EventTemplate::new();
if let Some(content) = event_template.render(render_ctx, index) {
render_result.push_str(content.as_ref())
}
if let Some(content) = event_template.render(render_ctx, index) {
render_result.push_str(content.as_ref())
}
render_result.push_str(TS_FOOTER);
}
render_result.push_str(TS_FOOTER);
let ts_event_folder: PathBuf = [&root, &tauri_backend_service_path, "events", crate_name]
.iter()
.collect();
if !ts_event_folder.as_path().exists() {
std::fs::create_dir_all(ts_event_folder.as_path()).unwrap();
}
let ts_event_folder: PathBuf = [&root, &tauri_backend_service_path, "events", crate_name]
.iter()
.collect();
if !ts_event_folder.as_path().exists() {
std::fs::create_dir_all(ts_event_folder.as_path()).unwrap();
}
let event_file = "event";
let event_file_ext = "ts";
let ts_event_file_path =
path_string_with_component(&ts_event_folder, vec![&format!("{}.{}", event_file, event_file_ext)]);
println!("cargo:rerun-if-changed={}", ts_event_file_path);
let event_file = "event";
let event_file_ext = "ts";
let ts_event_file_path = path_string_with_component(
&ts_event_folder,
vec![&format!("{}.{}", event_file, event_file_ext)],
);
println!("cargo:rerun-if-changed={}", ts_event_file_path);
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&ts_event_file_path)
{
Ok(ref mut file) => {
file.write_all(render_result.as_bytes()).unwrap();
File::flush(file).unwrap();
}
Err(err) => {
panic!("Failed to open file: {}, {:?}", ts_event_file_path, err);
}
}
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&ts_event_file_path)
{
Ok(ref mut file) => {
file.write_all(render_result.as_bytes()).unwrap();
File::flush(file).unwrap();
},
Err(err) => {
panic!("Failed to open file: {}, {:?}", ts_event_file_path, err);
},
}
let ts_index = path_string_with_component(&ts_event_folder, vec!["index.ts"]);
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&ts_index)
{
Ok(ref mut file) => {
let mut export = String::new();
export.push_str("// Auto-generated, do not edit \n");
export.push_str(&format!("export * from '../../classes/{}';\n", crate_name));
export.push_str(&format!("export * from './{}';\n", event_file));
file.write_all(export.as_bytes()).unwrap();
File::flush(file).unwrap();
}
Err(err) => {
panic!("Failed to open file: {}", err);
}
}
let ts_index = path_string_with_component(&ts_event_folder, vec!["index.ts"]);
match std::fs::OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(&ts_index)
{
Ok(ref mut file) => {
let mut export = String::new();
export.push_str("// Auto-generated, do not edit \n");
export.push_str(&format!("export * from '../../classes/{}';\n", crate_name));
export.push_str(&format!("export * from './{}';\n", event_file));
file.write_all(export.as_bytes()).unwrap();
File::flush(file).unwrap();
},
Err(err) => {
panic!("Failed to open file: {}", err);
},
}
}
#[derive(Debug)]
pub struct TsEventCrate {
crate_path: PathBuf,
event_files: Vec<String>,
crate_path: PathBuf,
event_files: Vec<String>,
}
impl TsEventCrate {
pub fn from_config(config: &CrateConfig) -> Self {
TsEventCrate {
crate_path: config.crate_path.clone(),
event_files: config.flowy_config.event_files.clone(),
}
pub fn from_config(config: &CrateConfig) -> Self {
TsEventCrate {
crate_path: config.crate_path.clone(),
event_files: config.flowy_config.event_files.clone(),
}
}
}
pub fn parse_ts_event_files(crate_paths: Vec<String>) -> Vec<TsEventCrate> {
let mut ts_event_crates: Vec<TsEventCrate> = vec![];
crate_paths.iter().for_each(|path| {
let crates = WalkDir::new(path)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(is_crate_dir)
.flat_map(|e| parse_crate_config_from(&e))
.map(|crate_config| TsEventCrate::from_config(&crate_config))
.collect::<Vec<TsEventCrate>>();
ts_event_crates.extend(crates);
});
ts_event_crates
let mut ts_event_crates: Vec<TsEventCrate> = vec![];
crate_paths.iter().for_each(|path| {
let crates = WalkDir::new(path)
.into_iter()
.filter_entry(|e| !is_hidden(e))
.filter_map(|e| e.ok())
.filter(is_crate_dir)
.flat_map(|e| parse_crate_config_from(&e))
.map(|crate_config| TsEventCrate::from_config(&crate_config))
.collect::<Vec<TsEventCrate>>();
ts_event_crates.extend(crates);
});
ts_event_crates
}
pub fn parse_event_crate(event_crate: &TsEventCrate) -> Vec<EventASTContext> {
event_crate
.event_files
.iter()
.flat_map(|event_file| {
let file_path = path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
event_crate
.event_files
.iter()
.flat_map(|event_file| {
let file_path =
path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
let file_content = read_file(file_path.as_ref()).unwrap();
let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
ast.items
.iter()
.flat_map(|item| match item {
Item::Enum(item_enum) => {
let ast_result = ASTResult::new();
let attrs = flowy_ast::enum_from_ast(
&ast_result,
&item_enum.ident,
&item_enum.variants,
&item_enum.attrs,
);
ast_result.check().unwrap();
attrs
.iter()
.filter(|attr| !attr.attrs.event_attrs.ignore)
.enumerate()
.map(|(_index, variant)| EventASTContext::from(&variant.attrs))
.collect::<Vec<_>>()
}
_ => vec![],
})
.collect::<Vec<_>>()
let file_content = read_file(file_path.as_ref()).unwrap();
let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
ast
.items
.iter()
.flat_map(|item| match item {
Item::Enum(item_enum) => {
let ast_result = ASTResult::new();
let attrs = flowy_ast::enum_from_ast(
&ast_result,
&item_enum.ident,
&item_enum.variants,
&item_enum.attrs,
);
ast_result.check().unwrap();
attrs
.iter()
.filter(|attr| !attr.attrs.event_attrs.ignore)
.enumerate()
.map(|(_index, variant)| EventASTContext::from(&variant.attrs))
.collect::<Vec<_>>()
},
_ => vec![],
})
.collect::<Vec<EventASTContext>>()
.collect::<Vec<_>>()
})
.collect::<Vec<EventASTContext>>()
}
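
parse_event_crate only collects enum variants that carry event attributes and are not marked ignore. A minimal sketch of the kind of enum it expects, assuming AppFlowy's usual event-attribute shape (the derive and attribute names below are illustrative, not defined in this diff):

#[derive(ProtoBuf_Enum, Flowy_Event)]
#[event_err = "FlowyError"]
pub enum UserEvent {
  // Parsed: carries input/output event attributes.
  #[event(input = "SignInPayloadPB", output = "UserProfilePB")]
  SignIn = 0,
  // Skipped by the .filter(...) above when the ignore flag is set.
  #[event(ignore)]
  Internal = 1,
}
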
pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
let mut import_objects = HashSet::new();
ast.iter().for_each(|event_ast| {
if let Some(input) = event_ast.event_input.as_ref() {
import_objects.insert(input.get_ident().unwrap().to_string());
}
if let Some(output) = event_ast.event_output.as_ref() {
import_objects.insert(output.get_ident().unwrap().to_string());
}
});
let mut import_objects = HashSet::new();
ast.iter().for_each(|event_ast| {
if let Some(input) = event_ast.event_input.as_ref() {
import_objects.insert(input.get_ident().unwrap().to_string());
}
if let Some(output) = event_ast.event_output.as_ref() {
import_objects.insert(output.get_ident().unwrap().to_string());
}
});
ast.iter()
.map(|event_ast| {
let input_deserializer = event_ast
.event_input
.as_ref()
.map(|event_input| event_input.get_ident().unwrap().to_string());
ast
.iter()
.map(|event_ast| {
let input_deserializer = event_ast
.event_input
.as_ref()
.map(|event_input| event_input.get_ident().unwrap().to_string());
let output_deserializer = event_ast
.event_output
.as_ref()
.map(|event_output| event_output.get_ident().unwrap().to_string());
let output_deserializer = event_ast
.event_output
.as_ref()
.map(|event_output| event_output.get_ident().unwrap().to_string());
EventRenderContext {
input_deserializer,
output_deserializer,
error_deserializer: event_ast.event_error.to_string(),
event: event_ast.event.to_string(),
event_ty: event_ast.event_ty.to_string(),
prefix: "pb".to_string(),
}
})
.collect::<Vec<EventRenderContext>>()
EventRenderContext {
input_deserializer,
output_deserializer,
error_deserializer: event_ast.event_error.to_string(),
event: event_ast.event.to_string(),
event_ty: event_ast.event_ty.to_string(),
prefix: "pb".to_string(),
}
})
.collect::<Vec<EventRenderContext>>()
}
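
Under those assumptions, the SignIn variant sketched above would render to one EventRenderContext roughly like this (illustrative, not output captured from the generator):

EventRenderContext {
  input_deserializer: Some("SignInPayloadPB".to_string()),
  output_deserializer: Some("UserProfilePB".to_string()),
  error_deserializer: "FlowyError".to_string(),
  event: "SignIn".to_string(),
  event_ty: "UserEvent".to_string(),
  prefix: "pb".to_string(),
}
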
const TS_HEADER: &str = r#"

@@ -3,172 +3,188 @@ use similar::{ChangeTag, TextDiff};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::{
fs::{File, OpenOptions},
io::{Read, Write},
fs::{File, OpenOptions},
io::{Read, Write},
};
use tera::Tera;
use walkdir::WalkDir;
pub fn read_file(path: &str) -> Option<String> {
let mut file = File::open(path).unwrap_or_else(|_| panic!("Unable to open file at {}", path));
let mut content = String::new();
match file.read_to_string(&mut content) {
Ok(_) => Some(content),
Err(e) => {
log::error!("{}, with error: {:?}", path, e);
Some("".to_string())
}
}
let mut file = File::open(path).unwrap_or_else(|_| panic!("Unable to open file at {}", path));
let mut content = String::new();
match file.read_to_string(&mut content) {
Ok(_) => Some(content),
Err(e) => {
log::error!("{}, with error: {:?}", path, e);
Some("".to_string())
},
}
}
pub fn save_content_to_file_with_diff_prompt(content: &str, output_file: &str) {
if Path::new(output_file).exists() {
let old_content = read_file(output_file).unwrap();
let new_content = content.to_owned();
let write_to_file = || match OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(output_file)
{
Ok(ref mut file) => {
file.write_all(new_content.as_bytes()).unwrap();
}
Err(err) => {
panic!("Failed to open log file: {}", err);
}
};
if new_content != old_content {
print_diff(old_content, new_content.clone());
write_to_file()
}
} else {
match OpenOptions::new().create(true).write(true).open(output_file) {
Ok(ref mut file) => file.write_all(content.as_bytes()).unwrap(),
Err(err) => panic!("Open or create to {} fail: {}", output_file, err),
}
if Path::new(output_file).exists() {
let old_content = read_file(output_file).unwrap();
let new_content = content.to_owned();
let write_to_file = || match OpenOptions::new()
.create(true)
.write(true)
.append(false)
.truncate(true)
.open(output_file)
{
Ok(ref mut file) => {
file.write_all(new_content.as_bytes()).unwrap();
},
Err(err) => {
panic!("Failed to open log file: {}", err);
},
};
if new_content != old_content {
print_diff(old_content, new_content.clone());
write_to_file()
}
} else {
match OpenOptions::new()
.create(true)
.write(true)
.open(output_file)
{
Ok(ref mut file) => file.write_all(content.as_bytes()).unwrap(),
Err(err) => panic!("Open or create to {} fail: {}", output_file, err),
}
}
}
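
In short: an existing output file is rewritten only when the rendered content actually changed, with the change echoed as a colored diff first; a missing file is created and written unconditionally. A hypothetical call site (the path is illustrative):

save_content_to_file_with_diff_prompt(&rendered, "src/services/backend/events/index.ts");
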
pub fn print_diff(old_content: String, new_content: String) {
let diff = TextDiff::from_lines(&old_content, &new_content);
for op in diff.ops() {
for change in diff.iter_changes(op) {
let (sign, style) = match change.tag() {
ChangeTag::Delete => ("-", Style::new().red()),
ChangeTag::Insert => ("+", Style::new().green()),
ChangeTag::Equal => (" ", Style::new()),
};
let diff = TextDiff::from_lines(&old_content, &new_content);
for op in diff.ops() {
for change in diff.iter_changes(op) {
let (sign, style) = match change.tag() {
ChangeTag::Delete => ("-", Style::new().red()),
ChangeTag::Insert => ("+", Style::new().green()),
ChangeTag::Equal => (" ", Style::new()),
};
match change.tag() {
ChangeTag::Delete => {
print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
}
ChangeTag::Insert => {
print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
}
ChangeTag::Equal => {}
};
}
println!("---------------------------------------------------");
match change.tag() {
ChangeTag::Delete => {
print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
},
ChangeTag::Insert => {
print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
},
ChangeTag::Equal => {},
};
}
println!("---------------------------------------------------");
}
}
#[allow(dead_code)]
pub fn is_crate_dir(e: &walkdir::DirEntry) -> bool {
let cargo = e.path().file_stem().unwrap().to_str().unwrap().to_string();
cargo == *"Cargo"
let cargo = e.path().file_stem().unwrap().to_str().unwrap().to_string();
cargo == *"Cargo"
}
#[allow(dead_code)]
pub fn is_proto_file(e: &walkdir::DirEntry) -> bool {
if e.path().extension().is_none() {
return false;
}
let ext = e.path().extension().unwrap().to_str().unwrap().to_string();
ext == *"proto"
if e.path().extension().is_none() {
return false;
}
let ext = e.path().extension().unwrap().to_str().unwrap().to_string();
ext == *"proto"
}
pub fn is_hidden(entry: &walkdir::DirEntry) -> bool {
entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
entry
.file_name()
.to_str()
.map(|s| s.starts_with('.'))
.unwrap_or(false)
}
pub fn create_dir_if_not_exist(dir: &Path) {
if !dir.exists() {
std::fs::create_dir_all(dir).unwrap();
}
if !dir.exists() {
std::fs::create_dir_all(dir).unwrap();
}
}
pub fn path_string_with_component(path: &Path, components: Vec<&str>) -> String {
path_buf_with_component(path, components).to_str().unwrap().to_string()
path_buf_with_component(path, components)
.to_str()
.unwrap()
.to_string()
}
#[allow(dead_code)]
pub fn path_buf_with_component(path: &Path, components: Vec<&str>) -> PathBuf {
let mut path_buf = path.to_path_buf();
for component in components {
path_buf.push(component);
}
path_buf
let mut path_buf = path.to_path_buf();
for component in components {
path_buf.push(component);
}
path_buf
}
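
Both path helpers simply push components onto a PathBuf. A quick sketch of the expected behavior on a Unix path:

let s = path_string_with_component(Path::new("/tmp/flowy"), vec!["events", "index.ts"]);
assert_eq!(s, "/tmp/flowy/events/index.ts");
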
#[allow(dead_code)]
pub fn walk_dir<P: AsRef<Path>, F1, F2>(dir: P, filter: F2, mut path_and_name: F1)
where
F1: FnMut(String, String),
F2: Fn(&walkdir::DirEntry) -> bool,
F1: FnMut(String, String),
F2: Fn(&walkdir::DirEntry) -> bool,
{
for (path, name) in WalkDir::new(dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| filter(e))
.map(|e| {
(
e.path().to_str().unwrap().to_string(),
e.path().file_stem().unwrap().to_str().unwrap().to_string(),
)
})
{
path_and_name(path, name);
}
for (path, name) in WalkDir::new(dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| filter(e))
.map(|e| {
(
e.path().to_str().unwrap().to_string(),
e.path().file_stem().unwrap().to_str().unwrap().to_string(),
)
})
{
path_and_name(path, name);
}
}
#[allow(dead_code)]
pub fn suffix_relative_to_path(path: &str, base: &str) -> String {
let base = Path::new(base);
let path = Path::new(path);
path.strip_prefix(base).unwrap().to_str().unwrap().to_owned()
let base = Path::new(base);
let path = Path::new(path);
path
.strip_prefix(base)
.unwrap()
.to_str()
.unwrap()
.to_owned()
}
pub fn get_tera(directory: &str) -> Tera {
let mut root = format!("{}/src/", env!("CARGO_MANIFEST_DIR"));
root.push_str(directory);
let mut root = format!("{}/src/", env!("CARGO_MANIFEST_DIR"));
root.push_str(directory);
let root_absolute_path = match std::fs::canonicalize(&root) {
Ok(p) => p.as_path().display().to_string(),
Err(e) => {
panic!("❌ Canonicalize file path {} failed {:?}", root, e);
}
};
let root_absolute_path = match std::fs::canonicalize(&root) {
Ok(p) => p.as_path().display().to_string(),
Err(e) => {
panic!("❌ Canonicalize file path {} failed {:?}", root, e);
},
};
let mut template_path = format!("{}/**/*.tera", root_absolute_path);
if cfg!(windows) {
// remove "\\?\" prefix on windows
template_path = format!("{}/**/*.tera", &root_absolute_path[4..]);
}
let mut template_path = format!("{}/**/*.tera", root_absolute_path);
if cfg!(windows) {
// remove "\\?\" prefix on windows
template_path = format!("{}/**/*.tera", &root_absolute_path[4..]);
}
match Tera::new(template_path.as_ref()) {
Ok(t) => t,
Err(e) => {
log::error!("Parsing error(s): {}", e);
::std::process::exit(1);
}
}
match Tera::new(template_path.as_ref()) {
Ok(t) => t,
Err(e) => {
log::error!("Parsing error(s): {}", e);
::std::process::exit(1);
},
}
}
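
get_tera resolves templates relative to the code-gen crate itself, so get_tera("ts_event") (directory name illustrative) globs <CARGO_MANIFEST_DIR>/src/ts_event/**/*.tera. The [4..] slice exists because std::fs::canonicalize on Windows yields an extended-length path, e.g. (hypothetical values):

// canonicalize(r"C:\dev\appflowy\src\ts_event") -> r"\\?\C:\dev\appflowy\src\ts_event"
// after the slice, the glob becomes r"C:\dev\appflowy\src\ts_event/**/*.tera"
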
pub fn cache_dir() -> PathBuf {
let mut path_buf = PathBuf::from_str(env!("CARGO_MANIFEST_DIR")).unwrap();
path_buf.push(".cache");
path_buf
let mut path_buf = PathBuf::from_str(env!("CARGO_MANIFEST_DIR")).unwrap();
path_buf.push(".cache");
path_buf
}

@@ -1,8 +1,8 @@
use bytes::Bytes;
use flowy_client_ws::FlowyWebSocketConnect;
use flowy_document::{
errors::{internal_error, FlowyError},
DocumentCloudService, DocumentConfig, DocumentDatabase, DocumentManager, DocumentUser,
errors::{internal_error, FlowyError},
DocumentCloudService, DocumentConfig, DocumentDatabase, DocumentManager, DocumentUser,
};
use flowy_net::ClientServerConfiguration;
use flowy_net::{http_server::document::DocumentCloudServiceImpl, local_server::LocalServer};
@@ -17,98 +17,101 @@ use ws_model::ws_revision::ClientRevisionWSData;
pub struct DocumentDepsResolver();
impl DocumentDepsResolver {
pub fn resolve(
local_server: Option<Arc<LocalServer>>,
ws_conn: Arc<FlowyWebSocketConnect>,
user_session: Arc<UserSession>,
server_config: &ClientServerConfiguration,
document_config: &DocumentConfig,
) -> Arc<DocumentManager> {
let user = Arc::new(BlockUserImpl(user_session.clone()));
let rev_web_socket = Arc::new(DocumentRevisionWebSocket(ws_conn.clone()));
let cloud_service: Arc<dyn DocumentCloudService> = match local_server {
None => Arc::new(DocumentCloudServiceImpl::new(server_config.clone())),
Some(local_server) => local_server,
};
let database = Arc::new(DocumentDatabaseImpl(user_session));
pub fn resolve(
local_server: Option<Arc<LocalServer>>,
ws_conn: Arc<FlowyWebSocketConnect>,
user_session: Arc<UserSession>,
server_config: &ClientServerConfiguration,
document_config: &DocumentConfig,
) -> Arc<DocumentManager> {
let user = Arc::new(BlockUserImpl(user_session.clone()));
let rev_web_socket = Arc::new(DocumentRevisionWebSocket(ws_conn.clone()));
let cloud_service: Arc<dyn DocumentCloudService> = match local_server {
None => Arc::new(DocumentCloudServiceImpl::new(server_config.clone())),
Some(local_server) => local_server,
};
let database = Arc::new(DocumentDatabaseImpl(user_session));
let manager = Arc::new(DocumentManager::new(
cloud_service,
user,
database,
rev_web_socket,
document_config.clone(),
));
let receiver = Arc::new(DocumentWSMessageReceiverImpl(manager.clone()));
ws_conn.add_ws_message_receiver(receiver).unwrap();
let manager = Arc::new(DocumentManager::new(
cloud_service,
user,
database,
rev_web_socket,
document_config.clone(),
));
let receiver = Arc::new(DocumentWSMessageReceiverImpl(manager.clone()));
ws_conn.add_ws_message_receiver(receiver).unwrap();
manager
}
manager
}
}
struct BlockUserImpl(Arc<UserSession>);
impl DocumentUser for BlockUserImpl {
fn user_dir(&self) -> Result<String, FlowyError> {
let dir = self.0.user_dir().map_err(|e| FlowyError::unauthorized().context(e))?;
fn user_dir(&self) -> Result<String, FlowyError> {
let dir = self
.0
.user_dir()
.map_err(|e| FlowyError::unauthorized().context(e))?;
let doc_dir = format!("{}/document", dir);
if !Path::new(&doc_dir).exists() {
std::fs::create_dir_all(&doc_dir)?;
}
Ok(doc_dir)
let doc_dir = format!("{}/document", dir);
if !Path::new(&doc_dir).exists() {
std::fs::create_dir_all(&doc_dir)?;
}
Ok(doc_dir)
}
fn user_id(&self) -> Result<String, FlowyError> {
self.0.user_id()
}
fn user_id(&self) -> Result<String, FlowyError> {
self.0.user_id()
}
fn token(&self) -> Result<String, FlowyError> {
self.0.token()
}
fn token(&self) -> Result<String, FlowyError> {
self.0.token()
}
}
struct DocumentDatabaseImpl(Arc<UserSession>);
impl DocumentDatabase for DocumentDatabaseImpl {
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self.0.db_pool()
}
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self.0.db_pool()
}
}
struct DocumentRevisionWebSocket(Arc<FlowyWebSocketConnect>);
impl RevisionWebSocket for DocumentRevisionWebSocket {
fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
let bytes: Bytes = data.try_into().unwrap();
let msg = WebSocketRawMessage {
channel: WSChannel::Document,
data: bytes.to_vec(),
};
let ws_conn = self.0.clone();
Box::pin(async move {
match ws_conn.web_socket().await? {
None => {}
Some(sender) => {
sender.send(msg).map_err(internal_error)?;
}
}
Ok(())
})
}
fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
let bytes: Bytes = data.try_into().unwrap();
let msg = WebSocketRawMessage {
channel: WSChannel::Document,
data: bytes.to_vec(),
};
let ws_conn = self.0.clone();
Box::pin(async move {
match ws_conn.web_socket().await? {
None => {},
Some(sender) => {
sender.send(msg).map_err(internal_error)?;
},
}
Ok(())
})
}
fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
let ws_conn = self.0.clone();
Box::pin(async move { ws_conn.subscribe_websocket_state().await })
}
fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
let ws_conn = self.0.clone();
Box::pin(async move { ws_conn.subscribe_websocket_state().await })
}
}
struct DocumentWSMessageReceiverImpl(Arc<DocumentManager>);
impl WSMessageReceiver for DocumentWSMessageReceiverImpl {
fn source(&self) -> WSChannel {
WSChannel::Document
}
fn receive_message(&self, msg: WebSocketRawMessage) {
let handler = self.0.clone();
tokio::spawn(async move {
handler.receive_ws_data(Bytes::from(msg.data)).await;
});
}
fn source(&self) -> WSChannel {
WSChannel::Document
}
fn receive_message(&self, msg: WebSocketRawMessage) {
let handler = self.0.clone();
tokio::spawn(async move {
handler.receive_ws_data(Bytes::from(msg.data)).await;
});
}
}

@@ -10,9 +10,9 @@ use flowy_document::DocumentManager;
use flowy_folder::entities::{ViewDataFormatPB, ViewLayoutTypePB, ViewPB};
use flowy_folder::manager::{ViewDataProcessor, ViewDataProcessorMap};
use flowy_folder::{
errors::{internal_error, FlowyError},
event_map::{FolderCouldServiceV1, WorkspaceDatabase, WorkspaceUser},
manager::FolderManager,
errors::{internal_error, FlowyError},
event_map::{FolderCouldServiceV1, WorkspaceDatabase, WorkspaceUser},
manager::FolderManager,
};
use flowy_net::ClientServerConfiguration;
use flowy_net::{http_server::folder::FolderHttpCloudService, local_server::LocalServer};
@@ -30,294 +30,320 @@ use ws_model::ws_revision::ClientRevisionWSData;
pub struct FolderDepsResolver();
impl FolderDepsResolver {
pub async fn resolve(
local_server: Option<Arc<LocalServer>>,
user_session: Arc<UserSession>,
server_config: &ClientServerConfiguration,
ws_conn: &Arc<FlowyWebSocketConnect>,
text_block_manager: &Arc<DocumentManager>,
grid_manager: &Arc<DatabaseManager>,
) -> Arc<FolderManager> {
let user: Arc<dyn WorkspaceUser> = Arc::new(WorkspaceUserImpl(user_session.clone()));
let database: Arc<dyn WorkspaceDatabase> = Arc::new(WorkspaceDatabaseImpl(user_session));
let web_socket = Arc::new(FolderRevisionWebSocket(ws_conn.clone()));
let cloud_service: Arc<dyn FolderCouldServiceV1> = match local_server {
None => Arc::new(FolderHttpCloudService::new(server_config.clone())),
Some(local_server) => local_server,
};
pub async fn resolve(
local_server: Option<Arc<LocalServer>>,
user_session: Arc<UserSession>,
server_config: &ClientServerConfiguration,
ws_conn: &Arc<FlowyWebSocketConnect>,
text_block_manager: &Arc<DocumentManager>,
grid_manager: &Arc<DatabaseManager>,
) -> Arc<FolderManager> {
let user: Arc<dyn WorkspaceUser> = Arc::new(WorkspaceUserImpl(user_session.clone()));
let database: Arc<dyn WorkspaceDatabase> = Arc::new(WorkspaceDatabaseImpl(user_session));
let web_socket = Arc::new(FolderRevisionWebSocket(ws_conn.clone()));
let cloud_service: Arc<dyn FolderCouldServiceV1> = match local_server {
None => Arc::new(FolderHttpCloudService::new(server_config.clone())),
Some(local_server) => local_server,
};
let view_data_processor = make_view_data_processor(text_block_manager.clone(), grid_manager.clone());
let folder_manager =
Arc::new(FolderManager::new(user.clone(), cloud_service, database, view_data_processor, web_socket).await);
let view_data_processor =
make_view_data_processor(text_block_manager.clone(), grid_manager.clone());
let folder_manager = Arc::new(
FolderManager::new(
user.clone(),
cloud_service,
database,
view_data_processor,
web_socket,
)
.await,
);
if let (Ok(user_id), Ok(token)) = (user.user_id(), user.token()) {
match folder_manager.initialize(&user_id, &token).await {
Ok(_) => {}
Err(e) => tracing::error!("Initialize folder manager failed: {}", e),
}
}
let receiver = Arc::new(FolderWSMessageReceiverImpl(folder_manager.clone()));
ws_conn.add_ws_message_receiver(receiver).unwrap();
folder_manager
if let (Ok(user_id), Ok(token)) = (user.user_id(), user.token()) {
match folder_manager.initialize(&user_id, &token).await {
Ok(_) => {},
Err(e) => tracing::error!("Initialize folder manager failed: {}", e),
}
}
let receiver = Arc::new(FolderWSMessageReceiverImpl(folder_manager.clone()));
ws_conn.add_ws_message_receiver(receiver).unwrap();
folder_manager
}
}
fn make_view_data_processor(
document_manager: Arc<DocumentManager>,
grid_manager: Arc<DatabaseManager>,
document_manager: Arc<DocumentManager>,
grid_manager: Arc<DatabaseManager>,
) -> ViewDataProcessorMap {
let mut map: HashMap<ViewDataFormatPB, Arc<dyn ViewDataProcessor + Send + Sync>> = HashMap::new();
let mut map: HashMap<ViewDataFormatPB, Arc<dyn ViewDataProcessor + Send + Sync>> = HashMap::new();
let document_processor = Arc::new(DocumentViewDataProcessor(document_manager));
document_processor.data_types().into_iter().for_each(|data_type| {
map.insert(data_type, document_processor.clone());
let document_processor = Arc::new(DocumentViewDataProcessor(document_manager));
document_processor
.data_types()
.into_iter()
.for_each(|data_type| {
map.insert(data_type, document_processor.clone());
});
let grid_data_impl = Arc::new(GridViewDataProcessor(grid_manager));
grid_data_impl.data_types().into_iter().for_each(|data_type| {
map.insert(data_type, grid_data_impl.clone());
let grid_data_impl = Arc::new(GridViewDataProcessor(grid_manager));
grid_data_impl
.data_types()
.into_iter()
.for_each(|data_type| {
map.insert(data_type, grid_data_impl.clone());
});
Arc::new(map)
Arc::new(map)
}
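
Given the data_types() impls further down, the finished map routes both document formats to the document processor and the database format to the grid processor:

// ViewDataFormatPB::DeltaFormat    -> DocumentViewDataProcessor
// ViewDataFormatPB::NodeFormat     -> DocumentViewDataProcessor
// ViewDataFormatPB::DatabaseFormat -> GridViewDataProcessor
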
struct WorkspaceDatabaseImpl(Arc<UserSession>);
impl WorkspaceDatabase for WorkspaceDatabaseImpl {
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self.0.db_pool().map_err(|e| FlowyError::internal().context(e))
}
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self
.0
.db_pool()
.map_err(|e| FlowyError::internal().context(e))
}
}
struct WorkspaceUserImpl(Arc<UserSession>);
impl WorkspaceUser for WorkspaceUserImpl {
fn user_id(&self) -> Result<String, FlowyError> {
self.0.user_id().map_err(|e| FlowyError::internal().context(e))
}
fn user_id(&self) -> Result<String, FlowyError> {
self
.0
.user_id()
.map_err(|e| FlowyError::internal().context(e))
}
fn token(&self) -> Result<String, FlowyError> {
self.0.token().map_err(|e| FlowyError::internal().context(e))
}
fn token(&self) -> Result<String, FlowyError> {
self
.0
.token()
.map_err(|e| FlowyError::internal().context(e))
}
}
struct FolderRevisionWebSocket(Arc<FlowyWebSocketConnect>);
impl RevisionWebSocket for FolderRevisionWebSocket {
fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
let bytes: Bytes = data.try_into().unwrap();
let msg = WebSocketRawMessage {
channel: WSChannel::Folder,
data: bytes.to_vec(),
};
fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
let bytes: Bytes = data.try_into().unwrap();
let msg = WebSocketRawMessage {
channel: WSChannel::Folder,
data: bytes.to_vec(),
};
let ws_conn = self.0.clone();
Box::pin(async move {
match ws_conn.web_socket().await? {
None => {}
Some(sender) => {
sender.send(msg).map_err(internal_error)?;
}
}
Ok(())
})
}
let ws_conn = self.0.clone();
Box::pin(async move {
match ws_conn.web_socket().await? {
None => {},
Some(sender) => {
sender.send(msg).map_err(internal_error)?;
},
}
Ok(())
})
}
fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
let ws_conn = self.0.clone();
Box::pin(async move { ws_conn.subscribe_websocket_state().await })
}
fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
let ws_conn = self.0.clone();
Box::pin(async move { ws_conn.subscribe_websocket_state().await })
}
}
struct FolderWSMessageReceiverImpl(Arc<FolderManager>);
impl WSMessageReceiver for FolderWSMessageReceiverImpl {
fn source(&self) -> WSChannel {
WSChannel::Folder
}
fn receive_message(&self, msg: WebSocketRawMessage) {
let handler = self.0.clone();
tokio::spawn(async move {
handler.did_receive_ws_data(Bytes::from(msg.data)).await;
});
}
fn source(&self) -> WSChannel {
WSChannel::Folder
}
fn receive_message(&self, msg: WebSocketRawMessage) {
let handler = self.0.clone();
tokio::spawn(async move {
handler.did_receive_ws_data(Bytes::from(msg.data)).await;
});
}
}
struct DocumentViewDataProcessor(Arc<DocumentManager>);
impl ViewDataProcessor for DocumentViewDataProcessor {
fn create_view(
&self,
_user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
view_data: Bytes,
) -> FutureResult<(), FlowyError> {
// Only accept Document type
debug_assert_eq!(layout, ViewLayoutTypePB::Document);
let view_data = match String::from_utf8(view_data.to_vec()) {
Ok(content) => match make_transaction_from_document_content(&content) {
Ok(transaction) => transaction.to_bytes().unwrap_or(vec![]),
Err(_) => vec![],
},
Err(_) => vec![],
};
fn create_view(
&self,
_user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
view_data: Bytes,
) -> FutureResult<(), FlowyError> {
// Only accept Document type
debug_assert_eq!(layout, ViewLayoutTypePB::Document);
let view_data = match String::from_utf8(view_data.to_vec()) {
Ok(content) => match make_transaction_from_document_content(&content) {
Ok(transaction) => transaction.to_bytes().unwrap_or(vec![]),
Err(_) => vec![],
},
Err(_) => vec![],
};
let revision = Revision::initial_revision(view_id, Bytes::from(view_data));
let view_id = view_id.to_string();
let manager = self.0.clone();
let revision = Revision::initial_revision(view_id, Bytes::from(view_data));
let view_id = view_id.to_string();
let manager = self.0.clone();
FutureResult::new(async move {
manager.create_document(view_id, vec![revision]).await?;
Ok(())
})
}
FutureResult::new(async move {
manager.create_document(view_id, vec![revision]).await?;
Ok(())
})
}
fn close_view(&self, view_id: &str) -> FutureResult<(), FlowyError> {
let manager = self.0.clone();
let view_id = view_id.to_string();
FutureResult::new(async move {
manager.close_document_editor(view_id).await?;
Ok(())
})
}
fn close_view(&self, view_id: &str) -> FutureResult<(), FlowyError> {
let manager = self.0.clone();
let view_id = view_id.to_string();
FutureResult::new(async move {
manager.close_document_editor(view_id).await?;
Ok(())
})
}
fn get_view_data(&self, view: &ViewPB) -> FutureResult<Bytes, FlowyError> {
let view_id = view.id.clone();
let manager = self.0.clone();
FutureResult::new(async move {
let editor = manager.open_document_editor(view_id).await?;
let document_data = Bytes::from(editor.duplicate().await?);
Ok(document_data)
})
}
fn get_view_data(&self, view: &ViewPB) -> FutureResult<Bytes, FlowyError> {
let view_id = view.id.clone();
let manager = self.0.clone();
FutureResult::new(async move {
let editor = manager.open_document_editor(view_id).await?;
let document_data = Bytes::from(editor.duplicate().await?);
Ok(document_data)
})
}
fn create_default_view(
&self,
user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
_data_format: ViewDataFormatPB,
) -> FutureResult<Bytes, FlowyError> {
debug_assert_eq!(layout, ViewLayoutTypePB::Document);
let _user_id = user_id.to_string();
let view_id = view_id.to_string();
let manager = self.0.clone();
let document_content = self.0.initial_document_content();
FutureResult::new(async move {
let delta_data = Bytes::from(document_content);
let revision = Revision::initial_revision(&view_id, delta_data.clone());
manager.create_document(view_id, vec![revision]).await?;
Ok(delta_data)
})
}
fn create_default_view(
&self,
user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
_data_format: ViewDataFormatPB,
) -> FutureResult<Bytes, FlowyError> {
debug_assert_eq!(layout, ViewLayoutTypePB::Document);
let _user_id = user_id.to_string();
let view_id = view_id.to_string();
let manager = self.0.clone();
let document_content = self.0.initial_document_content();
FutureResult::new(async move {
let delta_data = Bytes::from(document_content);
let revision = Revision::initial_revision(&view_id, delta_data.clone());
manager.create_document(view_id, vec![revision]).await?;
Ok(delta_data)
})
}
fn create_view_with_data(
&self,
_user_id: &str,
_view_id: &str,
data: Vec<u8>,
layout: ViewLayoutTypePB,
) -> FutureResult<Bytes, FlowyError> {
debug_assert_eq!(layout, ViewLayoutTypePB::Document);
FutureResult::new(async move { Ok(Bytes::from(data)) })
}
fn create_view_with_data(
&self,
_user_id: &str,
_view_id: &str,
data: Vec<u8>,
layout: ViewLayoutTypePB,
) -> FutureResult<Bytes, FlowyError> {
debug_assert_eq!(layout, ViewLayoutTypePB::Document);
FutureResult::new(async move { Ok(Bytes::from(data)) })
}
fn data_types(&self) -> Vec<ViewDataFormatPB> {
vec![ViewDataFormatPB::DeltaFormat, ViewDataFormatPB::NodeFormat]
}
fn data_types(&self) -> Vec<ViewDataFormatPB> {
vec![ViewDataFormatPB::DeltaFormat, ViewDataFormatPB::NodeFormat]
}
}
struct GridViewDataProcessor(Arc<DatabaseManager>);
impl ViewDataProcessor for GridViewDataProcessor {
fn create_view(
&self,
_user_id: &str,
view_id: &str,
_layout: ViewLayoutTypePB,
delta_data: Bytes,
) -> FutureResult<(), FlowyError> {
let revision = Revision::initial_revision(view_id, delta_data);
let view_id = view_id.to_string();
let grid_manager = self.0.clone();
FutureResult::new(async move {
grid_manager.create_database(view_id, vec![revision]).await?;
Ok(())
})
}
fn create_view(
&self,
_user_id: &str,
view_id: &str,
_layout: ViewLayoutTypePB,
delta_data: Bytes,
) -> FutureResult<(), FlowyError> {
let revision = Revision::initial_revision(view_id, delta_data);
let view_id = view_id.to_string();
let grid_manager = self.0.clone();
FutureResult::new(async move {
grid_manager
.create_database(view_id, vec![revision])
.await?;
Ok(())
})
}
fn close_view(&self, view_id: &str) -> FutureResult<(), FlowyError> {
let grid_manager = self.0.clone();
let view_id = view_id.to_string();
FutureResult::new(async move {
grid_manager.close_database(view_id).await?;
Ok(())
})
}
fn close_view(&self, view_id: &str) -> FutureResult<(), FlowyError> {
let grid_manager = self.0.clone();
let view_id = view_id.to_string();
FutureResult::new(async move {
grid_manager.close_database(view_id).await?;
Ok(())
})
}
fn get_view_data(&self, view: &ViewPB) -> FutureResult<Bytes, FlowyError> {
let grid_manager = self.0.clone();
let view_id = view.id.clone();
FutureResult::new(async move {
let editor = grid_manager.open_database(view_id).await?;
let delta_bytes = editor.duplicate_grid().await?;
Ok(delta_bytes.into())
})
}
fn get_view_data(&self, view: &ViewPB) -> FutureResult<Bytes, FlowyError> {
let grid_manager = self.0.clone();
let view_id = view.id.clone();
FutureResult::new(async move {
let editor = grid_manager.open_database(view_id).await?;
let delta_bytes = editor.duplicate_grid().await?;
Ok(delta_bytes.into())
})
}
fn create_default_view(
&self,
user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
data_format: ViewDataFormatPB,
) -> FutureResult<Bytes, FlowyError> {
debug_assert_eq!(data_format, ViewDataFormatPB::DatabaseFormat);
let (build_context, layout) = match layout {
ViewLayoutTypePB::Grid => (make_default_grid(), LayoutTypePB::Grid),
ViewLayoutTypePB::Board => (make_default_board(), LayoutTypePB::Board),
ViewLayoutTypePB::Calendar => (make_default_calendar(), LayoutTypePB::Calendar),
ViewLayoutTypePB::Document => {
return FutureResult::new(async move {
Err(FlowyError::internal().context(format!("Can't handle {:?} layout type", layout)))
});
}
};
fn create_default_view(
&self,
user_id: &str,
view_id: &str,
layout: ViewLayoutTypePB,
data_format: ViewDataFormatPB,
) -> FutureResult<Bytes, FlowyError> {
debug_assert_eq!(data_format, ViewDataFormatPB::DatabaseFormat);
let (build_context, layout) = match layout {
ViewLayoutTypePB::Grid => (make_default_grid(), LayoutTypePB::Grid),
ViewLayoutTypePB::Board => (make_default_board(), LayoutTypePB::Board),
ViewLayoutTypePB::Calendar => (make_default_calendar(), LayoutTypePB::Calendar),
ViewLayoutTypePB::Document => {
return FutureResult::new(async move {
Err(FlowyError::internal().context(format!("Can't handle {:?} layout type", layout)))
});
},
};
let user_id = user_id.to_string();
let view_id = view_id.to_string();
let grid_manager = self.0.clone();
FutureResult::new(async move {
make_database_view_data(&user_id, &view_id, layout, grid_manager, build_context).await
})
}
let user_id = user_id.to_string();
let view_id = view_id.to_string();
let grid_manager = self.0.clone();
FutureResult::new(async move {
make_database_view_data(&user_id, &view_id, layout, grid_manager, build_context).await
})
}
fn create_view_with_data(
&self,
user_id: &str,
view_id: &str,
data: Vec<u8>,
layout: ViewLayoutTypePB,
) -> FutureResult<Bytes, FlowyError> {
let user_id = user_id.to_string();
let view_id = view_id.to_string();
let grid_manager = self.0.clone();
fn create_view_with_data(
&self,
user_id: &str,
view_id: &str,
data: Vec<u8>,
layout: ViewLayoutTypePB,
) -> FutureResult<Bytes, FlowyError> {
let user_id = user_id.to_string();
let view_id = view_id.to_string();
let grid_manager = self.0.clone();
let layout = match layout {
ViewLayoutTypePB::Grid => LayoutTypePB::Grid,
ViewLayoutTypePB::Board => LayoutTypePB::Board,
ViewLayoutTypePB::Calendar => LayoutTypePB::Calendar,
ViewLayoutTypePB::Document => {
return FutureResult::new(async move {
Err(FlowyError::internal().context(format!("Can't handle {:?} layout type", layout)))
});
}
};
let layout = match layout {
ViewLayoutTypePB::Grid => LayoutTypePB::Grid,
ViewLayoutTypePB::Board => LayoutTypePB::Board,
ViewLayoutTypePB::Calendar => LayoutTypePB::Calendar,
ViewLayoutTypePB::Document => {
return FutureResult::new(async move {
Err(FlowyError::internal().context(format!("Can't handle {:?} layout type", layout)))
});
},
};
FutureResult::new(async move {
let bytes = Bytes::from(data);
let build_context = BuildDatabaseContext::try_from(bytes)?;
make_database_view_data(&user_id, &view_id, layout, grid_manager, build_context).await
})
}
FutureResult::new(async move {
let bytes = Bytes::from(data);
let build_context = BuildDatabaseContext::try_from(bytes)?;
make_database_view_data(&user_id, &view_id, layout, grid_manager, build_context).await
})
}
fn data_types(&self) -> Vec<ViewDataFormatPB> {
vec![ViewDataFormatPB::DatabaseFormat]
}
fn data_types(&self) -> Vec<ViewDataFormatPB> {
vec![ViewDataFormatPB::DatabaseFormat]
}
}

@@ -18,76 +18,81 @@ use ws_model::ws_revision::ClientRevisionWSData;
pub struct GridDepsResolver();
impl GridDepsResolver {
pub async fn resolve(
ws_conn: Arc<FlowyWebSocketConnect>,
user_session: Arc<UserSession>,
task_scheduler: Arc<RwLock<TaskDispatcher>>,
) -> Arc<DatabaseManager> {
let user = Arc::new(GridUserImpl(user_session.clone()));
let rev_web_socket = Arc::new(GridRevisionWebSocket(ws_conn));
let grid_manager = Arc::new(DatabaseManager::new(
user.clone(),
rev_web_socket,
task_scheduler,
Arc::new(GridDatabaseImpl(user_session)),
));
pub async fn resolve(
ws_conn: Arc<FlowyWebSocketConnect>,
user_session: Arc<UserSession>,
task_scheduler: Arc<RwLock<TaskDispatcher>>,
) -> Arc<DatabaseManager> {
let user = Arc::new(GridUserImpl(user_session.clone()));
let rev_web_socket = Arc::new(GridRevisionWebSocket(ws_conn));
let grid_manager = Arc::new(DatabaseManager::new(
user.clone(),
rev_web_socket,
task_scheduler,
Arc::new(GridDatabaseImpl(user_session)),
));
if let (Ok(user_id), Ok(token)) = (user.user_id(), user.token()) {
match grid_manager.initialize(&user_id, &token).await {
Ok(_) => {}
Err(e) => tracing::error!("Initialize grid manager failed: {}", e),
}
}
grid_manager
if let (Ok(user_id), Ok(token)) = (user.user_id(), user.token()) {
match grid_manager.initialize(&user_id, &token).await {
Ok(_) => {},
Err(e) => tracing::error!("Initialize grid manager failed: {}", e),
}
}
grid_manager
}
}
struct GridDatabaseImpl(Arc<UserSession>);
impl GridDatabase for GridDatabaseImpl {
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self.0.db_pool().map_err(|e| FlowyError::internal().context(e))
}
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self
.0
.db_pool()
.map_err(|e| FlowyError::internal().context(e))
}
}
struct GridUserImpl(Arc<UserSession>);
impl DatabaseUser for GridUserImpl {
fn user_id(&self) -> Result<String, FlowyError> {
self.0.user_id()
}
fn user_id(&self) -> Result<String, FlowyError> {
self.0.user_id()
}
fn token(&self) -> Result<String, FlowyError> {
self.0.token()
}
fn token(&self) -> Result<String, FlowyError> {
self.0.token()
}
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self.0.db_pool()
}
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self.0.db_pool()
}
}
struct GridRevisionWebSocket(Arc<FlowyWebSocketConnect>);
impl RevisionWebSocket for GridRevisionWebSocket {
fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
let bytes: Bytes = data.try_into().unwrap();
let msg = WebSocketRawMessage {
channel: WSChannel::Database,
data: bytes.to_vec(),
};
fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
let bytes: Bytes = data.try_into().unwrap();
let msg = WebSocketRawMessage {
channel: WSChannel::Database,
data: bytes.to_vec(),
};
let ws_conn = self.0.clone();
Box::pin(async move {
match ws_conn.web_socket().await? {
None => {}
Some(sender) => {
sender.send(msg).map_err(|e| FlowyError::internal().context(e))?;
}
}
Ok(())
})
}
let ws_conn = self.0.clone();
Box::pin(async move {
match ws_conn.web_socket().await? {
None => {},
Some(sender) => {
sender
.send(msg)
.map_err(|e| FlowyError::internal().context(e))?;
},
}
Ok(())
})
}
fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
let ws_conn = self.0.clone();
Box::pin(async move { ws_conn.subscribe_websocket_state().await })
}
fn subscribe_state_changed(&self) -> BoxFuture<WSStateReceiver> {
let ws_conn = self.0.clone();
Box::pin(async move { ws_conn.subscribe_websocket_state().await })
}
}

@@ -6,13 +6,13 @@ use std::sync::Arc;
pub struct UserDepsResolver();
impl UserDepsResolver {
pub fn resolve(
local_server: &Option<Arc<LocalServer>>,
server_config: &ClientServerConfiguration,
) -> Arc<dyn UserCloudService> {
match local_server.clone() {
None => Arc::new(UserHttpCloudService::new(server_config)),
Some(local_server) => local_server,
}
pub fn resolve(
local_server: &Option<Arc<LocalServer>>,
server_config: &ClientServerConfiguration,
) -> Arc<dyn UserCloudService> {
match local_server.clone() {
None => Arc::new(UserHttpCloudService::new(server_config)),
Some(local_server) => local_server,
}
}
}

@@ -22,11 +22,11 @@ use module::make_plugins;
pub use module::*;
use std::time::Duration;
use std::{
fmt,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
fmt,
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
};
use tokio::sync::{broadcast, RwLock};
use user_model::UserProfile;
@@ -35,316 +35,331 @@ static INIT_LOG: AtomicBool = AtomicBool::new(false);
#[derive(Clone)]
pub struct AppFlowyCoreConfig {
/// Different `AppFlowyCoreConfig` instances should have different names
name: String,
/// Panics if the `root` path does not exist
storage_path: String,
log_filter: String,
server_config: ClientServerConfiguration,
pub document: DocumentConfig,
/// Different `AppFlowyCoreConfig` instances should have different names
name: String,
/// Panics if the `root` path does not exist
storage_path: String,
log_filter: String,
server_config: ClientServerConfiguration,
pub document: DocumentConfig,
}
impl fmt::Debug for AppFlowyCoreConfig {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("AppFlowyCoreConfig")
.field("storage_path", &self.storage_path)
.field("server-config", &self.server_config)
.field("document-config", &self.document)
.finish()
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("AppFlowyCoreConfig")
.field("storage_path", &self.storage_path)
.field("server-config", &self.server_config)
.field("document-config", &self.document)
.finish()
}
}
impl AppFlowyCoreConfig {
pub fn new(root: &str, name: String, server_config: ClientServerConfiguration) -> Self {
AppFlowyCoreConfig {
name,
storage_path: root.to_owned(),
log_filter: create_log_filter("info".to_owned(), vec![]),
server_config,
document: DocumentConfig::default(),
}
pub fn new(root: &str, name: String, server_config: ClientServerConfiguration) -> Self {
AppFlowyCoreConfig {
name,
storage_path: root.to_owned(),
log_filter: create_log_filter("info".to_owned(), vec![]),
server_config,
document: DocumentConfig::default(),
}
}
pub fn with_document_version(mut self, version: DocumentVersionPB) -> Self {
self.document.version = version;
self
}
pub fn with_document_version(mut self, version: DocumentVersionPB) -> Self {
self.document.version = version;
self
}
pub fn log_filter(mut self, level: &str, with_crates: Vec<String>) -> Self {
self.log_filter = create_log_filter(level.to_owned(), with_crates);
self
}
pub fn log_filter(mut self, level: &str, with_crates: Vec<String>) -> Self {
self.log_filter = create_log_filter(level.to_owned(), with_crates);
self
}
}
fn create_log_filter(level: String, with_crates: Vec<String>) -> String {
let level = std::env::var("RUST_LOG").unwrap_or(level);
let mut filters = with_crates
.into_iter()
.map(|crate_name| format!("{}={}", crate_name, level))
.collect::<Vec<String>>();
filters.push(format!("flowy_core={}", level));
filters.push(format!("flowy_folder={}", level));
filters.push(format!("flowy_user={}", level));
filters.push(format!("flowy_document={}", level));
filters.push(format!("flowy_database={}", level));
filters.push(format!("flowy_sync={}", "info"));
filters.push(format!("flowy_client_sync={}", "info"));
filters.push(format!("flowy_notification={}", "info"));
filters.push(format!("lib_ot={}", level));
filters.push(format!("lib_ws={}", level));
filters.push(format!("lib_infra={}", level));
filters.push(format!("flowy_sync={}", level));
filters.push(format!("flowy_revision={}", level));
filters.push(format!("flowy_revision_persistence={}", level));
filters.push(format!("flowy_task={}", level));
// filters.push(format!("lib_dispatch={}", level));
let level = std::env::var("RUST_LOG").unwrap_or(level);
let mut filters = with_crates
.into_iter()
.map(|crate_name| format!("{}={}", crate_name, level))
.collect::<Vec<String>>();
filters.push(format!("flowy_core={}", level));
filters.push(format!("flowy_folder={}", level));
filters.push(format!("flowy_user={}", level));
filters.push(format!("flowy_document={}", level));
filters.push(format!("flowy_database={}", level));
filters.push(format!("flowy_sync={}", "info"));
filters.push(format!("flowy_client_sync={}", "info"));
filters.push(format!("flowy_notification={}", "info"));
filters.push(format!("lib_ot={}", level));
filters.push(format!("lib_ws={}", level));
filters.push(format!("lib_infra={}", level));
filters.push(format!("flowy_sync={}", level));
filters.push(format!("flowy_revision={}", level));
filters.push(format!("flowy_revision_persistence={}", level));
filters.push(format!("flowy_task={}", level));
// filters.push(format!("lib_dispatch={}", level));
filters.push(format!("dart_ffi={}", "info"));
filters.push(format!("flowy_sqlite={}", "info"));
filters.push(format!("flowy_net={}", "info"));
filters.join(",")
filters.push(format!("dart_ffi={}", "info"));
filters.push(format!("flowy_sqlite={}", "info"));
filters.push(format!("flowy_net={}", "info"));
filters.join(",")
}
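
A sketch of the result: with level "trace" and with_crates vec!["my_crate".to_string()] (crate name hypothetical), the returned string starts

my_crate=trace,flowy_core=trace,flowy_folder=trace,flowy_user=trace,flowy_document=trace,flowy_database=trace,flowy_sync=info,...

Note that flowy_sync is pushed twice, first pinned to info and later at the requested level; which directive wins is left to the env-filter implementation that parses this string.
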
#[derive(Clone)]
pub struct AppFlowyCore {
#[allow(dead_code)]
pub config: AppFlowyCoreConfig,
pub user_session: Arc<UserSession>,
pub document_manager: Arc<DocumentManager>,
pub folder_manager: Arc<FolderManager>,
pub grid_manager: Arc<DatabaseManager>,
pub event_dispatcher: Arc<AFPluginDispatcher>,
pub ws_conn: Arc<FlowyWebSocketConnect>,
pub local_server: Option<Arc<LocalServer>>,
pub task_dispatcher: Arc<RwLock<TaskDispatcher>>,
#[allow(dead_code)]
pub config: AppFlowyCoreConfig,
pub user_session: Arc<UserSession>,
pub document_manager: Arc<DocumentManager>,
pub folder_manager: Arc<FolderManager>,
pub grid_manager: Arc<DatabaseManager>,
pub event_dispatcher: Arc<AFPluginDispatcher>,
pub ws_conn: Arc<FlowyWebSocketConnect>,
pub local_server: Option<Arc<LocalServer>>,
pub task_dispatcher: Arc<RwLock<TaskDispatcher>>,
}
impl AppFlowyCore {
pub fn new(config: AppFlowyCoreConfig) -> Self {
init_log(&config);
init_kv(&config.storage_path);
tracing::debug!("🔥 {:?}", config);
let runtime = tokio_default_runtime().unwrap();
let task_scheduler = TaskDispatcher::new(Duration::from_secs(2));
let task_dispatcher = Arc::new(RwLock::new(task_scheduler));
runtime.spawn(TaskRunner::run(task_dispatcher.clone()));
pub fn new(config: AppFlowyCoreConfig) -> Self {
init_log(&config);
init_kv(&config.storage_path);
tracing::debug!("🔥 {:?}", config);
let runtime = tokio_default_runtime().unwrap();
let task_scheduler = TaskDispatcher::new(Duration::from_secs(2));
let task_dispatcher = Arc::new(RwLock::new(task_scheduler));
runtime.spawn(TaskRunner::run(task_dispatcher.clone()));
let (local_server, ws_conn) = mk_local_server(&config.server_config);
let (user_session, document_manager, folder_manager, local_server, grid_manager) = runtime.block_on(async {
let user_session = mk_user_session(&config, &local_server, &config.server_config);
let document_manager = DocumentDepsResolver::resolve(
local_server.clone(),
ws_conn.clone(),
user_session.clone(),
&config.server_config,
&config.document,
);
let (local_server, ws_conn) = mk_local_server(&config.server_config);
let (user_session, document_manager, folder_manager, local_server, grid_manager) = runtime
.block_on(async {
let user_session = mk_user_session(&config, &local_server, &config.server_config);
let document_manager = DocumentDepsResolver::resolve(
local_server.clone(),
ws_conn.clone(),
user_session.clone(),
&config.server_config,
&config.document,
);
let grid_manager =
GridDepsResolver::resolve(ws_conn.clone(), user_session.clone(), task_dispatcher.clone()).await;
let grid_manager = GridDepsResolver::resolve(
ws_conn.clone(),
user_session.clone(),
task_dispatcher.clone(),
)
.await;
let folder_manager = FolderDepsResolver::resolve(
local_server.clone(),
user_session.clone(),
&config.server_config,
&ws_conn,
&document_manager,
&grid_manager,
)
.await;
let folder_manager = FolderDepsResolver::resolve(
local_server.clone(),
user_session.clone(),
&config.server_config,
&ws_conn,
&document_manager,
&grid_manager,
)
.await;
if let Some(local_server) = local_server.as_ref() {
local_server.run();
}
ws_conn.init().await;
(
user_session,
document_manager,
folder_manager,
local_server,
grid_manager,
)
});
let user_status_listener = UserStatusListener {
document_manager: document_manager.clone(),
folder_manager: folder_manager.clone(),
grid_manager: grid_manager.clone(),
ws_conn: ws_conn.clone(),
config: config.clone(),
};
let user_status_callback = UserStatusCallbackImpl {
listener: Arc::new(user_status_listener),
};
let cloned_user_session = user_session.clone();
runtime.block_on(async move {
cloned_user_session.clone().init(user_status_callback).await;
});
let event_dispatcher = Arc::new(AFPluginDispatcher::construct(runtime, || {
make_plugins(
&ws_conn,
&folder_manager,
&grid_manager,
&user_session,
&document_manager,
)
}));
_start_listening(&event_dispatcher, &ws_conn, &folder_manager);
Self {
config,
user_session,
document_manager,
folder_manager,
grid_manager,
event_dispatcher,
ws_conn,
local_server,
task_dispatcher,
if let Some(local_server) = local_server.as_ref() {
local_server.run();
}
}
ws_conn.init().await;
(
user_session,
document_manager,
folder_manager,
local_server,
grid_manager,
)
});
pub fn dispatcher(&self) -> Arc<AFPluginDispatcher> {
self.event_dispatcher.clone()
let user_status_listener = UserStatusListener {
document_manager: document_manager.clone(),
folder_manager: folder_manager.clone(),
grid_manager: grid_manager.clone(),
ws_conn: ws_conn.clone(),
config: config.clone(),
};
let user_status_callback = UserStatusCallbackImpl {
listener: Arc::new(user_status_listener),
};
let cloned_user_session = user_session.clone();
runtime.block_on(async move {
cloned_user_session.clone().init(user_status_callback).await;
});
let event_dispatcher = Arc::new(AFPluginDispatcher::construct(runtime, || {
make_plugins(
&ws_conn,
&folder_manager,
&grid_manager,
&user_session,
&document_manager,
)
}));
_start_listening(&event_dispatcher, &ws_conn, &folder_manager);
Self {
config,
user_session,
document_manager,
folder_manager,
grid_manager,
event_dispatcher,
ws_conn,
local_server,
task_dispatcher,
}
}
pub fn dispatcher(&self) -> Arc<AFPluginDispatcher> {
self.event_dispatcher.clone()
}
}
fn _start_listening(
event_dispatcher: &AFPluginDispatcher,
ws_conn: &Arc<FlowyWebSocketConnect>,
folder_manager: &Arc<FolderManager>,
event_dispatcher: &AFPluginDispatcher,
ws_conn: &Arc<FlowyWebSocketConnect>,
folder_manager: &Arc<FolderManager>,
) {
let subscribe_network_type = ws_conn.subscribe_network_ty();
let folder_manager = folder_manager.clone();
let cloned_folder_manager = folder_manager;
let ws_conn = ws_conn.clone();
let subscribe_network_type = ws_conn.subscribe_network_ty();
let folder_manager = folder_manager.clone();
let cloned_folder_manager = folder_manager;
let ws_conn = ws_conn.clone();
event_dispatcher.spawn(async move {
listen_on_websocket(ws_conn.clone());
});
event_dispatcher.spawn(async move {
listen_on_websocket(ws_conn.clone());
});
event_dispatcher.spawn(async move {
_listen_network_status(subscribe_network_type, cloned_folder_manager).await;
});
event_dispatcher.spawn(async move {
_listen_network_status(subscribe_network_type, cloned_folder_manager).await;
});
}
fn mk_local_server(
server_config: &ClientServerConfiguration,
server_config: &ClientServerConfiguration,
) -> (Option<Arc<LocalServer>>, Arc<FlowyWebSocketConnect>) {
let ws_addr = server_config.ws_addr();
if cfg!(feature = "http_sync") {
let ws_conn = Arc::new(FlowyWebSocketConnect::new(ws_addr));
(None, ws_conn)
} else {
let context = flowy_net::local_server::build_server(server_config);
let local_ws = Arc::new(context.local_ws);
let ws_conn = Arc::new(FlowyWebSocketConnect::from_local(ws_addr, local_ws));
(Some(Arc::new(context.local_server)), ws_conn)
}
let ws_addr = server_config.ws_addr();
if cfg!(feature = "http_sync") {
let ws_conn = Arc::new(FlowyWebSocketConnect::new(ws_addr));
(None, ws_conn)
} else {
let context = flowy_net::local_server::build_server(server_config);
let local_ws = Arc::new(context.local_ws);
let ws_conn = Arc::new(FlowyWebSocketConnect::from_local(ws_addr, local_ws));
(Some(Arc::new(context.local_server)), ws_conn)
}
}
async fn _listen_network_status(mut subscribe: broadcast::Receiver<NetworkType>, _core: Arc<FolderManager>) {
while let Ok(_new_type) = subscribe.recv().await {
// core.network_state_changed(new_type);
}
async fn _listen_network_status(
mut subscribe: broadcast::Receiver<NetworkType>,
_core: Arc<FolderManager>,
) {
while let Ok(_new_type) = subscribe.recv().await {
// core.network_state_changed(new_type);
}
}
fn init_kv(root: &str) {
match flowy_sqlite::kv::KV::init(root) {
Ok(_) => {}
Err(e) => tracing::error!("Init kv store failed: {}", e),
}
match flowy_sqlite::kv::KV::init(root) {
Ok(_) => {},
Err(e) => tracing::error!("Init kv store failed: {}", e),
}
}
fn init_log(config: &AppFlowyCoreConfig) {
if !INIT_LOG.load(Ordering::SeqCst) {
INIT_LOG.store(true, Ordering::SeqCst);
if !INIT_LOG.load(Ordering::SeqCst) {
INIT_LOG.store(true, Ordering::SeqCst);
let _ = lib_log::Builder::new("AppFlowy-Client", &config.storage_path)
.env_filter(&config.log_filter)
.build();
}
let _ = lib_log::Builder::new("AppFlowy-Client", &config.storage_path)
.env_filter(&config.log_filter)
.build();
}
}
fn mk_user_session(
config: &AppFlowyCoreConfig,
local_server: &Option<Arc<LocalServer>>,
server_config: &ClientServerConfiguration,
config: &AppFlowyCoreConfig,
local_server: &Option<Arc<LocalServer>>,
server_config: &ClientServerConfiguration,
) -> Arc<UserSession> {
let user_config = UserSessionConfig::new(&config.name, &config.storage_path);
let cloud_service = UserDepsResolver::resolve(local_server, server_config);
Arc::new(UserSession::new(user_config, cloud_service))
let user_config = UserSessionConfig::new(&config.name, &config.storage_path);
let cloud_service = UserDepsResolver::resolve(local_server, server_config);
Arc::new(UserSession::new(user_config, cloud_service))
}
struct UserStatusListener {
document_manager: Arc<DocumentManager>,
folder_manager: Arc<FolderManager>,
grid_manager: Arc<DatabaseManager>,
ws_conn: Arc<FlowyWebSocketConnect>,
config: AppFlowyCoreConfig,
document_manager: Arc<DocumentManager>,
folder_manager: Arc<FolderManager>,
grid_manager: Arc<DatabaseManager>,
ws_conn: Arc<FlowyWebSocketConnect>,
config: AppFlowyCoreConfig,
}
impl UserStatusListener {
async fn did_sign_in(&self, token: &str, user_id: &str) -> FlowyResult<()> {
self.folder_manager.initialize(user_id, token).await?;
self.document_manager.initialize(user_id).await?;
self.grid_manager.initialize(user_id, token).await?;
self.ws_conn.start(token.to_owned(), user_id.to_owned()).await?;
Ok(())
}
async fn did_sign_in(&self, token: &str, user_id: &str) -> FlowyResult<()> {
self.folder_manager.initialize(user_id, token).await?;
self.document_manager.initialize(user_id).await?;
self.grid_manager.initialize(user_id, token).await?;
self
.ws_conn
.start(token.to_owned(), user_id.to_owned())
.await?;
Ok(())
}
async fn did_sign_up(&self, user_profile: &UserProfile) -> FlowyResult<()> {
let view_data_type = match self.config.document.version {
DocumentVersionPB::V0 => ViewDataFormatPB::DeltaFormat,
DocumentVersionPB::V1 => ViewDataFormatPB::NodeFormat,
};
self.folder_manager
.initialize_with_new_user(&user_profile.id, &user_profile.token, view_data_type)
.await?;
self.document_manager
.initialize_with_new_user(&user_profile.id, &user_profile.token)
.await?;
async fn did_sign_up(&self, user_profile: &UserProfile) -> FlowyResult<()> {
let view_data_type = match self.config.document.version {
DocumentVersionPB::V0 => ViewDataFormatPB::DeltaFormat,
DocumentVersionPB::V1 => ViewDataFormatPB::NodeFormat,
};
self
.folder_manager
.initialize_with_new_user(&user_profile.id, &user_profile.token, view_data_type)
.await?;
self
.document_manager
.initialize_with_new_user(&user_profile.id, &user_profile.token)
.await?;
self.grid_manager
.initialize_with_new_user(&user_profile.id, &user_profile.token)
.await?;
self
.grid_manager
.initialize_with_new_user(&user_profile.id, &user_profile.token)
.await?;
self.ws_conn
.start(user_profile.token.clone(), user_profile.id.clone())
.await?;
Ok(())
}
self
.ws_conn
.start(user_profile.token.clone(), user_profile.id.clone())
.await?;
Ok(())
}
async fn did_expired(&self, _token: &str, user_id: &str) -> FlowyResult<()> {
self.folder_manager.clear(user_id).await;
self.ws_conn.stop().await;
Ok(())
}
async fn did_expired(&self, _token: &str, user_id: &str) -> FlowyResult<()> {
self.folder_manager.clear(user_id).await;
self.ws_conn.stop().await;
Ok(())
}
}
struct UserStatusCallbackImpl {
listener: Arc<UserStatusListener>,
listener: Arc<UserStatusListener>,
}
impl UserStatusCallback for UserStatusCallbackImpl {
fn did_sign_in(&self, token: &str, user_id: &str) -> Fut<FlowyResult<()>> {
let listener = self.listener.clone();
let token = token.to_owned();
let user_id = user_id.to_owned();
to_fut(async move { listener.did_sign_in(&token, &user_id).await })
}
fn did_sign_in(&self, token: &str, user_id: &str) -> Fut<FlowyResult<()>> {
let listener = self.listener.clone();
let token = token.to_owned();
let user_id = user_id.to_owned();
to_fut(async move { listener.did_sign_in(&token, &user_id).await })
}
fn did_sign_up(&self, user_profile: &UserProfile) -> Fut<FlowyResult<()>> {
let listener = self.listener.clone();
let user_profile = user_profile.clone();
to_fut(async move { listener.did_sign_up(&user_profile).await })
}
fn did_sign_up(&self, user_profile: &UserProfile) -> Fut<FlowyResult<()>> {
let listener = self.listener.clone();
let user_profile = user_profile.clone();
to_fut(async move { listener.did_sign_up(&user_profile).await })
}
fn did_expired(&self, token: &str, user_id: &str) -> Fut<FlowyResult<()>> {
let listener = self.listener.clone();
let token = token.to_owned();
let user_id = user_id.to_owned();
to_fut(async move { listener.did_expired(&token, &user_id).await })
}
fn did_expired(&self, token: &str, user_id: &str) -> Fut<FlowyResult<()>> {
let listener = self.listener.clone();
let token = token.to_owned();
let user_id = user_id.to_owned();
to_fut(async move { listener.did_expired(&token, &user_id).await })
}
}

@@ -7,16 +7,22 @@ use lib_dispatch::prelude::AFPlugin;
use std::sync::Arc;
pub fn make_plugins(
ws_conn: &Arc<FlowyWebSocketConnect>,
folder_manager: &Arc<FolderManager>,
grid_manager: &Arc<DatabaseManager>,
user_session: &Arc<UserSession>,
document_manager: &Arc<DocumentManager>,
ws_conn: &Arc<FlowyWebSocketConnect>,
folder_manager: &Arc<FolderManager>,
grid_manager: &Arc<DatabaseManager>,
user_session: &Arc<UserSession>,
document_manager: &Arc<DocumentManager>,
) -> Vec<AFPlugin> {
let user_plugin = flowy_user::event_map::init(user_session.clone());
let folder_plugin = flowy_folder::event_map::init(folder_manager.clone());
let network_plugin = flowy_net::event_map::init(ws_conn.clone());
let grid_plugin = flowy_database::event_map::init(grid_manager.clone());
let document_plugin = flowy_document::event_map::init(document_manager.clone());
vec![user_plugin, folder_plugin, network_plugin, grid_plugin, document_plugin]
let user_plugin = flowy_user::event_map::init(user_session.clone());
let folder_plugin = flowy_folder::event_map::init(folder_manager.clone());
let network_plugin = flowy_net::event_map::init(ws_conn.clone());
let grid_plugin = flowy_database::event_map::init(grid_manager.clone());
let document_plugin = flowy_document::event_map::init(document_manager.clone());
vec![
user_plugin,
folder_plugin,
network_plugin,
grid_plugin,
document_plugin,
]
}

@@ -1,10 +1,10 @@
fn main() {
let crate_name = env!("CARGO_PKG_NAME");
flowy_codegen::protobuf_file::gen(crate_name);
let crate_name = env!("CARGO_PKG_NAME");
flowy_codegen::protobuf_file::gen(crate_name);
#[cfg(feature = "dart")]
flowy_codegen::dart_event::gen(crate_name);
#[cfg(feature = "dart")]
flowy_codegen::dart_event::gen(crate_name);
#[cfg(feature = "ts")]
flowy_codegen::ts_event::gen(crate_name);
#[cfg(feature = "ts")]
flowy_codegen::ts_event::gen(crate_name);
}
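
For context on the feature flags above: `flowy_codegen` fans out per-target code generation behind cargo features, so each `#[cfg(feature = ...)]` call is compiled only when that feature is enabled. A minimal runnable sketch of the same gating (the printed strings are placeholders, not the real codegen calls):

fn main() {
  // Runs unconditionally, like the protobuf generation above.
  println!("generating protobuf definitions");

  // Compiled in only when the matching cargo feature is enabled,
  // e.g. `cargo build --features dart`.
  #[cfg(feature = "dart")]
  println!("generating dart event code");

  #[cfg(feature = "ts")]
  println!("generating typescript event code");
}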

View File

@ -7,166 +7,169 @@ use std::collections::HashMap;
#[derive(ProtoBuf, Default)]
pub struct CreateSelectOptionPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub database_id: String,
#[pb(index = 2)]
pub database_id: String,
#[pb(index = 3)]
pub option_name: String,
#[pb(index = 3)]
pub option_name: String,
}
pub struct CreateSelectOptionParams {
pub field_id: String,
pub database_id: String,
pub option_name: String,
pub field_id: String,
pub database_id: String,
pub option_name: String,
}
impl TryInto<CreateSelectOptionParams> for CreateSelectOptionPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<CreateSelectOptionParams, Self::Error> {
let option_name = NotEmptyStr::parse(self.option_name).map_err(|_| ErrorCode::SelectOptionNameIsEmpty)?;
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(CreateSelectOptionParams {
field_id: field_id.0,
option_name: option_name.0,
database_id: database_id.0,
})
}
fn try_into(self) -> Result<CreateSelectOptionParams, Self::Error> {
let option_name =
NotEmptyStr::parse(self.option_name).map_err(|_| ErrorCode::SelectOptionNameIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(CreateSelectOptionParams {
field_id: field_id.0,
option_name: option_name.0,
database_id: database_id.0,
})
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct CellIdPB {
#[pb(index = 1)]
pub database_id: String,
#[pb(index = 1)]
pub database_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub row_id: String,
#[pb(index = 3)]
pub row_id: String,
}
/// Represents the cell identifier. It's used to locate a cell in the corresponding
/// view's row with the field id.
pub struct CellIdParams {
pub database_id: String,
pub field_id: String,
pub row_id: String,
pub database_id: String,
pub field_id: String,
pub row_id: String,
}
impl TryInto<CellIdParams> for CellIdPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<CellIdParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
let row_id = NotEmptyStr::parse(self.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
Ok(CellIdParams {
database_id: database_id.0,
field_id: field_id.0,
row_id: row_id.0,
})
}
fn try_into(self) -> Result<CellIdParams, Self::Error> {
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
let row_id = NotEmptyStr::parse(self.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
Ok(CellIdParams {
database_id: database_id.0,
field_id: field_id.0,
row_id: row_id.0,
})
}
}
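
Every `TryInto` impl in this file follows the same validate-then-convert shape shown above. A self-contained sketch of that shape, using stand-in types rather than the generated `PB` structs (the real `NotEmptyStr` lives in `entities/parser` later in this commit, and the error enum here is a stand-in for `ErrorCode`):

#[derive(Debug, PartialEq)]
enum ParseError {
  DatabaseIdIsEmpty,
  FieldIdIsEmpty,
  RowIdIsEmpty,
}

struct NotEmptyStr(String);

impl NotEmptyStr {
  fn parse(s: String) -> Result<Self, String> {
    if s.trim().is_empty() {
      return Err("Input string is empty".to_owned());
    }
    Ok(Self(s))
  }
}

struct CellIdParams {
  database_id: String,
  field_id: String,
  row_id: String,
}

fn to_params(
  database_id: String,
  field_id: String,
  row_id: String,
) -> Result<CellIdParams, ParseError> {
  // Each identifier is validated independently and mapped to its own error code,
  // then the `NotEmptyStr` newtype is unwrapped with `.0`.
  let database_id = NotEmptyStr::parse(database_id).map_err(|_| ParseError::DatabaseIdIsEmpty)?;
  let field_id = NotEmptyStr::parse(field_id).map_err(|_| ParseError::FieldIdIsEmpty)?;
  let row_id = NotEmptyStr::parse(row_id).map_err(|_| ParseError::RowIdIsEmpty)?;
  Ok(CellIdParams {
    database_id: database_id.0,
    field_id: field_id.0,
    row_id: row_id.0,
  })
}

fn main() {
  assert!(to_params("db".into(), "field".into(), "row".into()).is_ok());
  assert_eq!(
    to_params("db".into(), "".into(), "row".into()).err(),
    Some(ParseError::FieldIdIsEmpty)
  );
}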
/// Represents the data of a cell.
#[derive(Debug, Default, ProtoBuf)]
pub struct CellPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub row_id: String,
#[pb(index = 2)]
pub row_id: String,
/// Encodes the data using the helper struct `CellProtobufBlob`.
/// See `CellProtobufBlob` for more information.
#[pb(index = 3)]
pub data: Vec<u8>,
/// Encodes the data using the helper struct `CellProtobufBlob`.
/// See `CellProtobufBlob` for more information.
#[pb(index = 3)]
pub data: Vec<u8>,
/// The field_type will be None if the field with the given field_id is not found
#[pb(index = 4, one_of)]
pub field_type: Option<FieldType>,
/// The field_type will be None if the field with the given field_id is not found
#[pb(index = 4, one_of)]
pub field_type: Option<FieldType>,
}
impl CellPB {
pub fn new(field_id: &str, row_id: &str, field_type: FieldType, data: Vec<u8>) -> Self {
Self {
field_id: field_id.to_owned(),
row_id: row_id.to_string(),
data,
field_type: Some(field_type),
}
pub fn new(field_id: &str, row_id: &str, field_type: FieldType, data: Vec<u8>) -> Self {
Self {
field_id: field_id.to_owned(),
row_id: row_id.to_string(),
data,
field_type: Some(field_type),
}
}
pub fn empty(field_id: &str, row_id: &str) -> Self {
Self {
field_id: field_id.to_owned(),
row_id: row_id.to_owned(),
data: vec![],
field_type: None,
}
pub fn empty(field_id: &str, row_id: &str) -> Self {
Self {
field_id: field_id.to_owned(),
row_id: row_id.to_owned(),
data: vec![],
field_type: None,
}
}
}
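
The `new`/`empty` pair above encodes the contract from the doc comments: `data` holds an opaque, protobuf-encoded payload and `field_type` is `None` when the field cannot be resolved. A hedged sketch of how a caller might branch on that contract, with `Cell` standing in for the generated `CellPB`:

#[derive(Debug)]
enum FieldType {
  RichText,
  Number,
}

struct Cell {
  field_id: String,
  row_id: String,
  data: Vec<u8>,
  field_type: Option<FieldType>,
}

fn describe(cell: &Cell) -> String {
  match &cell.field_type {
    // A resolved field carries an encoded payload for a downstream decoder.
    Some(ty) => format!(
      "cell {}/{}: {:?}, {} bytes",
      cell.row_id,
      cell.field_id,
      ty,
      cell.data.len()
    ),
    // Cells built with `empty` (field not found) carry no data at all.
    None => format!("cell {}/{}: field not found", cell.row_id, cell.field_id),
  }
}

fn main() {
  let cell = Cell {
    field_id: "f1".to_owned(),
    row_id: "r1".to_owned(),
    data: vec![1, 2, 3],
    field_type: Some(FieldType::RichText),
  };
  println!("{}", describe(&cell));
}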
#[derive(Debug, Default, ProtoBuf)]
pub struct RepeatedCellPB {
#[pb(index = 1)]
pub items: Vec<CellPB>,
#[pb(index = 1)]
pub items: Vec<CellPB>,
}
impl std::ops::Deref for RepeatedCellPB {
type Target = Vec<CellPB>;
fn deref(&self) -> &Self::Target {
&self.items
}
type Target = Vec<CellPB>;
fn deref(&self) -> &Self::Target {
&self.items
}
}
impl std::ops::DerefMut for RepeatedCellPB {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.items
}
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.items
}
}
impl std::convert::From<Vec<CellPB>> for RepeatedCellPB {
fn from(items: Vec<CellPB>) -> Self {
Self { items }
}
fn from(items: Vec<CellPB>) -> Self {
Self { items }
}
}
///
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct CellChangesetPB {
#[pb(index = 1)]
pub database_id: String,
#[pb(index = 1)]
pub database_id: String,
#[pb(index = 2)]
pub row_id: String,
#[pb(index = 2)]
pub row_id: String,
#[pb(index = 3)]
pub field_id: String,
#[pb(index = 3)]
pub field_id: String,
#[pb(index = 4)]
pub type_cell_data: String,
#[pb(index = 4)]
pub type_cell_data: String,
}
impl std::convert::From<CellChangesetPB> for RowChangeset {
fn from(changeset: CellChangesetPB) -> Self {
let mut cell_by_field_id = HashMap::with_capacity(1);
let field_id = changeset.field_id;
let cell_rev = CellRevision {
type_cell_data: changeset.type_cell_data,
};
cell_by_field_id.insert(field_id, cell_rev);
fn from(changeset: CellChangesetPB) -> Self {
let mut cell_by_field_id = HashMap::with_capacity(1);
let field_id = changeset.field_id;
let cell_rev = CellRevision {
type_cell_data: changeset.type_cell_data,
};
cell_by_field_id.insert(field_id, cell_rev);
RowChangeset {
row_id: changeset.row_id,
height: None,
visibility: None,
cell_by_field_id,
}
RowChangeset {
row_id: changeset.row_id,
height: None,
visibility: None,
cell_by_field_id,
}
}
}

View File

@ -5,58 +5,58 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct CheckboxFilterPB {
#[pb(index = 1)]
pub condition: CheckboxFilterConditionPB,
#[pb(index = 1)]
pub condition: CheckboxFilterConditionPB,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum CheckboxFilterConditionPB {
IsChecked = 0,
IsUnChecked = 1,
IsChecked = 0,
IsUnChecked = 1,
}
impl std::convert::From<CheckboxFilterConditionPB> for u32 {
fn from(value: CheckboxFilterConditionPB) -> Self {
value as u32
}
fn from(value: CheckboxFilterConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for CheckboxFilterConditionPB {
fn default() -> Self {
CheckboxFilterConditionPB::IsChecked
}
fn default() -> Self {
CheckboxFilterConditionPB::IsChecked
}
}
impl std::convert::TryFrom<u8> for CheckboxFilterConditionPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(CheckboxFilterConditionPB::IsChecked),
1 => Ok(CheckboxFilterConditionPB::IsUnChecked),
_ => Err(ErrorCode::InvalidData),
}
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(CheckboxFilterConditionPB::IsChecked),
1 => Ok(CheckboxFilterConditionPB::IsUnChecked),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for CheckboxFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
CheckboxFilterPB {
condition: CheckboxFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(CheckboxFilterConditionPB::IsChecked),
}
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
CheckboxFilterPB {
condition: CheckboxFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(CheckboxFilterConditionPB::IsChecked),
}
}
}
impl std::convert::From<&FilterRevision> for CheckboxFilterPB {
fn from(rev: &FilterRevision) -> Self {
CheckboxFilterPB {
condition: CheckboxFilterConditionPB::try_from(rev.condition)
.unwrap_or(CheckboxFilterConditionPB::IsChecked),
}
fn from(rev: &FilterRevision) -> Self {
CheckboxFilterPB {
condition: CheckboxFilterConditionPB::try_from(rev.condition)
.unwrap_or(CheckboxFilterConditionPB::IsChecked),
}
}
}
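
Two conversion paths appear above: `TryFrom<u8>` rejects unknown discriminants with `ErrorCode::InvalidData`, while `from_filter_rev` degrades gracefully to the default condition via `unwrap_or`. A self-contained sketch of that fallback behavior:

#[derive(Debug, Clone, Copy, PartialEq)]
enum CheckboxCondition {
  IsChecked = 0,
  IsUnChecked = 1,
}

impl TryFrom<u8> for CheckboxCondition {
  type Error = ();

  fn try_from(value: u8) -> Result<Self, Self::Error> {
    match value {
      0 => Ok(CheckboxCondition::IsChecked),
      1 => Ok(CheckboxCondition::IsUnChecked),
      _ => Err(()),
    }
  }
}

fn main() {
  // A stored revision with a stale or unknown discriminant falls back to the
  // default, mirroring the `unwrap_or(...IsChecked)` calls above.
  let from_rev = |raw: u8| CheckboxCondition::try_from(raw).unwrap_or(CheckboxCondition::IsChecked);
  assert_eq!(from_rev(1), CheckboxCondition::IsUnChecked);
  assert_eq!(from_rev(9), CheckboxCondition::IsChecked);
}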

View File

@ -5,58 +5,58 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct ChecklistFilterPB {
#[pb(index = 1)]
pub condition: ChecklistFilterConditionPB,
#[pb(index = 1)]
pub condition: ChecklistFilterConditionPB,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum ChecklistFilterConditionPB {
IsComplete = 0,
IsIncomplete = 1,
IsComplete = 0,
IsIncomplete = 1,
}
impl std::convert::From<ChecklistFilterConditionPB> for u32 {
fn from(value: ChecklistFilterConditionPB) -> Self {
value as u32
}
fn from(value: ChecklistFilterConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for ChecklistFilterConditionPB {
fn default() -> Self {
ChecklistFilterConditionPB::IsIncomplete
}
fn default() -> Self {
ChecklistFilterConditionPB::IsIncomplete
}
}
impl std::convert::TryFrom<u8> for ChecklistFilterConditionPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(ChecklistFilterConditionPB::IsComplete),
1 => Ok(ChecklistFilterConditionPB::IsIncomplete),
_ => Err(ErrorCode::InvalidData),
}
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(ChecklistFilterConditionPB::IsComplete),
1 => Ok(ChecklistFilterConditionPB::IsIncomplete),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for ChecklistFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
ChecklistFilterPB {
condition: ChecklistFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(ChecklistFilterConditionPB::IsIncomplete),
}
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
ChecklistFilterPB {
condition: ChecklistFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(ChecklistFilterConditionPB::IsIncomplete),
}
}
}
impl std::convert::From<&FilterRevision> for ChecklistFilterPB {
fn from(rev: &FilterRevision) -> Self {
ChecklistFilterPB {
condition: ChecklistFilterConditionPB::try_from(rev.condition)
.unwrap_or(ChecklistFilterConditionPB::IsIncomplete),
}
fn from(rev: &FilterRevision) -> Self {
ChecklistFilterPB {
condition: ChecklistFilterConditionPB::try_from(rev.condition)
.unwrap_or(ChecklistFilterConditionPB::IsIncomplete),
}
}
}

View File

@ -7,114 +7,116 @@ use std::str::FromStr;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct DateFilterPB {
#[pb(index = 1)]
pub condition: DateFilterConditionPB,
#[pb(index = 1)]
pub condition: DateFilterConditionPB,
#[pb(index = 2, one_of)]
pub start: Option<i64>,
#[pb(index = 2, one_of)]
pub start: Option<i64>,
#[pb(index = 3, one_of)]
pub end: Option<i64>,
#[pb(index = 3, one_of)]
pub end: Option<i64>,
#[pb(index = 4, one_of)]
pub timestamp: Option<i64>,
#[pb(index = 4, one_of)]
pub timestamp: Option<i64>,
}
#[derive(Deserialize, Serialize, Default, Clone, Debug)]
pub struct DateFilterContentPB {
pub start: Option<i64>,
pub end: Option<i64>,
pub timestamp: Option<i64>,
pub start: Option<i64>,
pub end: Option<i64>,
pub timestamp: Option<i64>,
}
impl ToString for DateFilterContentPB {
fn to_string(&self) -> String {
serde_json::to_string(self).unwrap()
}
fn to_string(&self) -> String {
serde_json::to_string(self).unwrap()
}
}
impl FromStr for DateFilterContentPB {
type Err = serde_json::Error;
type Err = serde_json::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
serde_json::from_str(s)
}
fn from_str(s: &str) -> Result<Self, Self::Err> {
serde_json::from_str(s)
}
}
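
`DateFilterContentPB` round-trips its three optional timestamps through JSON: `to_string` serializes with `serde_json` (the `unwrap` assumes serialization of this plain struct cannot fail) and `from_str` parses the stored string back. A sketch of the round trip, assuming the `serde` (with the derive feature) and `serde_json` crates:

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Default, Debug, PartialEq)]
struct DateFilterContent {
  start: Option<i64>,
  end: Option<i64>,
  timestamp: Option<i64>,
}

fn main() {
  let content = DateFilterContent {
    start: Some(1_672_531_200),
    end: None,
    timestamp: None,
  };
  // Serialize into the string that would be stored as `FilterRevision::content`.
  let raw = serde_json::to_string(&content).unwrap();
  // Parse it back the way `DateFilterContentPB::from_str` does.
  let parsed: DateFilterContent = serde_json::from_str(&raw).unwrap();
  assert_eq!(content, parsed);
}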
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum DateFilterConditionPB {
DateIs = 0,
DateBefore = 1,
DateAfter = 2,
DateOnOrBefore = 3,
DateOnOrAfter = 4,
DateWithIn = 5,
DateIsEmpty = 6,
DateIsNotEmpty = 7,
DateIs = 0,
DateBefore = 1,
DateAfter = 2,
DateOnOrBefore = 3,
DateOnOrAfter = 4,
DateWithIn = 5,
DateIsEmpty = 6,
DateIsNotEmpty = 7,
}
impl std::convert::From<DateFilterConditionPB> for u32 {
fn from(value: DateFilterConditionPB) -> Self {
value as u32
}
fn from(value: DateFilterConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for DateFilterConditionPB {
fn default() -> Self {
DateFilterConditionPB::DateIs
}
fn default() -> Self {
DateFilterConditionPB::DateIs
}
}
impl std::convert::TryFrom<u8> for DateFilterConditionPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(DateFilterConditionPB::DateIs),
1 => Ok(DateFilterConditionPB::DateBefore),
2 => Ok(DateFilterConditionPB::DateAfter),
3 => Ok(DateFilterConditionPB::DateOnOrBefore),
4 => Ok(DateFilterConditionPB::DateOnOrAfter),
5 => Ok(DateFilterConditionPB::DateWithIn),
6 => Ok(DateFilterConditionPB::DateIsEmpty),
_ => Err(ErrorCode::InvalidData),
}
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(DateFilterConditionPB::DateIs),
1 => Ok(DateFilterConditionPB::DateBefore),
2 => Ok(DateFilterConditionPB::DateAfter),
3 => Ok(DateFilterConditionPB::DateOnOrBefore),
4 => Ok(DateFilterConditionPB::DateOnOrAfter),
5 => Ok(DateFilterConditionPB::DateWithIn),
6 => Ok(DateFilterConditionPB::DateIsEmpty),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for DateFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
let condition = DateFilterConditionPB::try_from(filter_rev.condition).unwrap_or(DateFilterConditionPB::DateIs);
let mut filter = DateFilterPB {
condition,
..Default::default()
};
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
let condition = DateFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(DateFilterConditionPB::DateIs);
let mut filter = DateFilterPB {
condition,
..Default::default()
};
if let Ok(content) = DateFilterContentPB::from_str(&filter_rev.content) {
filter.start = content.start;
filter.end = content.end;
filter.timestamp = content.timestamp;
};
if let Ok(content) = DateFilterContentPB::from_str(&filter_rev.content) {
filter.start = content.start;
filter.end = content.end;
filter.timestamp = content.timestamp;
};
filter
}
filter
}
}
impl std::convert::From<&FilterRevision> for DateFilterPB {
fn from(rev: &FilterRevision) -> Self {
let condition = DateFilterConditionPB::try_from(rev.condition).unwrap_or(DateFilterConditionPB::DateIs);
let mut filter = DateFilterPB {
condition,
..Default::default()
};
fn from(rev: &FilterRevision) -> Self {
let condition =
DateFilterConditionPB::try_from(rev.condition).unwrap_or(DateFilterConditionPB::DateIs);
let mut filter = DateFilterPB {
condition,
..Default::default()
};
if let Ok(content) = DateFilterContentPB::from_str(&rev.content) {
filter.start = content.start;
filter.end = content.end;
filter.timestamp = content.timestamp;
};
if let Ok(content) = DateFilterContentPB::from_str(&rev.content) {
filter.start = content.start;
filter.end = content.end;
filter.timestamp = content.timestamp;
};
filter
}
filter
}
}

View File

@ -3,52 +3,52 @@ use flowy_derive::ProtoBuf;
#[derive(Debug, Default, ProtoBuf)]
pub struct FilterChangesetNotificationPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub insert_filters: Vec<FilterPB>,
#[pb(index = 2)]
pub insert_filters: Vec<FilterPB>,
#[pb(index = 3)]
pub delete_filters: Vec<FilterPB>,
#[pb(index = 3)]
pub delete_filters: Vec<FilterPB>,
#[pb(index = 4)]
pub update_filters: Vec<UpdatedFilter>,
#[pb(index = 4)]
pub update_filters: Vec<UpdatedFilter>,
}
#[derive(Debug, Default, ProtoBuf)]
pub struct UpdatedFilter {
#[pb(index = 1)]
pub filter_id: String,
#[pb(index = 1)]
pub filter_id: String,
#[pb(index = 2, one_of)]
pub filter: Option<FilterPB>,
#[pb(index = 2, one_of)]
pub filter: Option<FilterPB>,
}
impl FilterChangesetNotificationPB {
pub fn from_insert(view_id: &str, filters: Vec<FilterPB>) -> Self {
Self {
view_id: view_id.to_string(),
insert_filters: filters,
delete_filters: Default::default(),
update_filters: Default::default(),
}
pub fn from_insert(view_id: &str, filters: Vec<FilterPB>) -> Self {
Self {
view_id: view_id.to_string(),
insert_filters: filters,
delete_filters: Default::default(),
update_filters: Default::default(),
}
pub fn from_delete(view_id: &str, filters: Vec<FilterPB>) -> Self {
Self {
view_id: view_id.to_string(),
insert_filters: Default::default(),
delete_filters: filters,
update_filters: Default::default(),
}
}
pub fn from_delete(view_id: &str, filters: Vec<FilterPB>) -> Self {
Self {
view_id: view_id.to_string(),
insert_filters: Default::default(),
delete_filters: filters,
update_filters: Default::default(),
}
}
pub fn from_update(view_id: &str, filters: Vec<UpdatedFilter>) -> Self {
Self {
view_id: view_id.to_string(),
insert_filters: Default::default(),
delete_filters: Default::default(),
update_filters: filters,
}
pub fn from_update(view_id: &str, filters: Vec<UpdatedFilter>) -> Self {
Self {
view_id: view_id.to_string(),
insert_filters: Default::default(),
delete_filters: Default::default(),
update_filters: filters,
}
}
}

View File

@ -5,72 +5,73 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct NumberFilterPB {
#[pb(index = 1)]
pub condition: NumberFilterConditionPB,
#[pb(index = 1)]
pub condition: NumberFilterConditionPB,
#[pb(index = 2)]
pub content: String,
#[pb(index = 2)]
pub content: String,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum NumberFilterConditionPB {
Equal = 0,
NotEqual = 1,
GreaterThan = 2,
LessThan = 3,
GreaterThanOrEqualTo = 4,
LessThanOrEqualTo = 5,
NumberIsEmpty = 6,
NumberIsNotEmpty = 7,
Equal = 0,
NotEqual = 1,
GreaterThan = 2,
LessThan = 3,
GreaterThanOrEqualTo = 4,
LessThanOrEqualTo = 5,
NumberIsEmpty = 6,
NumberIsNotEmpty = 7,
}
impl std::default::Default for NumberFilterConditionPB {
fn default() -> Self {
NumberFilterConditionPB::Equal
}
fn default() -> Self {
NumberFilterConditionPB::Equal
}
}
impl std::convert::From<NumberFilterConditionPB> for u32 {
fn from(value: NumberFilterConditionPB) -> Self {
value as u32
}
fn from(value: NumberFilterConditionPB) -> Self {
value as u32
}
}
impl std::convert::TryFrom<u8> for NumberFilterConditionPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_from(n: u8) -> Result<Self, Self::Error> {
match n {
0 => Ok(NumberFilterConditionPB::Equal),
1 => Ok(NumberFilterConditionPB::NotEqual),
2 => Ok(NumberFilterConditionPB::GreaterThan),
3 => Ok(NumberFilterConditionPB::LessThan),
4 => Ok(NumberFilterConditionPB::GreaterThanOrEqualTo),
5 => Ok(NumberFilterConditionPB::LessThanOrEqualTo),
6 => Ok(NumberFilterConditionPB::NumberIsEmpty),
7 => Ok(NumberFilterConditionPB::NumberIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
fn try_from(n: u8) -> Result<Self, Self::Error> {
match n {
0 => Ok(NumberFilterConditionPB::Equal),
1 => Ok(NumberFilterConditionPB::NotEqual),
2 => Ok(NumberFilterConditionPB::GreaterThan),
3 => Ok(NumberFilterConditionPB::LessThan),
4 => Ok(NumberFilterConditionPB::GreaterThanOrEqualTo),
5 => Ok(NumberFilterConditionPB::LessThanOrEqualTo),
6 => Ok(NumberFilterConditionPB::NumberIsEmpty),
7 => Ok(NumberFilterConditionPB::NumberIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for NumberFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
NumberFilterPB {
condition: NumberFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(NumberFilterConditionPB::Equal),
content: filter_rev.content.clone(),
}
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
NumberFilterPB {
condition: NumberFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(NumberFilterConditionPB::Equal),
content: filter_rev.content.clone(),
}
}
}
impl std::convert::From<&FilterRevision> for NumberFilterPB {
fn from(rev: &FilterRevision) -> Self {
NumberFilterPB {
condition: NumberFilterConditionPB::try_from(rev.condition).unwrap_or(NumberFilterConditionPB::Equal),
content: rev.content.clone(),
}
fn from(rev: &FilterRevision) -> Self {
NumberFilterPB {
condition: NumberFilterConditionPB::try_from(rev.condition)
.unwrap_or(NumberFilterConditionPB::Equal),
content: rev.content.clone(),
}
}
}

View File

@ -6,67 +6,68 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct SelectOptionFilterPB {
#[pb(index = 1)]
pub condition: SelectOptionConditionPB,
#[pb(index = 1)]
pub condition: SelectOptionConditionPB,
#[pb(index = 2)]
pub option_ids: Vec<String>,
#[pb(index = 2)]
pub option_ids: Vec<String>,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum SelectOptionConditionPB {
OptionIs = 0,
OptionIsNot = 1,
OptionIsEmpty = 2,
OptionIsNotEmpty = 3,
OptionIs = 0,
OptionIsNot = 1,
OptionIsEmpty = 2,
OptionIsNotEmpty = 3,
}
impl std::convert::From<SelectOptionConditionPB> for u32 {
fn from(value: SelectOptionConditionPB) -> Self {
value as u32
}
fn from(value: SelectOptionConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for SelectOptionConditionPB {
fn default() -> Self {
SelectOptionConditionPB::OptionIs
}
fn default() -> Self {
SelectOptionConditionPB::OptionIs
}
}
impl std::convert::TryFrom<u8> for SelectOptionConditionPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(SelectOptionConditionPB::OptionIs),
1 => Ok(SelectOptionConditionPB::OptionIsNot),
2 => Ok(SelectOptionConditionPB::OptionIsEmpty),
3 => Ok(SelectOptionConditionPB::OptionIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(SelectOptionConditionPB::OptionIs),
1 => Ok(SelectOptionConditionPB::OptionIsNot),
2 => Ok(SelectOptionConditionPB::OptionIsEmpty),
3 => Ok(SelectOptionConditionPB::OptionIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for SelectOptionFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
let ids = SelectOptionIds::from(filter_rev.content.clone());
SelectOptionFilterPB {
condition: SelectOptionConditionPB::try_from(filter_rev.condition)
.unwrap_or(SelectOptionConditionPB::OptionIs),
option_ids: ids.into_inner(),
}
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
let ids = SelectOptionIds::from(filter_rev.content.clone());
SelectOptionFilterPB {
condition: SelectOptionConditionPB::try_from(filter_rev.condition)
.unwrap_or(SelectOptionConditionPB::OptionIs),
option_ids: ids.into_inner(),
}
}
}
impl std::convert::From<&FilterRevision> for SelectOptionFilterPB {
fn from(rev: &FilterRevision) -> Self {
let ids = SelectOptionIds::from(rev.content.clone());
SelectOptionFilterPB {
condition: SelectOptionConditionPB::try_from(rev.condition).unwrap_or(SelectOptionConditionPB::OptionIs),
option_ids: ids.into_inner(),
}
fn from(rev: &FilterRevision) -> Self {
let ids = SelectOptionIds::from(rev.content.clone());
SelectOptionFilterPB {
condition: SelectOptionConditionPB::try_from(rev.condition)
.unwrap_or(SelectOptionConditionPB::OptionIs),
option_ids: ids.into_inner(),
}
}
}

View File

@ -5,73 +5,75 @@ use grid_model::FilterRevision;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct TextFilterPB {
#[pb(index = 1)]
pub condition: TextFilterConditionPB,
#[pb(index = 1)]
pub condition: TextFilterConditionPB,
#[pb(index = 2)]
pub content: String,
#[pb(index = 2)]
pub content: String,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum TextFilterConditionPB {
Is = 0,
IsNot = 1,
Contains = 2,
DoesNotContain = 3,
StartsWith = 4,
EndsWith = 5,
TextIsEmpty = 6,
TextIsNotEmpty = 7,
Is = 0,
IsNot = 1,
Contains = 2,
DoesNotContain = 3,
StartsWith = 4,
EndsWith = 5,
TextIsEmpty = 6,
TextIsNotEmpty = 7,
}
impl std::convert::From<TextFilterConditionPB> for u32 {
fn from(value: TextFilterConditionPB) -> Self {
value as u32
}
fn from(value: TextFilterConditionPB) -> Self {
value as u32
}
}
impl std::default::Default for TextFilterConditionPB {
fn default() -> Self {
TextFilterConditionPB::Is
}
fn default() -> Self {
TextFilterConditionPB::Is
}
}
impl std::convert::TryFrom<u8> for TextFilterConditionPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(TextFilterConditionPB::Is),
1 => Ok(TextFilterConditionPB::IsNot),
2 => Ok(TextFilterConditionPB::Contains),
3 => Ok(TextFilterConditionPB::DoesNotContain),
4 => Ok(TextFilterConditionPB::StartsWith),
5 => Ok(TextFilterConditionPB::EndsWith),
6 => Ok(TextFilterConditionPB::TextIsEmpty),
7 => Ok(TextFilterConditionPB::TextIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(TextFilterConditionPB::Is),
1 => Ok(TextFilterConditionPB::IsNot),
2 => Ok(TextFilterConditionPB::Contains),
3 => Ok(TextFilterConditionPB::DoesNotContain),
4 => Ok(TextFilterConditionPB::StartsWith),
5 => Ok(TextFilterConditionPB::EndsWith),
6 => Ok(TextFilterConditionPB::TextIsEmpty),
7 => Ok(TextFilterConditionPB::TextIsNotEmpty),
_ => Err(ErrorCode::InvalidData),
}
}
}
impl FromFilterString for TextFilterPB {
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
TextFilterPB {
condition: TextFilterConditionPB::try_from(filter_rev.condition).unwrap_or(TextFilterConditionPB::Is),
content: filter_rev.content.clone(),
}
fn from_filter_rev(filter_rev: &FilterRevision) -> Self
where
Self: Sized,
{
TextFilterPB {
condition: TextFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(TextFilterConditionPB::Is),
content: filter_rev.content.clone(),
}
}
}
impl std::convert::From<&FilterRevision> for TextFilterPB {
fn from(rev: &FilterRevision) -> Self {
TextFilterPB {
condition: TextFilterConditionPB::try_from(rev.condition).unwrap_or(TextFilterConditionPB::Is),
content: rev.content.clone(),
}
fn from(rev: &FilterRevision) -> Self {
TextFilterPB {
condition: TextFilterConditionPB::try_from(rev.condition)
.unwrap_or(TextFilterConditionPB::Is),
content: rev.content.clone(),
}
}
}

View File

@ -1,7 +1,7 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{
CheckboxFilterPB, ChecklistFilterPB, DateFilterContentPB, DateFilterPB, FieldType, NumberFilterPB,
SelectOptionFilterPB, TextFilterPB,
CheckboxFilterPB, ChecklistFilterPB, DateFilterContentPB, DateFilterPB, FieldType,
NumberFilterPB, SelectOptionFilterPB, TextFilterPB,
};
use crate::services::field::SelectOptionIds;
use crate::services::filter::FilterType;
@ -14,217 +14,221 @@ use std::sync::Arc;
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct FilterPB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 4)]
pub data: Vec<u8>,
#[pb(index = 4)]
pub data: Vec<u8>,
}
impl std::convert::From<&FilterRevision> for FilterPB {
fn from(rev: &FilterRevision) -> Self {
let field_type: FieldType = rev.field_type.into();
let bytes: Bytes = match field_type {
FieldType::RichText => TextFilterPB::from(rev).try_into().unwrap(),
FieldType::Number => NumberFilterPB::from(rev).try_into().unwrap(),
FieldType::DateTime => DateFilterPB::from(rev).try_into().unwrap(),
FieldType::SingleSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(),
FieldType::MultiSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(),
FieldType::Checklist => ChecklistFilterPB::from(rev).try_into().unwrap(),
FieldType::Checkbox => CheckboxFilterPB::from(rev).try_into().unwrap(),
FieldType::URL => TextFilterPB::from(rev).try_into().unwrap(),
};
Self {
id: rev.id.clone(),
field_id: rev.field_id.clone(),
field_type: rev.field_type.into(),
data: bytes.to_vec(),
}
fn from(rev: &FilterRevision) -> Self {
let field_type: FieldType = rev.field_type.into();
let bytes: Bytes = match field_type {
FieldType::RichText => TextFilterPB::from(rev).try_into().unwrap(),
FieldType::Number => NumberFilterPB::from(rev).try_into().unwrap(),
FieldType::DateTime => DateFilterPB::from(rev).try_into().unwrap(),
FieldType::SingleSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(),
FieldType::MultiSelect => SelectOptionFilterPB::from(rev).try_into().unwrap(),
FieldType::Checklist => ChecklistFilterPB::from(rev).try_into().unwrap(),
FieldType::Checkbox => CheckboxFilterPB::from(rev).try_into().unwrap(),
FieldType::URL => TextFilterPB::from(rev).try_into().unwrap(),
};
Self {
id: rev.id.clone(),
field_id: rev.field_id.clone(),
field_type: rev.field_type.into(),
data: bytes.to_vec(),
}
}
}
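
The conversion above dispatches on `field_type` to pick which typed filter gets serialized into `data`, so the bytes are only meaningful together with the tag that produced them. A minimal sketch of that tag-plus-bytes envelope, using plain UTF-8 instead of protobuf:

#[derive(Debug, Clone, Copy)]
enum FieldKind {
  RichText,
  Checkbox,
}

struct FilterEnvelope {
  field_kind: FieldKind,
  data: Vec<u8>,
}

fn encode(field_kind: FieldKind, payload: &str) -> FilterEnvelope {
  // Stand-in for `TextFilterPB::from(rev).try_into().unwrap()` and friends:
  // the payload is opaque bytes tagged with the field kind that produced it.
  FilterEnvelope {
    field_kind,
    data: payload.as_bytes().to_vec(),
  }
}

fn decode(envelope: &FilterEnvelope) -> String {
  // The receiver must dispatch on the same tag to pick the right decoder.
  match envelope.field_kind {
    FieldKind::RichText => format!("text filter: {}", String::from_utf8_lossy(&envelope.data)),
    FieldKind::Checkbox => format!("checkbox filter ({} bytes)", envelope.data.len()),
  }
}

fn main() {
  let envelope = encode(FieldKind::RichText, "contains:flowy");
  println!("{}", decode(&envelope));
}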
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedFilterPB {
#[pb(index = 1)]
pub items: Vec<FilterPB>,
#[pb(index = 1)]
pub items: Vec<FilterPB>,
}
impl std::convert::From<Vec<Arc<FilterRevision>>> for RepeatedFilterPB {
fn from(revs: Vec<Arc<FilterRevision>>) -> Self {
RepeatedFilterPB {
items: revs.into_iter().map(|rev| rev.as_ref().into()).collect(),
}
fn from(revs: Vec<Arc<FilterRevision>>) -> Self {
RepeatedFilterPB {
items: revs.into_iter().map(|rev| rev.as_ref().into()).collect(),
}
}
}
impl std::convert::From<Vec<FilterPB>> for RepeatedFilterPB {
fn from(items: Vec<FilterPB>) -> Self {
Self { items }
}
fn from(items: Vec<FilterPB>) -> Self {
Self { items }
}
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct DeleteFilterPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub field_type: FieldType,
#[pb(index = 2)]
pub field_type: FieldType,
#[pb(index = 3)]
pub filter_id: String,
#[pb(index = 3)]
pub filter_id: String,
#[pb(index = 4)]
pub view_id: String,
#[pb(index = 4)]
pub view_id: String,
}
impl TryInto<DeleteFilterParams> for DeleteFilterPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<DeleteFilterParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
fn try_into(self) -> Result<DeleteFilterParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let filter_id = NotEmptyStr::parse(self.filter_id)
.map_err(|_| ErrorCode::UnexpectedEmptyString)?
.0;
let filter_id = NotEmptyStr::parse(self.filter_id)
.map_err(|_| ErrorCode::UnexpectedEmptyString)?
.0;
let filter_type = FilterType {
field_id,
field_type: self.field_type,
};
let filter_type = FilterType {
field_id,
field_type: self.field_type,
};
Ok(DeleteFilterParams {
view_id,
filter_id,
filter_type,
})
}
Ok(DeleteFilterParams {
view_id,
filter_id,
filter_type,
})
}
}
#[derive(Debug)]
pub struct DeleteFilterParams {
pub view_id: String,
pub filter_type: FilterType,
pub filter_id: String,
pub view_id: String,
pub filter_type: FilterType,
pub filter_id: String,
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct AlterFilterPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub field_type: FieldType,
#[pb(index = 2)]
pub field_type: FieldType,
/// Create a new filter if the filter_id is None
#[pb(index = 3, one_of)]
pub filter_id: Option<String>,
/// Create a new filter if the filter_id is None
#[pb(index = 3, one_of)]
pub filter_id: Option<String>,
#[pb(index = 4)]
pub data: Vec<u8>,
#[pb(index = 4)]
pub data: Vec<u8>,
#[pb(index = 5)]
pub view_id: String,
#[pb(index = 5)]
pub view_id: String,
}
impl AlterFilterPayloadPB {
#[allow(dead_code)]
pub fn new<T: TryInto<Bytes, Error = ::protobuf::ProtobufError>>(
view_id: &str,
field_rev: &FieldRevision,
data: T,
) -> Self {
let data = data.try_into().unwrap_or_else(|_| Bytes::new());
Self {
view_id: view_id.to_owned(),
field_id: field_rev.id.clone(),
field_type: field_rev.ty.into(),
filter_id: None,
data: data.to_vec(),
}
#[allow(dead_code)]
pub fn new<T: TryInto<Bytes, Error = ::protobuf::ProtobufError>>(
view_id: &str,
field_rev: &FieldRevision,
data: T,
) -> Self {
let data = data.try_into().unwrap_or_else(|_| Bytes::new());
Self {
view_id: view_id.to_owned(),
field_id: field_rev.id.clone(),
field_type: field_rev.ty.into(),
filter_id: None,
data: data.to_vec(),
}
}
}
impl TryInto<AlterFilterParams> for AlterFilterPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<AlterFilterParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
fn try_into(self) -> Result<AlterFilterParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let filter_id = match self.filter_id {
None => None,
Some(filter_id) => Some(NotEmptyStr::parse(filter_id).map_err(|_| ErrorCode::FilterIdIsEmpty)?.0),
};
let condition;
let mut content = "".to_string();
let bytes: &[u8] = self.data.as_ref();
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let filter_id = match self.filter_id {
None => None,
Some(filter_id) => Some(
NotEmptyStr::parse(filter_id)
.map_err(|_| ErrorCode::FilterIdIsEmpty)?
.0,
),
};
let condition;
let mut content = "".to_string();
let bytes: &[u8] = self.data.as_ref();
match self.field_type {
FieldType::RichText | FieldType::URL => {
let filter = TextFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = filter.content;
}
FieldType::Checkbox => {
let filter = CheckboxFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
}
FieldType::Number => {
let filter = NumberFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = filter.content;
}
FieldType::DateTime => {
let filter = DateFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = DateFilterContentPB {
start: filter.start,
end: filter.end,
timestamp: filter.timestamp,
}
.to_string();
}
FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Checklist => {
let filter = SelectOptionFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = SelectOptionIds::from(filter.option_ids).to_string();
}
match self.field_type {
FieldType::RichText | FieldType::URL => {
let filter = TextFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = filter.content;
},
FieldType::Checkbox => {
let filter = CheckboxFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
},
FieldType::Number => {
let filter = NumberFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = filter.content;
},
FieldType::DateTime => {
let filter = DateFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = DateFilterContentPB {
start: filter.start,
end: filter.end,
timestamp: filter.timestamp,
}
Ok(AlterFilterParams {
view_id,
field_id,
filter_id,
field_type: self.field_type.into(),
condition,
content,
})
.to_string();
},
FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Checklist => {
let filter = SelectOptionFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = SelectOptionIds::from(filter.option_ids).to_string();
},
}
Ok(AlterFilterParams {
view_id,
field_id,
filter_id,
field_type: self.field_type.into(),
condition,
content,
})
}
}
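
`try_into` above flattens every typed filter into the `(condition: u8, content: String)` pair stored on a `FilterRevision`: text and number filters keep their content string, date filters re-encode their fields as JSON, and select-option filters collapse their option ids into one string. A sketch of the select-option case with stand-in types, assuming (as `SelectOptionIds::to_string` appears to do) a comma-separated encoding:

struct SelectOptionFilter {
  condition: u8,
  option_ids: Vec<String>,
}

struct AlterFilterParams {
  condition: u8,
  content: String,
}

fn flatten(filter: SelectOptionFilter) -> AlterFilterParams {
  AlterFilterParams {
    condition: filter.condition,
    // Assumed encoding: option ids joined into a single comma-separated string,
    // standing in for `SelectOptionIds::from(filter.option_ids).to_string()`.
    content: filter.option_ids.join(","),
  }
}

fn main() {
  let params = flatten(SelectOptionFilter {
    condition: 0,
    option_ids: vec!["opt-a".to_owned(), "opt-b".to_owned()],
  });
  assert_eq!(params.condition, 0);
  assert_eq!(params.content, "opt-a,opt-b");
}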
#[derive(Debug)]
pub struct AlterFilterParams {
pub view_id: String,
pub field_id: String,
/// Create a new filter if the filter_id is None
pub filter_id: Option<String>,
pub field_type: FieldTypeRevision,
pub condition: u8,
pub content: String,
pub view_id: String,
pub field_id: String,
/// Create a new filter if the filter_id is None
pub filter_id: Option<String>,
pub field_type: FieldTypeRevision,
pub condition: u8,
pub content: String,
}

View File

@ -6,145 +6,150 @@ use flowy_error::ErrorCode;
/// [DatabasePB] describes how many fields and blocks the grid has.
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct DatabasePB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub fields: Vec<FieldIdPB>,
#[pb(index = 2)]
pub fields: Vec<FieldIdPB>,
#[pb(index = 3)]
pub rows: Vec<RowPB>,
#[pb(index = 3)]
pub rows: Vec<RowPB>,
}
#[derive(ProtoBuf, Default)]
pub struct CreateDatabasePayloadPB {
#[pb(index = 1)]
pub name: String,
#[pb(index = 1)]
pub name: String,
}
#[derive(Clone, ProtoBuf, Default, Debug)]
pub struct DatabaseIdPB {
#[pb(index = 1)]
pub value: String,
#[pb(index = 1)]
pub value: String,
}
impl AsRef<str> for DatabaseIdPB {
fn as_ref(&self) -> &str {
&self.value
}
fn as_ref(&self) -> &str {
&self.value
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveFieldPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub from_index: i32,
#[pb(index = 3)]
pub from_index: i32,
#[pb(index = 4)]
pub to_index: i32,
#[pb(index = 4)]
pub to_index: i32,
}
#[derive(Clone)]
pub struct MoveFieldParams {
pub view_id: String,
pub field_id: String,
pub from_index: i32,
pub to_index: i32,
pub view_id: String,
pub field_id: String,
pub from_index: i32,
pub to_index: i32,
}
impl TryInto<MoveFieldParams> for MoveFieldPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<MoveFieldParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let item_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::InvalidData)?;
Ok(MoveFieldParams {
view_id: view_id.0,
field_id: item_id.0,
from_index: self.from_index,
to_index: self.to_index,
})
}
fn try_into(self) -> Result<MoveFieldParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let item_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::InvalidData)?;
Ok(MoveFieldParams {
view_id: view_id.0,
field_id: item_id.0,
from_index: self.from_index,
to_index: self.to_index,
})
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveRowPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub from_row_id: String,
#[pb(index = 2)]
pub from_row_id: String,
#[pb(index = 4)]
pub to_row_id: String,
#[pb(index = 4)]
pub to_row_id: String,
}
pub struct MoveRowParams {
pub view_id: String,
pub from_row_id: String,
pub to_row_id: String,
pub view_id: String,
pub from_row_id: String,
pub to_row_id: String,
}
impl TryInto<MoveRowParams> for MoveRowPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<MoveRowParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let from_row_id = NotEmptyStr::parse(self.from_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
let to_row_id = NotEmptyStr::parse(self.to_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
fn try_into(self) -> Result<MoveRowParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let from_row_id = NotEmptyStr::parse(self.from_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
let to_row_id = NotEmptyStr::parse(self.to_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
Ok(MoveRowParams {
view_id: view_id.0,
from_row_id: from_row_id.0,
to_row_id: to_row_id.0,
})
}
Ok(MoveRowParams {
view_id: view_id.0,
from_row_id: from_row_id.0,
to_row_id: to_row_id.0,
})
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct MoveGroupRowPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub from_row_id: String,
#[pb(index = 2)]
pub from_row_id: String,
#[pb(index = 3)]
pub to_group_id: String,
#[pb(index = 3)]
pub to_group_id: String,
#[pb(index = 4, one_of)]
pub to_row_id: Option<String>,
#[pb(index = 4, one_of)]
pub to_row_id: Option<String>,
}
pub struct MoveGroupRowParams {
pub view_id: String,
pub from_row_id: String,
pub to_group_id: String,
pub to_row_id: Option<String>,
pub view_id: String,
pub from_row_id: String,
pub to_group_id: String,
pub to_row_id: Option<String>,
}
impl TryInto<MoveGroupRowParams> for MoveGroupRowPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<MoveGroupRowParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let from_row_id = NotEmptyStr::parse(self.from_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
let to_group_id = NotEmptyStr::parse(self.to_group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
fn try_into(self) -> Result<MoveGroupRowParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let from_row_id = NotEmptyStr::parse(self.from_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
let to_group_id =
NotEmptyStr::parse(self.to_group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
let to_row_id = match self.to_row_id {
None => None,
Some(to_row_id) => Some(NotEmptyStr::parse(to_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?.0),
};
let to_row_id = match self.to_row_id {
None => None,
Some(to_row_id) => Some(
NotEmptyStr::parse(to_row_id)
.map_err(|_| ErrorCode::RowIdIsEmpty)?
.0,
),
};
Ok(MoveGroupRowParams {
view_id: view_id.0,
from_row_id: from_row_id.0,
to_group_id: to_group_id.0,
to_row_id,
})
}
Ok(MoveGroupRowParams {
view_id: view_id.0,
from_row_id: from_row_id.0,
to_group_id: to_group_id.0,
to_row_id,
})
}
}
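
The optional `to_row_id` above is validated with a manual `match` that threads `?` through the `Some` arm; an equivalent and slightly terser formulation uses `Option::map` plus `transpose`. A self-contained sketch (the error string is a stand-in for `ErrorCode::RowIdIsEmpty`):

fn parse_non_empty(s: String) -> Result<String, &'static str> {
  if s.trim().is_empty() {
    Err("RowIdIsEmpty")
  } else {
    Ok(s)
  }
}

fn validate_to_row_id(to_row_id: Option<String>) -> Result<Option<String>, &'static str> {
  // `map` yields Option<Result<..>>, and `transpose` flips it so `?` can apply.
  to_row_id.map(parse_non_empty).transpose()
}

fn main() {
  assert_eq!(validate_to_row_id(None), Ok(None));
  assert_eq!(
    validate_to_row_id(Some("row-9".to_owned())),
    Ok(Some("row-9".to_owned()))
  );
  assert!(validate_to_row_id(Some("  ".to_owned())).is_err());
}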

View File

@ -3,81 +3,83 @@ use grid_model::{GroupRevision, SelectOptionGroupConfigurationRevision};
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct UrlGroupConfigurationPB {
#[pb(index = 1)]
hide_empty: bool,
#[pb(index = 1)]
hide_empty: bool,
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct TextGroupConfigurationPB {
#[pb(index = 1)]
hide_empty: bool,
#[pb(index = 1)]
hide_empty: bool,
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct SelectOptionGroupConfigurationPB {
#[pb(index = 1)]
hide_empty: bool,
#[pb(index = 1)]
hide_empty: bool,
}
impl std::convert::From<SelectOptionGroupConfigurationRevision> for SelectOptionGroupConfigurationPB {
fn from(rev: SelectOptionGroupConfigurationRevision) -> Self {
Self {
hide_empty: rev.hide_empty,
}
impl std::convert::From<SelectOptionGroupConfigurationRevision>
for SelectOptionGroupConfigurationPB
{
fn from(rev: SelectOptionGroupConfigurationRevision) -> Self {
Self {
hide_empty: rev.hide_empty,
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct GroupRecordPB {
#[pb(index = 1)]
group_id: String,
#[pb(index = 1)]
group_id: String,
#[pb(index = 2)]
visible: bool,
#[pb(index = 2)]
visible: bool,
}
impl std::convert::From<GroupRevision> for GroupRecordPB {
fn from(rev: GroupRevision) -> Self {
Self {
group_id: rev.id,
visible: rev.visible,
}
fn from(rev: GroupRevision) -> Self {
Self {
group_id: rev.id,
visible: rev.visible,
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct NumberGroupConfigurationPB {
#[pb(index = 1)]
hide_empty: bool,
#[pb(index = 1)]
hide_empty: bool,
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct DateGroupConfigurationPB {
#[pb(index = 1)]
pub condition: DateCondition,
#[pb(index = 1)]
pub condition: DateCondition,
#[pb(index = 2)]
hide_empty: bool,
#[pb(index = 2)]
hide_empty: bool,
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum DateCondition {
Relative = 0,
Day = 1,
Week = 2,
Month = 3,
Year = 4,
Relative = 0,
Day = 1,
Week = 2,
Month = 3,
Year = 4,
}
impl std::default::Default for DateCondition {
fn default() -> Self {
DateCondition::Relative
}
fn default() -> Self {
DateCondition::Relative
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct CheckboxGroupConfigurationPB {
#[pb(index = 1)]
pub(crate) hide_empty: bool,
#[pb(index = 1)]
pub(crate) hide_empty: bool,
}

View File

@ -9,188 +9,193 @@ use std::sync::Arc;
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct CreateBoardCardPayloadPB {
#[pb(index = 1)]
pub database_id: String,
#[pb(index = 1)]
pub database_id: String,
#[pb(index = 2)]
pub group_id: String,
#[pb(index = 2)]
pub group_id: String,
#[pb(index = 3, one_of)]
pub start_row_id: Option<String>,
#[pb(index = 3, one_of)]
pub start_row_id: Option<String>,
}
impl TryInto<CreateRowParams> for CreateBoardCardPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<CreateRowParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let group_id = NotEmptyStr::parse(self.group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
let start_row_id = match self.start_row_id {
None => None,
Some(start_row_id) => Some(NotEmptyStr::parse(start_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?.0),
};
Ok(CreateRowParams {
database_id: database_id.0,
start_row_id,
group_id: Some(group_id.0),
layout: LayoutTypePB::Board,
})
}
fn try_into(self) -> Result<CreateRowParams, Self::Error> {
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let group_id = NotEmptyStr::parse(self.group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
let start_row_id = match self.start_row_id {
None => None,
Some(start_row_id) => Some(
NotEmptyStr::parse(start_row_id)
.map_err(|_| ErrorCode::RowIdIsEmpty)?
.0,
),
};
Ok(CreateRowParams {
database_id: database_id.0,
start_row_id,
group_id: Some(group_id.0),
layout: LayoutTypePB::Board,
})
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct GroupConfigurationPB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 2)]
pub field_id: String,
}
impl std::convert::From<&GroupConfigurationRevision> for GroupConfigurationPB {
fn from(rev: &GroupConfigurationRevision) -> Self {
GroupConfigurationPB {
id: rev.id.clone(),
field_id: rev.field_id.clone(),
}
fn from(rev: &GroupConfigurationRevision) -> Self {
GroupConfigurationPB {
id: rev.id.clone(),
field_id: rev.field_id.clone(),
}
}
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedGroupPB {
#[pb(index = 1)]
pub items: Vec<GroupPB>,
#[pb(index = 1)]
pub items: Vec<GroupPB>,
}
impl std::ops::Deref for RepeatedGroupPB {
type Target = Vec<GroupPB>;
fn deref(&self) -> &Self::Target {
&self.items
}
type Target = Vec<GroupPB>;
fn deref(&self) -> &Self::Target {
&self.items
}
}
impl std::ops::DerefMut for RepeatedGroupPB {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.items
}
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.items
}
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct GroupPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub group_id: String,
#[pb(index = 2)]
pub group_id: String,
#[pb(index = 3)]
pub desc: String,
#[pb(index = 3)]
pub desc: String,
#[pb(index = 4)]
pub rows: Vec<RowPB>,
#[pb(index = 4)]
pub rows: Vec<RowPB>,
#[pb(index = 5)]
pub is_default: bool,
#[pb(index = 5)]
pub is_default: bool,
#[pb(index = 6)]
pub is_visible: bool,
#[pb(index = 6)]
pub is_visible: bool,
}
impl std::convert::From<Group> for GroupPB {
fn from(group: Group) -> Self {
Self {
field_id: group.field_id,
group_id: group.id,
desc: group.name,
rows: group.rows,
is_default: group.is_default,
is_visible: group.is_visible,
}
fn from(group: Group) -> Self {
Self {
field_id: group.field_id,
group_id: group.id,
desc: group.name,
rows: group.rows,
is_default: group.is_default,
is_visible: group.is_visible,
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedGroupConfigurationPB {
#[pb(index = 1)]
pub items: Vec<GroupConfigurationPB>,
#[pb(index = 1)]
pub items: Vec<GroupConfigurationPB>,
}
impl std::convert::From<Vec<GroupConfigurationPB>> for RepeatedGroupConfigurationPB {
fn from(items: Vec<GroupConfigurationPB>) -> Self {
Self { items }
}
fn from(items: Vec<GroupConfigurationPB>) -> Self {
Self { items }
}
}
impl std::convert::From<Vec<Arc<GroupConfigurationRevision>>> for RepeatedGroupConfigurationPB {
fn from(revs: Vec<Arc<GroupConfigurationRevision>>) -> Self {
RepeatedGroupConfigurationPB {
items: revs.iter().map(|rev| rev.as_ref().into()).collect(),
}
fn from(revs: Vec<Arc<GroupConfigurationRevision>>) -> Self {
RepeatedGroupConfigurationPB {
items: revs.iter().map(|rev| rev.as_ref().into()).collect(),
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct InsertGroupPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub field_type: FieldType,
#[pb(index = 2)]
pub field_type: FieldType,
}
impl TryInto<InsertGroupParams> for InsertGroupPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<InsertGroupParams, Self::Error> {
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
fn try_into(self) -> Result<InsertGroupParams, Self::Error> {
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
Ok(InsertGroupParams {
field_id,
field_type_rev: self.field_type.into(),
})
}
Ok(InsertGroupParams {
field_id,
field_type_rev: self.field_type.into(),
})
}
}
pub struct InsertGroupParams {
pub field_id: String,
pub field_type_rev: FieldTypeRevision,
pub field_id: String,
pub field_type_rev: FieldTypeRevision,
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct DeleteGroupPayloadPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub group_id: String,
#[pb(index = 2)]
pub group_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 3)]
pub field_type: FieldType,
}
impl TryInto<DeleteGroupParams> for DeleteGroupPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<DeleteGroupParams, Self::Error> {
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let group_id = NotEmptyStr::parse(self.group_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
fn try_into(self) -> Result<DeleteGroupParams, Self::Error> {
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let group_id = NotEmptyStr::parse(self.group_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
Ok(DeleteGroupParams {
field_id,
field_type_rev: self.field_type.into(),
group_id,
})
}
Ok(DeleteGroupParams {
field_id,
field_type_rev: self.field_type.into(),
group_id,
})
}
}
pub struct DeleteGroupParams {
pub field_id: String,
pub group_id: String,
pub field_type_rev: FieldTypeRevision,
pub field_id: String,
pub group_id: String,
pub field_type_rev: FieldTypeRevision,
}

View File

@ -6,158 +6,158 @@ use std::fmt::Formatter;
#[derive(Debug, Default, ProtoBuf)]
pub struct GroupRowsNotificationPB {
#[pb(index = 1)]
pub group_id: String,
#[pb(index = 1)]
pub group_id: String,
#[pb(index = 2, one_of)]
pub group_name: Option<String>,
#[pb(index = 2, one_of)]
pub group_name: Option<String>,
#[pb(index = 3)]
pub inserted_rows: Vec<InsertedRowPB>,
#[pb(index = 3)]
pub inserted_rows: Vec<InsertedRowPB>,
#[pb(index = 4)]
pub deleted_rows: Vec<String>,
#[pb(index = 4)]
pub deleted_rows: Vec<String>,
#[pb(index = 5)]
pub updated_rows: Vec<RowPB>,
#[pb(index = 5)]
pub updated_rows: Vec<RowPB>,
}
impl std::fmt::Display for GroupRowsNotificationPB {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
for inserted_row in &self.inserted_rows {
f.write_fmt(format_args!(
"Insert: {} row at {:?}",
inserted_row.row.id, inserted_row.index
))?;
}
for deleted_row in &self.deleted_rows {
f.write_fmt(format_args!("Delete: {} row", deleted_row))?;
}
Ok(())
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
for inserted_row in &self.inserted_rows {
f.write_fmt(format_args!(
"Insert: {} row at {:?}",
inserted_row.row.id, inserted_row.index
))?;
}
for deleted_row in &self.deleted_rows {
f.write_fmt(format_args!("Delete: {} row", deleted_row))?;
}
Ok(())
}
}
impl GroupRowsNotificationPB {
pub fn is_empty(&self) -> bool {
self.group_name.is_none()
&& self.inserted_rows.is_empty()
&& self.deleted_rows.is_empty()
&& self.updated_rows.is_empty()
}
pub fn is_empty(&self) -> bool {
self.group_name.is_none()
&& self.inserted_rows.is_empty()
&& self.deleted_rows.is_empty()
&& self.updated_rows.is_empty()
}
pub fn new(group_id: String) -> Self {
Self {
group_id,
..Default::default()
}
pub fn new(group_id: String) -> Self {
Self {
group_id,
..Default::default()
}
}
pub fn name(group_id: String, name: &str) -> Self {
Self {
group_id,
group_name: Some(name.to_owned()),
..Default::default()
}
pub fn name(group_id: String, name: &str) -> Self {
Self {
group_id,
group_name: Some(name.to_owned()),
..Default::default()
}
}
pub fn insert(group_id: String, inserted_rows: Vec<InsertedRowPB>) -> Self {
Self {
group_id,
inserted_rows,
..Default::default()
}
pub fn insert(group_id: String, inserted_rows: Vec<InsertedRowPB>) -> Self {
Self {
group_id,
inserted_rows,
..Default::default()
}
}
pub fn delete(group_id: String, deleted_rows: Vec<String>) -> Self {
Self {
group_id,
deleted_rows,
..Default::default()
}
pub fn delete(group_id: String, deleted_rows: Vec<String>) -> Self {
Self {
group_id,
deleted_rows,
..Default::default()
}
}
pub fn update(group_id: String, updated_rows: Vec<RowPB>) -> Self {
Self {
group_id,
updated_rows,
..Default::default()
}
pub fn update(group_id: String, updated_rows: Vec<RowPB>) -> Self {
Self {
group_id,
updated_rows,
..Default::default()
}
}
}
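
`GroupRowsNotificationPB` exposes one constructor per kind of change; each fills a single field and leaves the rest at `Default::default()`, and `is_empty` lets callers skip dispatching a no-op notification. A trimmed sketch of that builder pattern (row payloads reduced to plain strings):

#[derive(Debug, Default)]
struct GroupRowsNotification {
  group_id: String,
  inserted_rows: Vec<String>,
  deleted_rows: Vec<String>,
}

impl GroupRowsNotification {
  fn insert(group_id: String, inserted_rows: Vec<String>) -> Self {
    // Struct-update syntax keeps each constructor focused on a single field.
    Self {
      group_id,
      inserted_rows,
      ..Default::default()
    }
  }

  fn is_empty(&self) -> bool {
    self.inserted_rows.is_empty() && self.deleted_rows.is_empty()
  }
}

fn main() {
  let changeset = GroupRowsNotification::insert("group-1".to_owned(), vec!["row-1".to_owned()]);
  // Callers can skip dispatching notifications that carry no changes.
  if !changeset.is_empty() {
    println!("notify: {:?}", changeset);
  }
}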
#[derive(Debug, Default, ProtoBuf)]
pub struct MoveGroupPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub from_group_id: String,
#[pb(index = 2)]
pub from_group_id: String,
#[pb(index = 3)]
pub to_group_id: String,
#[pb(index = 3)]
pub to_group_id: String,
}
#[derive(Debug)]
pub struct MoveGroupParams {
pub view_id: String,
pub from_group_id: String,
pub to_group_id: String,
pub view_id: String,
pub from_group_id: String,
pub to_group_id: String,
}
impl TryInto<MoveGroupParams> for MoveGroupPayloadPB {
type Error = ErrorCode;
type Error = ErrorCode;
fn try_into(self) -> Result<MoveGroupParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let from_group_id = NotEmptyStr::parse(self.from_group_id)
.map_err(|_| ErrorCode::GroupIdIsEmpty)?
.0;
let to_group_id = NotEmptyStr::parse(self.to_group_id)
.map_err(|_| ErrorCode::GroupIdIsEmpty)?
.0;
Ok(MoveGroupParams {
view_id,
from_group_id,
to_group_id,
})
}
fn try_into(self) -> Result<MoveGroupParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let from_group_id = NotEmptyStr::parse(self.from_group_id)
.map_err(|_| ErrorCode::GroupIdIsEmpty)?
.0;
let to_group_id = NotEmptyStr::parse(self.to_group_id)
.map_err(|_| ErrorCode::GroupIdIsEmpty)?
.0;
Ok(MoveGroupParams {
view_id,
from_group_id,
to_group_id,
})
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct GroupChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub inserted_groups: Vec<InsertedGroupPB>,
#[pb(index = 3)]
pub initial_groups: Vec<GroupPB>,
#[pb(index = 4)]
pub deleted_groups: Vec<String>,
#[pb(index = 5)]
pub update_groups: Vec<GroupPB>,
}
impl GroupChangesetPB {
pub fn is_empty(&self) -> bool {
self.initial_groups.is_empty()
&& self.inserted_groups.is_empty()
&& self.deleted_groups.is_empty()
&& self.update_groups.is_empty()
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct InsertedGroupPB {
#[pb(index = 1)]
pub group: GroupPB,
#[pb(index = 2)]
pub index: i32,
}


@@ -2,16 +2,16 @@
pub struct NotEmptyStr(pub String);
impl NotEmptyStr {
pub fn parse(s: String) -> Result<Self, String> {
if s.trim().is_empty() {
return Err("Input string is empty".to_owned());
}
Ok(Self(s))
}
}
impl AsRef<str> for NotEmptyStr {
fn as_ref(&self) -> &str {
&self.0
}
}
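NotEmptyStr is the validation primitive behind every TryInto conversion in this commit, so a small self-contained usage sketch may help; the type is copied from the snippet above:

pub struct NotEmptyStr(pub String);

impl NotEmptyStr {
  pub fn parse(s: String) -> Result<Self, String> {
    if s.trim().is_empty() {
      return Err("Input string is empty".to_owned());
    }
    Ok(Self(s))
  }
}

impl AsRef<str> for NotEmptyStr {
  fn as_ref(&self) -> &str {
    &self.0
  }
}

fn main() {
  // Whitespace-only input is rejected, because parse() trims before checking.
  assert!(NotEmptyStr::parse("   ".to_owned()).is_err());
  let id = NotEmptyStr::parse("abc".to_owned()).unwrap();
  assert_eq!(id.as_ref(), "abc");
}

Since the intended way to obtain a NotEmptyStr is parse(), holding one signals that the wrapped string has already been checked for emptiness.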


@@ -8,196 +8,198 @@ use std::sync::Arc;
/// [RowPB] describes a row: it carries the id of the parent block and the metadata of the row.
#[derive(Debug, Default, Clone, ProtoBuf, Eq, PartialEq)]
pub struct RowPB {
#[pb(index = 1)]
pub block_id: String,
#[pb(index = 2)]
pub id: String,
#[pb(index = 3)]
pub height: i32,
}
impl RowPB {
pub fn row_id(&self) -> &str {
&self.id
}
pub fn block_id(&self) -> &str {
&self.block_id
}
}
impl std::convert::From<&RowRevision> for RowPB {
fn from(rev: &RowRevision) -> Self {
Self {
block_id: rev.block_id.clone(),
id: rev.id.clone(),
height: rev.height,
}
}
}
impl std::convert::From<&mut RowRevision> for RowPB {
fn from(rev: &mut RowRevision) -> Self {
Self {
block_id: rev.block_id.clone(),
id: rev.id.clone(),
height: rev.height,
}
}
}
impl std::convert::From<&Arc<RowRevision>> for RowPB {
fn from(rev: &Arc<RowRevision>) -> Self {
Self {
block_id: rev.block_id.clone(),
id: rev.id.clone(),
height: rev.height,
}
}
}
#[derive(Debug, Default, ProtoBuf)]
pub struct OptionalRowPB {
#[pb(index = 1, one_of)]
pub row: Option<RowPB>,
}
#[derive(Debug, Default, ProtoBuf)]
pub struct RepeatedRowPB {
#[pb(index = 1)]
pub items: Vec<RowPB>,
}
impl std::convert::From<Vec<RowPB>> for RepeatedRowPB {
fn from(items: Vec<RowPB>) -> Self {
Self { items }
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct InsertedRowPB {
#[pb(index = 1)]
pub row: RowPB,
#[pb(index = 2, one_of)]
pub index: Option<i32>,
#[pb(index = 3)]
pub is_new: bool,
}
impl InsertedRowPB {
pub fn new(row: RowPB) -> Self {
Self {
row,
index: None,
is_new: false,
}
}
pub fn with_index(row: RowPB, index: i32) -> Self {
Self {
row,
index: Some(index),
is_new: false,
}
}
}
impl std::convert::From<RowPB> for InsertedRowPB {
fn from(row: RowPB) -> Self {
Self {
row,
index: None,
is_new: false,
}
}
}
impl std::convert::From<&RowRevision> for InsertedRowPB {
fn from(row: &RowRevision) -> Self {
let row_order = RowPB::from(row);
Self::from(row_order)
}
}
#[derive(Debug, Clone, Default, ProtoBuf)]
pub struct UpdatedRowPB {
#[pb(index = 1)]
pub row: RowPB,
// Represents the cells that were updated in this row.
#[pb(index = 2)]
pub field_ids: Vec<String>,
}
#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct RowIdPB {
#[pb(index = 1)]
pub database_id: String,
#[pb(index = 2)]
pub row_id: String,
}
pub struct RowIdParams {
pub database_id: String,
pub row_id: String,
}
impl TryInto<RowIdParams> for RowIdPB {
type Error = ErrorCode;
fn try_into(self) -> Result<RowIdParams, Self::Error> {
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let row_id = NotEmptyStr::parse(self.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
Ok(RowIdParams {
database_id: database_id.0,
row_id: row_id.0,
})
}
}
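The conversion above follows the crate-wide pattern of validating protobuf payloads at the boundary so only checked params reach the services layer. A standalone sketch of the same pattern, using hypothetical Payload/Params types rather than the real ones:

#[derive(Debug)]
enum ErrorCode {
  DatabaseIdIsEmpty,
  RowIdIsEmpty,
}

struct RowIdPayload {
  database_id: String,
  row_id: String,
}

struct RowIdParams {
  database_id: String,
  row_id: String,
}

fn non_empty(s: String, err: ErrorCode) -> Result<String, ErrorCode> {
  if s.trim().is_empty() {
    Err(err)
  } else {
    Ok(s)
  }
}

// Implementing TryFrom gives TryInto for free through the blanket impl,
// which is why new code usually prefers it over a direct TryInto impl.
impl TryFrom<RowIdPayload> for RowIdParams {
  type Error = ErrorCode;

  fn try_from(payload: RowIdPayload) -> Result<Self, Self::Error> {
    Ok(Self {
      database_id: non_empty(payload.database_id, ErrorCode::DatabaseIdIsEmpty)?,
      row_id: non_empty(payload.row_id, ErrorCode::RowIdIsEmpty)?,
    })
  }
}

fn main() {
  let result: Result<RowIdParams, _> = RowIdPayload {
    database_id: "db".to_owned(),
    row_id: "".to_owned(),
  }
  .try_into();
  // The empty row_id is caught at the boundary, before any service code runs.
  assert!(matches!(result, Err(ErrorCode::RowIdIsEmpty)));
}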
#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct BlockRowIdPB {
#[pb(index = 1)]
pub block_id: String,
#[pb(index = 2)]
pub row_id: String,
}
#[derive(ProtoBuf, Default)]
pub struct CreateRowPayloadPB {
#[pb(index = 1)]
pub database_id: String,
#[pb(index = 2, one_of)]
pub start_row_id: Option<String>,
}
#[derive(Default)]
pub struct CreateRowParams {
pub database_id: String,
pub start_row_id: Option<String>,
pub group_id: Option<String>,
pub layout: LayoutTypePB,
}
impl TryInto<CreateRowParams> for CreateRowPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CreateRowParams, Self::Error> {
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
Ok(CreateRowParams {
database_id: database_id.0,
start_row_id: self.start_row_id,
group_id: None,
layout: LayoutTypePB::Grid,
})
}
}


@@ -1,8 +1,9 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{
AlterFilterParams, AlterFilterPayloadPB, AlterSortParams, AlterSortPayloadPB, DeleteFilterParams,
DeleteFilterPayloadPB, DeleteGroupParams, DeleteGroupPayloadPB, DeleteSortParams,
DeleteSortPayloadPB, InsertGroupParams, InsertGroupPayloadPB, RepeatedFilterPB,
RepeatedGroupConfigurationPB, RepeatedSortPB,
};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;
@@ -14,164 +15,164 @@ use strum_macros::EnumIter;
/// [DatabaseViewSettingPB] defines the setting options for the grid, such as the filter, group, and sort.
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct DatabaseViewSettingPB {
#[pb(index = 1)]
pub support_layouts: Vec<ViewLayoutPB>,
#[pb(index = 2)]
pub current_layout: LayoutTypePB,
#[pb(index = 3)]
pub filters: RepeatedFilterPB,
#[pb(index = 4)]
pub group_configurations: RepeatedGroupConfigurationPB,
#[pb(index = 5)]
pub sorts: RepeatedSortPB,
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct ViewLayoutPB {
#[pb(index = 1)]
ty: LayoutTypePB,
}
impl ViewLayoutPB {
pub fn all() -> Vec<ViewLayoutPB> {
let mut layouts = vec![];
for layout_ty in LayoutTypePB::iter() {
layouts.push(ViewLayoutPB { ty: layout_ty })
}
layouts
}
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum, EnumIter)]
#[repr(u8)]
pub enum LayoutTypePB {
Grid = 0,
Board = 1,
Calendar = 2,
}
impl std::default::Default for LayoutTypePB {
fn default() -> Self {
LayoutTypePB::Grid
}
}
impl std::convert::From<LayoutRevision> for LayoutTypePB {
fn from(rev: LayoutRevision) -> Self {
match rev {
LayoutRevision::Grid => LayoutTypePB::Grid,
LayoutRevision::Board => LayoutTypePB::Board,
LayoutRevision::Calendar => LayoutTypePB::Calendar,
}
}
}
impl std::convert::From<LayoutTypePB> for LayoutRevision {
fn from(layout: LayoutTypePB) -> Self {
match layout {
LayoutTypePB::Grid => LayoutRevision::Grid,
LayoutTypePB::Board => LayoutRevision::Board,
LayoutTypePB::Calendar => LayoutRevision::Calendar,
}
}
}
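Because both directions above are exhaustive matches, adding a variant to either enum becomes a compile error until both impls are updated, which keeps the protobuf enum and the revision enum in lockstep. A condensed sketch with simplified copies of the two enums:

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LayoutType {
  Grid,
  Board,
  Calendar,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LayoutRev {
  Grid,
  Board,
  Calendar,
}

impl From<LayoutRev> for LayoutType {
  fn from(rev: LayoutRev) -> Self {
    match rev {
      LayoutRev::Grid => LayoutType::Grid,
      LayoutRev::Board => LayoutType::Board,
      LayoutRev::Calendar => LayoutType::Calendar,
    }
  }
}

impl From<LayoutType> for LayoutRev {
  fn from(layout: LayoutType) -> Self {
    match layout {
      LayoutType::Grid => LayoutRev::Grid,
      LayoutType::Board => LayoutRev::Board,
      LayoutType::Calendar => LayoutRev::Calendar,
    }
  }
}

fn main() {
  // Converting to the revision type and back yields the original value.
  let layout = LayoutType::Board;
  let round_trip = LayoutType::from(LayoutRev::from(layout));
  assert_eq!(layout, round_trip);
}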
#[derive(Default, ProtoBuf)]
pub struct DatabaseSettingChangesetPB {
#[pb(index = 1)]
pub database_id: String,
#[pb(index = 2)]
pub layout_type: LayoutTypePB,
#[pb(index = 3, one_of)]
pub alter_filter: Option<AlterFilterPayloadPB>,
#[pb(index = 4, one_of)]
pub delete_filter: Option<DeleteFilterPayloadPB>,
#[pb(index = 5, one_of)]
pub insert_group: Option<InsertGroupPayloadPB>,
#[pb(index = 6, one_of)]
pub delete_group: Option<DeleteGroupPayloadPB>,
#[pb(index = 7, one_of)]
pub alter_sort: Option<AlterSortPayloadPB>,
#[pb(index = 8, one_of)]
pub delete_sort: Option<DeleteSortPayloadPB>,
}
impl TryInto<DatabaseSettingChangesetParams> for DatabaseSettingChangesetPB {
type Error = ErrorCode;
fn try_into(self) -> Result<DatabaseSettingChangesetParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id)
.map_err(|_| ErrorCode::ViewIdInvalid)?
.0;
let insert_filter = match self.alter_filter {
None => None,
Some(payload) => Some(payload.try_into()?),
};
let delete_filter = match self.delete_filter {
None => None,
Some(payload) => Some(payload.try_into()?),
};
let insert_group = match self.insert_group {
Some(payload) => Some(payload.try_into()?),
None => None,
};
let delete_group = match self.delete_group {
Some(payload) => Some(payload.try_into()?),
None => None,
};
let alert_sort = match self.alter_sort {
None => None,
Some(payload) => Some(payload.try_into()?),
};
let delete_sort = match self.delete_sort {
None => None,
Some(payload) => Some(payload.try_into()?),
};
Ok(DatabaseSettingChangesetParams {
database_id,
layout_type: self.layout_type.into(),
insert_filter,
delete_filter,
insert_group,
delete_group,
alert_sort,
delete_sort,
})
}
}
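Each optional payload above is converted with the same four-line match. Assuming nothing beyond Option's standard combinators, that match is equivalent to the shorter map/transpose idiom, sketched here with toy types:

struct Payload(String);
struct Params(String);

impl TryFrom<Payload> for Params {
  type Error = String;

  fn try_from(p: Payload) -> Result<Self, Self::Error> {
    if p.0.is_empty() {
      Err("empty payload".to_owned())
    } else {
      Ok(Params(p.0))
    }
  }
}

fn main() -> Result<(), String> {
  let maybe_payload = Some(Payload("filter".to_owned()));

  // map() yields Option<Result<Params, _>>, transpose() flips it to
  // Result<Option<Params>, _>, and `?` propagates the error exactly like
  // the hand-written `match { None => None, Some(p) => Some(p.try_into()?) }`.
  let params: Option<Params> = maybe_payload.map(Params::try_from).transpose()?;

  assert!(params.is_some());
  Ok(())
}

Both spellings behave identically; the match form is simply the one this codebase uses.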
pub struct DatabaseSettingChangesetParams {
pub database_id: String,
pub layout_type: LayoutRevision,
pub insert_filter: Option<AlterFilterParams>,
pub delete_filter: Option<DeleteFilterParams>,
pub insert_group: Option<InsertGroupParams>,
pub delete_group: Option<DeleteGroupParams>,
pub alert_sort: Option<AlterSortParams>,
pub delete_sort: Option<DeleteSortParams>,
}
impl DatabaseSettingChangesetParams {
pub fn is_filter_changed(&self) -> bool {
self.insert_filter.is_some() || self.delete_filter.is_some()
}
}


@@ -9,227 +9,231 @@ use grid_model::{FieldTypeRevision, SortCondition, SortRevision};
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct SortPB {
#[pb(index = 1)]
pub id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 4)]
pub condition: SortConditionPB,
}
impl std::convert::From<&SortRevision> for SortPB {
fn from(sort_rev: &SortRevision) -> Self {
Self {
id: sort_rev.id.clone(),
field_id: sort_rev.field_id.clone(),
field_type: sort_rev.field_type.into(),
condition: sort_rev.condition.clone().into(),
}
}
}
#[derive(Eq, PartialEq, ProtoBuf, Debug, Default, Clone)]
pub struct RepeatedSortPB {
#[pb(index = 1)]
pub items: Vec<SortPB>,
}
impl std::convert::From<Vec<Arc<SortRevision>>> for RepeatedSortPB {
fn from(revs: Vec<Arc<SortRevision>>) -> Self {
RepeatedSortPB {
items: revs.into_iter().map(|rev| rev.as_ref().into()).collect(),
}
}
}
impl std::convert::From<Vec<SortPB>> for RepeatedSortPB {
fn from(items: Vec<SortPB>) -> Self {
Self { items }
}
}
#[derive(Debug, Clone, PartialEq, Eq, ProtoBuf_Enum)]
#[repr(u8)]
pub enum SortConditionPB {
Ascending = 0,
Descending = 1,
}
impl std::default::Default for SortConditionPB {
fn default() -> Self {
Self::Ascending
}
}
impl std::convert::From<SortCondition> for SortConditionPB {
fn from(condition: SortCondition) -> Self {
match condition {
SortCondition::Ascending => SortConditionPB::Ascending,
SortCondition::Descending => SortConditionPB::Descending,
}
}
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct AlterSortPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
/// Create a new sort if the sort_id is None
#[pb(index = 4, one_of)]
pub sort_id: Option<String>,
#[pb(index = 5)]
pub condition: SortConditionPB,
}
impl TryInto<AlterSortParams> for AlterSortPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<AlterSortParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let sort_id = match self.sort_id {
None => None,
Some(sort_id) => Some(
NotEmptyStr::parse(sort_id)
.map_err(|_| ErrorCode::SortIdIsEmpty)?
.0,
),
};
Ok(AlterSortParams {
view_id,
field_id,
sort_id,
field_type: self.field_type.into(),
condition: self.condition as u8,
})
}
}
#[derive(Debug)]
pub struct AlterSortParams {
pub view_id: String,
pub field_id: String,
/// Create a new sort if the sort is None
pub sort_id: Option<String>,
pub field_type: FieldTypeRevision,
pub condition: u8,
}
#[derive(ProtoBuf, Debug, Default, Clone)]
pub struct DeleteSortPayloadPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub field_id: String,
#[pb(index = 3)]
pub field_type: FieldType,
#[pb(index = 4)]
pub sort_id: String,
}
impl TryInto<DeleteSortParams> for DeleteSortPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<DeleteSortParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id)
.map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?
.0;
let field_id = NotEmptyStr::parse(self.field_id)
.map_err(|_| ErrorCode::FieldIdIsEmpty)?
.0;
let sort_id = NotEmptyStr::parse(self.sort_id)
.map_err(|_| ErrorCode::UnexpectedEmptyString)?
.0;
let sort_type = SortType {
field_id,
field_type: self.field_type,
};
Ok(DeleteSortParams {
view_id,
sort_type,
sort_id,
})
}
}
#[derive(Debug, Clone)]
pub struct DeleteSortParams {
pub view_id: String,
pub sort_type: SortType,
pub sort_id: String,
}
#[derive(Debug, Default, ProtoBuf)]
pub struct SortChangesetNotificationPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub insert_sorts: Vec<SortPB>,
#[pb(index = 3)]
pub delete_sorts: Vec<SortPB>,
#[pb(index = 4)]
pub update_sorts: Vec<SortPB>,
}
impl SortChangesetNotificationPB {
pub fn new(view_id: String) -> Self {
Self {
view_id,
insert_sorts: vec![],
delete_sorts: vec![],
update_sorts: vec![],
}
}
pub fn extend(&mut self, other: SortChangesetNotificationPB) {
self.insert_sorts.extend(other.insert_sorts);
self.delete_sorts.extend(other.delete_sorts);
self.update_sorts.extend(other.update_sorts);
}
pub fn is_empty(&self) -> bool {
self.insert_sorts.is_empty() && self.delete_sorts.is_empty() && self.update_sorts.is_empty()
}
}
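A short sketch of how the extend/is_empty pair above lets several partial changesets be merged into one notification before anything is sent; the type is a simplified stand-in with strings instead of SortPB values:

#[derive(Default)]
struct SortChangeset {
  insert_sorts: Vec<String>,
  delete_sorts: Vec<String>,
  update_sorts: Vec<String>,
}

impl SortChangeset {
  fn extend(&mut self, other: SortChangeset) {
    self.insert_sorts.extend(other.insert_sorts);
    self.delete_sorts.extend(other.delete_sorts);
    self.update_sorts.extend(other.update_sorts);
  }

  fn is_empty(&self) -> bool {
    self.insert_sorts.is_empty() && self.delete_sorts.is_empty() && self.update_sorts.is_empty()
  }
}

fn main() {
  let mut merged = SortChangeset::default();
  let partials = vec![
    SortChangeset {
      insert_sorts: vec!["sort-a".to_owned()],
      ..Default::default()
    },
    SortChangeset {
      delete_sorts: vec!["sort-b".to_owned()],
      ..Default::default()
    },
  ];
  for partial in partials {
    merged.extend(partial);
  }
  // Sending is skipped entirely when the merged changeset stayed empty.
  assert!(!merged.is_empty());
}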
#[derive(Debug, Default, ProtoBuf)]
pub struct ReorderAllRowsPB {
#[pb(index = 1)]
pub row_orders: Vec<String>,
}
#[derive(Debug, Default, ProtoBuf)]
pub struct ReorderSingleRowPB {
#[pb(index = 1)]
pub row_id: String,
#[pb(index = 2)]
pub old_index: i32,
#[pb(index = 3)]
pub new_index: i32,
}


@@ -3,62 +3,66 @@ use flowy_derive::ProtoBuf;
#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct ViewRowsVisibilityChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 5)]
pub visible_rows: Vec<InsertedRowPB>,
#[pb(index = 6)]
pub invisible_rows: Vec<String>,
}
#[derive(Debug, Default, Clone, ProtoBuf)]
pub struct ViewRowsChangesetPB {
#[pb(index = 1)]
pub view_id: String,
#[pb(index = 2)]
pub inserted_rows: Vec<InsertedRowPB>,
#[pb(index = 3)]
pub deleted_rows: Vec<String>,
#[pb(index = 4)]
pub updated_rows: Vec<UpdatedRowPB>,
}
impl ViewRowsChangesetPB {
pub fn from_insert(view_id: String, inserted_rows: Vec<InsertedRowPB>) -> Self {
Self {
view_id,
inserted_rows,
..Default::default()
}
}
pub fn from_delete(view_id: String, deleted_rows: Vec<String>) -> Self {
Self {
view_id,
deleted_rows,
..Default::default()
}
}
pub fn from_update(view_id: String, updated_rows: Vec<UpdatedRowPB>) -> Self {
Self {
view_id,
updated_rows,
..Default::default()
}
}
pub fn from_move(
view_id: String,
deleted_rows: Vec<String>,
inserted_rows: Vec<InsertedRowPB>,
) -> Self {
Self {
view_id,
inserted_rows,
deleted_rows,
..Default::default()
}
}
}
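from_move is the notable constructor above: a row move is expressed as a delete at the old position plus an insert at the new one, carried in a single changeset. A stand-in sketch with strings in place of the row payload types:

#[derive(Default, Debug)]
struct RowsChangeset {
  view_id: String,
  inserted_rows: Vec<String>,
  deleted_rows: Vec<String>,
  updated_rows: Vec<String>,
}

impl RowsChangeset {
  // Delete and insert travel together, so observers can apply the move atomically.
  fn from_move(view_id: String, deleted_rows: Vec<String>, inserted_rows: Vec<String>) -> Self {
    Self {
      view_id,
      inserted_rows,
      deleted_rows,
      ..Default::default()
    }
  }
}

fn main() {
  let changeset = RowsChangeset::from_move(
    "view-1".to_owned(),
    vec!["row-1".to_owned()],
    vec!["row-1".to_owned()],
  );
  assert_eq!(changeset.deleted_rows, changeset.inserted_rows);
  assert!(changeset.updated_rows.is_empty());
}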


@@ -2,10 +2,10 @@ use crate::entities::*;
use crate::manager::DatabaseManager;
use crate::services::cell::{FromCellString, ToCellChangesetString, TypeCellData};
use crate::services::field::{
default_type_option_builder_from_type, select_type_option_from_field_rev,
type_option_builder_from_json_str, DateCellChangeset, DateChangesetPB, SelectOptionCellChangeset,
SelectOptionCellChangesetPB, SelectOptionCellChangesetParams, SelectOptionCellDataPB,
SelectOptionChangeset, SelectOptionChangesetPB, SelectOptionIds, SelectOptionPB,
};
use crate::services::row::make_row_from_row_rev;
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
@@ -15,528 +15,562 @@ use std::sync::Arc;
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_database_data_handler(
data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<DatabasePB, FlowyError> {
let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id.as_ref()).await?;
let database = editor.get_database(database_id.as_ref()).await?;
data_result(database)
}
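Every handler in this file has the same shape: unwrap the plugin payload, convert it into validated params where needed, look up the editor, and delegate. A schematic, synchronous stand-in of that control flow (PluginData, Manager, and Editor below are mocks, not the real AFPluginData/AFPluginState/DatabaseManager types):

// Mock extractor: wraps the deserialized event payload.
struct PluginData<T>(T);

impl<T> PluginData<T> {
  fn into_inner(self) -> T {
    self.0
  }
}

struct DatabaseId(String);

struct Editor;

impl Editor {
  fn get_database(&self, id: &str) -> Result<String, String> {
    Ok(format!("database:{id}"))
  }
}

struct Manager;

impl Manager {
  fn open_database(&self, id: &str) -> Result<Editor, String> {
    if id.is_empty() {
      Err("empty database id".to_owned())
    } else {
      Ok(Editor)
    }
  }
}

// Same three steps as get_database_data_handler: payload -> editor -> result.
fn get_database_data_handler(
  data: PluginData<DatabaseId>,
  manager: &Manager,
) -> Result<String, String> {
  let database_id = data.into_inner();
  let editor = manager.open_database(&database_id.0)?;
  editor.get_database(&database_id.0)
}

fn main() {
  let out = get_database_data_handler(PluginData(DatabaseId("db-1".to_owned())), &Manager);
  assert_eq!(out.unwrap(), "database:db-1");
}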
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_database_setting_handler(
data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<DatabaseViewSettingPB, FlowyError> {
let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id).await?;
let database_setting = editor.get_setting().await?;
data_result(database_setting)
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_database_setting_handler(
data: AFPluginData<DatabaseSettingChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: DatabaseSettingChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
if let Some(insert_params) = params.insert_group {
editor.insert_group(insert_params).await?;
}
if let Some(delete_params) = params.delete_group {
editor.delete_group(delete_params).await?;
}
if let Some(alter_filter) = params.insert_filter {
editor.create_or_update_filter(alter_filter).await?;
}
if let Some(delete_filter) = params.delete_filter {
editor.delete_filter(delete_filter).await?;
}
if let Some(alter_sort) = params.alert_sort {
let _ = editor.create_or_update_sort(alter_sort).await?;
}
if let Some(delete_sort) = params.delete_sort {
editor.delete_sort(delete_sort).await?;
}
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_all_filters_handler(
data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedFilterPB, FlowyError> {
let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id).await?;
let filters = RepeatedFilterPB {
items: editor.get_all_filters().await?,
};
data_result(filters)
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_all_sorts_handler(
data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedSortPB, FlowyError> {
let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id.as_ref()).await?;
let sorts = RepeatedSortPB {
items: editor.get_all_sorts(database_id.as_ref()).await?,
};
data_result(sorts)
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn delete_all_sorts_handler(
data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let database_id: DatabaseIdPB = data.into_inner();
let editor = manager.open_database(database_id.as_ref()).await?;
editor.delete_all_sorts(database_id.as_ref()).await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_fields_handler(
data: AFPluginData<GetFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedFieldPB, FlowyError> {
let params: GetFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
let field_revs = editor.get_field_revs(params.field_ids).await?;
let repeated_field: RepeatedFieldPB = field_revs
.into_iter()
.map(FieldPB::from)
.collect::<Vec<_>>()
.into();
data_result(repeated_field)
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_field_handler(
data: AFPluginData<FieldChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let changeset: FieldChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&changeset.database_id).await?;
editor.update_field(changeset).await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_field_type_option_handler(
data: AFPluginData<TypeOptionChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: TypeOptionChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
let old_field_rev = editor.get_field_rev(&params.field_id).await;
editor
.update_field_type_option(
&params.database_id,
&params.field_id,
params.type_option_data,
old_field_rev,
)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn delete_field_handler(
data: AFPluginData<DeleteFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
editor.delete_field(&params.field_id).await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn switch_to_field_handler(
data: AFPluginData<UpdateFieldTypePayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: EditFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
let old_field_rev = editor.get_field_rev(&params.field_id).await;
editor
.switch_to_field_type(&params.field_id, &params.field_type)
.await?;
// Get the field_rev with field_id; if it doesn't exist, create the default FieldRevision from the FieldType.
let new_field_rev = editor
.get_field_rev(&params.field_id)
.await
.unwrap_or(Arc::new(editor.next_field_rev(&params.field_type).await?));
// Update the type-option data after the field type has been changed
let type_option_data = get_type_option_data(&new_field_rev, &params.field_type).await?;
editor
.update_field_type_option(
&params.database_id,
&new_field_rev.id,
type_option_data,
old_field_rev,
)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn duplicate_field_handler(
data: AFPluginData<DuplicateFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
editor.duplicate_field(&params.field_id).await?;
Ok(())
}
/// Returns the FieldTypeOptionData if the field exists; otherwise returns a record-not-found error.
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_field_type_option_data_handler(
data: AFPluginData<TypeOptionPathPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<TypeOptionPB, FlowyError> {
let params: TypeOptionPathParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
match editor.get_field_rev(&params.field_id).await {
None => Err(FlowyError::record_not_found()),
Some(field_rev) => {
let field_type = field_rev.ty.into();
let type_option_data = get_type_option_data(&field_rev, &field_type).await?;
let data = TypeOptionPB {
database_id: params.database_id,
field: field_rev.into(),
type_option_data,
};
data_result(data)
},
}
}
/// Creates the FieldMeta and saves it, returning the FieldTypeOptionData.
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn create_field_type_option_data_handler(
data: AFPluginData<CreateFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<TypeOptionPB, FlowyError> {
let params: CreateFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
let field_rev = editor
.create_new_field_rev_with_type_option(&params.field_type, params.type_option_data)
.await?;
let field_type: FieldType = field_rev.ty.into();
let type_option_data = get_type_option_data(&field_rev, &field_type).await?;
data_result(TypeOptionPB {
database_id: params.database_id,
field: field_rev.into(),
type_option_data,
})
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn move_field_handler(
data: AFPluginData<MoveFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: MoveFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
editor.move_field(params).await?;
Ok(())
}
/// The [FieldRevision] contains multiple type options; each of them belongs to a specific FieldType.
async fn get_type_option_data(
field_rev: &FieldRevision,
field_type: &FieldType,
) -> FlowyResult<Vec<u8>> {
let s = field_rev
.get_type_option_str(field_type)
.map(|value| value.to_owned())
.unwrap_or_else(|| {
default_type_option_builder_from_type(field_type)
.serializer()
.json_str()
});
let field_type: FieldType = field_rev.ty.into();
let builder = type_option_builder_from_json_str(&s, &field_type);
let type_option_data = builder.serializer().protobuf_bytes().to_vec();
Ok(type_option_data)
}
// #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_row_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<OptionalRowPB, FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
let row = editor
.get_row_rev(&params.row_id)
.await?
.map(make_row_from_row_rev);
data_result(OptionalRowPB { row })
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn delete_row_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
editor.delete_row(&params.row_id).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn duplicate_row_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
editor.duplicate_row(&params.row_id).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_row_handler(
data: AFPluginData<MoveRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: MoveRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.view_id).await?;
editor.move_row(params).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn create_table_row_handler(
data: AFPluginData<CreateRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RowPB, FlowyError> {
let params: CreateRowParams = data.into_inner().try_into()?;
let editor = manager
.get_database_editor(params.database_id.as_ref())
.await?;
let row = editor.create_row(params).await?;
data_result(row)
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_cell_handler(
data: AFPluginData<CellIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<CellPB, FlowyError> {
let params: CellIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
match editor.get_cell(&params).await {
None => data_result(CellPB::empty(&params.field_id, &params.row_id)),
Some(cell) => data_result(cell),
}
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_cell_handler(
data: AFPluginData<CellChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let changeset: CellChangesetPB = data.into_inner();
let editor = manager.get_database_editor(&changeset.database_id).await?;
editor
.update_cell_with_changeset(
&changeset.row_id,
&changeset.field_id,
changeset.type_cell_data,
)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn new_select_option_handler(
data: AFPluginData<CreateSelectOptionPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<SelectOptionPB, FlowyError> {
let params: CreateSelectOptionParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
match editor.get_field_rev(&params.field_id).await {
None => Err(ErrorCode::InvalidData.into()),
Some(field_rev) => {
let type_option = select_type_option_from_field_rev(&field_rev)?;
let select_option = type_option.create_option(&params.option_name);
data_result(select_option)
},
}
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_select_option_handler(
data: AFPluginData<SelectOptionChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let changeset: SelectOptionChangeset = data.into_inner().try_into()?;
let editor = manager
.get_database_editor(&changeset.cell_path.database_id)
.await?;
let field_id = changeset.cell_path.field_id.clone();
let (tx, rx) = tokio::sync::oneshot::channel();
editor
.modify_field_rev(&field_id, |field_rev| {
let mut type_option = select_type_option_from_field_rev(field_rev)?;
let mut cell_changeset_str = None;
let mut is_changed = None;
for option in changeset.insert_options {
cell_changeset_str = Some(
SelectOptionCellChangeset::from_insert_option_id(&option.id).to_cell_changeset_str(),
);
type_option.insert_option(option);
is_changed = Some(());
}
for option in changeset.update_options {
type_option.insert_option(option);
is_changed = Some(());
}
for option in changeset.delete_options {
cell_changeset_str = Some(
SelectOptionCellChangeset::from_delete_option_id(&option.id).to_cell_changeset_str(),
);
type_option.delete_option(option);
is_changed = Some(());
}
if is_changed.is_some() {
field_rev.insert_type_option(&*type_option);
}
let _ = tx.send(cell_changeset_str);
Ok(is_changed)
})
.await?;
if let Ok(Some(cell_changeset_str)) = rx.await {
match editor
.update_cell_with_changeset(
&changeset.cell_path.row_id,
&changeset.cell_path.field_id,
cell_changeset_str,
)
.await
{
Ok(_) => {},
Err(e) => tracing::error!("{}", e),
}
}
Ok(())
}
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_select_option_handler(
data: AFPluginData<CellIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<SelectOptionCellDataPB, FlowyError> {
let params: CellIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
match editor.get_field_rev(&params.field_id).await {
None => {
tracing::error!(
"Can't find the select option field with id: {}",
params.field_id
);
data_result(SelectOptionCellDataPB::default())
},
Some(field_rev) => {
//
let cell_rev = editor
.get_cell_rev(&params.row_id, &params.field_id)
.await?;
let type_option = select_type_option_from_field_rev(&field_rev)?;
let type_cell_data: TypeCellData = match cell_rev {
None => TypeCellData {
cell_str: "".to_string(),
field_type: field_rev.ty.into(),
},
Some(cell_rev) => cell_rev.try_into()?,
};
let ids = SelectOptionIds::from_cell_str(&type_cell_data.cell_str)?;
let selected_options = type_option.get_selected_options(ids);
data_result(selected_options)
},
}
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_select_option_cell_handler(
data: AFPluginData<SelectOptionCellChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?;
let editor = manager
.get_database_editor(&params.cell_identifier.database_id)
.await?;
let changeset = SelectOptionCellChangeset {
insert_option_ids: params.insert_option_ids,
delete_option_ids: params.delete_option_ids,
};
editor
.update_cell_with_changeset(
&params.cell_identifier.row_id,
&params.cell_identifier.field_id,
changeset,
)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_date_cell_handler(
data: AFPluginData<DateChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let data = data.into_inner();
let cell_path: CellIdParams = data.cell_path.try_into()?;
let cell_changeset = DateCellChangeset {
date: data.date,
time: data.time,
is_utc: data.is_utc,
};
let editor = manager.get_database_editor(&cell_path.database_id).await?;
editor
.update_cell(cell_path.row_id, cell_path.field_id, cell_changeset)
.await?;
Ok(())
}
#[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_groups_handler(
data: AFPluginData<DatabaseIdPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RepeatedGroupPB, FlowyError> {
let params: DatabaseIdPB = data.into_inner();
let editor = manager.get_database_editor(&params.value).await?;
let group = editor.load_groups().await?;
data_result(group)
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn create_board_card_handler(
data: AFPluginData<CreateBoardCardPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RowPB, FlowyError> {
let params: CreateRowParams = data.into_inner().try_into()?;
let editor = manager
.get_database_editor(params.database_id.as_ref())
.await?;
let row = editor.create_row(params).await?;
data_result(row)
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_group_handler(
data: AFPluginData<MoveGroupPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager>>,
) -> FlowyResult<()> {
let params: MoveGroupParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.view_id.as_ref()).await?;
editor.move_group(params).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_group_row_handler(
  data: AFPluginData<MoveGroupRowPayloadPB>,
  manager: AFPluginState<Arc<DatabaseManager>>,
) -> FlowyResult<()> {
  let params: MoveGroupRowParams = data.into_inner().try_into()?;
  let editor = manager.get_database_editor(params.view_id.as_ref()).await?;
  editor.move_group_row(params).await?;
  Ok(())
}
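// Editorial note: the handlers above all share one shape: decode the protobuf
// payload into a Params struct, resolve the database editor, delegate, and
// wrap the result. A minimal sketch of that shape, using a hypothetical
// DeleteSortPayloadPB / delete_sort pair that is not part of this diff:
//
// #[tracing::instrument(level = "debug", skip(data, manager), err)]
// pub(crate) async fn delete_sort_handler(
//   data: AFPluginData<DeleteSortPayloadPB>,
//   manager: AFPluginState<Arc<DatabaseManager>>,
// ) -> FlowyResult<()> {
//   let params: DeleteSortParams = data.into_inner().try_into()?;
//   let editor = manager.get_database_editor(&params.view_id).await?;
//   editor.delete_sort(params).await?;
//   Ok(())
// }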

View File

@ -6,8 +6,10 @@ use std::sync::Arc;
use strum_macros::Display;
pub fn init(database_manager: Arc<DatabaseManager>) -> AFPlugin {
  let mut plugin = AFPlugin::new()
    .name(env!("CARGO_PKG_NAME"))
    .state(database_manager);
  plugin = plugin
    .event(DatabaseEvent::GetDatabase, get_database_data_handler)
    // .event(GridEvent::GetGridBlocks, get_grid_blocks_handler)
    .event(DatabaseEvent::GetDatabaseSetting, get_database_setting_handler)
@ -47,7 +49,7 @@ pub fn init(database_manager: Arc<DatabaseManager>) -> AFPlugin {
    .event(DatabaseEvent::MoveGroupRow, move_group_row_handler)
    .event(DatabaseEvent::GetGroup, get_groups_handler);

  plugin
}
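// Editorial note: registering a new event is a one-line addition to the chain
// above; for example (the handler registration itself sits in the elided part
// of this diff):
//
// plugin = plugin.event(DatabaseEvent::DeleteAllSorts, delete_all_sorts_handler);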
/// [DatabaseEvent] defines events that are used to interact with the Grid. You could check [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/backend/protobuf)
@ -55,176 +57,176 @@ pub fn init(database_manager: Arc<DatabaseManager>) -> AFPlugin {
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
#[event_err = "FlowyError"]
pub enum DatabaseEvent {
  /// [GetDatabase] event is used to get the [DatabasePB]
  ///
  /// The event handler accepts a [DatabaseIdPB] and returns a [DatabasePB] if there are no errors.
  #[event(input = "DatabaseIdPB", output = "DatabasePB")]
  GetDatabase = 0,

  /// [GetDatabaseSetting] event is used to get the database's settings.
  ///
  /// The event handler accepts a [DatabaseIdPB] and returns a [DatabaseViewSettingPB]
  /// if there are no errors.
  #[event(input = "DatabaseIdPB", output = "DatabaseViewSettingPB")]
  GetDatabaseSetting = 2,

  /// [UpdateDatabaseSetting] event is used to update the database's settings.
  ///
  /// The event handler accepts a [DatabaseSettingChangesetPB] and returns errors if it fails to modify the grid's settings.
  #[event(input = "DatabaseSettingChangesetPB")]
  UpdateDatabaseSetting = 3,

  #[event(input = "DatabaseIdPB", output = "RepeatedFilterPB")]
  GetAllFilters = 4,

  #[event(input = "DatabaseIdPB", output = "RepeatedSortPB")]
  GetAllSorts = 5,

  #[event(input = "DatabaseIdPB")]
  DeleteAllSorts = 6,

  /// [GetFields] event is used to get the database's fields.
  ///
  /// The event handler accepts a [GetFieldPayloadPB] and returns a [RepeatedFieldPB]
  /// if there are no errors.
  #[event(input = "GetFieldPayloadPB", output = "RepeatedFieldPB")]
  GetFields = 10,

  /// [UpdateField] event is used to update a field's attributes.
  ///
  /// The event handler accepts a [FieldChangesetPB] and returns errors if it fails to modify the
  /// field.
  #[event(input = "FieldChangesetPB")]
  UpdateField = 11,

  /// [UpdateFieldTypeOption] event is used to update the field's type-option data. Certain field
  /// types have user-defined options such as color, date format, number format, or a list of values
  /// for a multi-select list. These options are defined within a specialization of the
  /// FieldTypeOption class.
  ///
  /// Check out [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/frontend/grid#fieldtype)
  /// for more information.
  ///
  /// The event handler accepts a [TypeOptionChangesetPB] and returns errors if it fails to modify the
  /// field.
  #[event(input = "TypeOptionChangesetPB")]
  UpdateFieldTypeOption = 12,

  /// [DeleteField] event is used to delete a Field. [DeleteFieldPayloadPB] is the context that
  /// is used to delete the field from the Database.
  #[event(input = "DeleteFieldPayloadPB")]
  DeleteField = 14,

  /// [UpdateFieldType] event is used to update the current Field's type.
  /// It will insert a new FieldTypeOptionData if the new FieldType doesn't exist yet; otherwise it
  /// reuses the existing FieldTypeOptionData. You could check the [DatabaseRevisionPad] for more details.
  #[event(input = "UpdateFieldTypePayloadPB")]
  UpdateFieldType = 20,

  /// [DuplicateField] event is used to duplicate a Field. The duplicated field data is a
  /// deep copy of the target field. The passed-in [DuplicateFieldPayloadPB] is the context that is
  /// used to duplicate the field.
  ///
  /// Returns errors if it fails to duplicate the field.
  ///
  #[event(input = "DuplicateFieldPayloadPB")]
  DuplicateField = 21,

  /// [MoveItem] event is used to move an item. For the moment, Item has two types defined in
  /// [MoveItemTypePB].
  #[event(input = "MoveFieldPayloadPB")]
  MoveField = 22,

  /// [TypeOptionPathPB] event is used to get the FieldTypeOption data for a specific field type.
  ///
  /// Check out the [TypeOptionPB] for more details. If the [FieldTypeOptionData] doesn't exist
  /// for the target type, the [TypeOptionBuilder] will create the default data for that type.
  ///
  /// Returns the [TypeOptionPB] if there are no errors.
  #[event(input = "TypeOptionPathPB", output = "TypeOptionPB")]
  GetTypeOption = 23,

  /// [CreateTypeOption] event is used to create a new FieldTypeOptionData.
  #[event(input = "CreateFieldPayloadPB", output = "TypeOptionPB")]
  CreateTypeOption = 24,

  /// [CreateSelectOption] event is used to create a new select option. Returns a [SelectOptionPB] if
  /// there are no errors.
  #[event(input = "CreateSelectOptionPayloadPB", output = "SelectOptionPB")]
  CreateSelectOption = 30,

  /// [GetSelectOptionCellData] event is used to get the select option data for cell editing.
  /// [CellIdPB] identifies which cell's data will be read. The return value, [SelectOptionCellDataPB],
  /// contains the available options and the currently selected options.
  #[event(input = "CellIdPB", output = "SelectOptionCellDataPB")]
  GetSelectOptionCellData = 31,

  /// [UpdateSelectOption] event is used to update a FieldTypeOptionData whose field_type is
  /// FieldType::SingleSelect or FieldType::MultiSelect.
  ///
  /// This event may trigger the DatabaseNotification::DidUpdateCell event.
  /// For example, DatabaseNotification::DidUpdateCell will be triggered if the [SelectOptionChangesetPB]
  /// carries a change that updates the name of the option.
  #[event(input = "SelectOptionChangesetPB")]
  UpdateSelectOption = 32,

  #[event(input = "CreateRowPayloadPB", output = "RowPB")]
  CreateRow = 50,

  /// [GetRow] event is used to get the row data, [RowPB]. [OptionalRowPB] is a wrapper that makes
  /// it possible to return nullable row data.
  #[event(input = "RowIdPB", output = "OptionalRowPB")]
  GetRow = 51,

  #[event(input = "RowIdPB")]
  DeleteRow = 52,

  #[event(input = "RowIdPB")]
  DuplicateRow = 53,

  #[event(input = "MoveRowPayloadPB")]
  MoveRow = 54,

  #[event(input = "CellIdPB", output = "CellPB")]
  GetCell = 70,

  /// [UpdateCell] event is used to update the cell content. The passed-in data, [CellChangesetPB],
  /// carries the changes that will be applied to the cell content by calling the `update_cell` function.
  ///
  /// The 'content' property of the [CellChangesetPB] is a String type. It can be used directly if the
  /// cell uses string data, for example the TextCell or NumberCell.
  ///
  /// But it can also be treated as a generic type, because we can use [serde] to deserialize the string
  /// into a specific data type. For the moment, the 'content' will be deserialized to a concrete type
  /// when the FieldType is SingleSelect, DateTime, or MultiSelect. Please see
  /// the [UpdateSelectOptionCell] and [UpdateDateCell] events for more details.
  #[event(input = "CellChangesetPB")]
  UpdateCell = 71,

  /// [UpdateSelectOptionCell] event is used to update a select option cell's data. [SelectOptionCellChangesetPB]
  /// contains options that will be deleted or inserted. It can be cast to [CellChangesetPB] that
  /// will be used by the `update_cell` function.
  #[event(input = "SelectOptionCellChangesetPB")]
  UpdateSelectOptionCell = 72,

  /// [UpdateDateCell] event is used to update a date cell's data. [DateChangesetPB]
  /// contains the date and the time string. It can be cast to [CellChangesetPB] that
  /// will be used by the `update_cell` function.
  #[event(input = "DateChangesetPB")]
  UpdateDateCell = 80,

  #[event(input = "DatabaseIdPB", output = "RepeatedGroupPB")]
  GetGroup = 100,

  #[event(input = "CreateBoardCardPayloadPB", output = "RowPB")]
  CreateBoardCard = 110,

  #[event(input = "MoveGroupPayloadPB")]
  MoveGroup = 111,

  #[event(input = "MoveGroupRowPayloadPB")]
  MoveGroupRow = 112,

  #[event(input = "MoveGroupRowPayloadPB")]
  GroupByField = 113,
}
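// Editorial sketch (not part of this commit): every variant carries an
// explicit discriminant so its value stays stable across the protobuf
// boundary, and the numbering leaves gaps (6 -> 10, 32 -> 50) for future
// events. Because the enum is fieldless, the wire code is just a cast:
fn event_code(event: DatabaseEvent) -> i32 {
  event as i32 // e.g. event_code(DatabaseEvent::GetFields) == 10
}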

View File

@ -1,92 +1,92 @@
#[macro_export]
macro_rules! impl_into_box_type_option_builder {
  ($target: ident) => {
    impl std::convert::From<$target> for BoxTypeOptionBuilder {
      fn from(target: $target) -> BoxTypeOptionBuilder {
        Box::new(target)
      }
    }
  };
}

macro_rules! impl_builder_from_json_str_and_from_bytes {
  ($target: ident,$type_option: ident) => {
    impl $target {
      pub fn from_protobuf_bytes(bytes: Bytes) -> $target {
        let type_option = $type_option::from_protobuf_bytes(bytes);
        $target(type_option)
      }

      pub fn from_json_str(s: &str) -> $target {
        let type_option = $type_option::from_json_str(s);
        $target(type_option)
      }
    }
  };
}

#[macro_export]
macro_rules! impl_type_option {
  ($target: ident, $field_type:expr) => {
    impl std::convert::From<&FieldRevision> for $target {
      fn from(field_rev: &FieldRevision) -> $target {
        match field_rev.get_type_option::<$target>($field_type.into()) {
          None => $target::default(),
          Some(target) => target,
        }
      }
    }

    impl std::convert::From<&std::sync::Arc<FieldRevision>> for $target {
      fn from(field_rev: &std::sync::Arc<FieldRevision>) -> $target {
        match field_rev.get_type_option::<$target>($field_type.into()) {
          None => $target::default(),
          Some(target) => target,
        }
      }
    }

    impl std::convert::From<$target> for String {
      fn from(type_option: $target) -> String {
        type_option.json_str()
      }
    }

    impl TypeOptionDataSerializer for $target {
      fn json_str(&self) -> String {
        match serde_json::to_string(&self) {
          Ok(s) => s,
          Err(e) => {
            tracing::error!("Field type data serialize to json fail, error: {:?}", e);
            serde_json::to_string(&$target::default()).unwrap()
          },
        }
      }

      fn protobuf_bytes(&self) -> Bytes {
        self.clone().try_into().unwrap()
      }
    }

    impl TypeOptionDataDeserializer for $target {
      fn from_json_str(s: &str) -> $target {
        match serde_json::from_str(s) {
          Ok(obj) => obj,
          Err(err) => {
            tracing::error!(
              "{} type option deserialize from {} failed, {:?}",
              stringify!($target),
              s,
              err
            );
            $target::default()
          },
        }
      }

      fn from_protobuf_bytes(bytes: Bytes) -> $target {
        $target::try_from(bytes).unwrap_or($target::default())
      }
    }
  };
}
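// Editorial sketch (not part of this commit): a concrete type-option struct
// opts into this glue by invoking the macro with its name and FieldType,
// roughly as the field types elsewhere in the crate do. The struct shown here
// is a simplified assumption:
//
// #[derive(Clone, Debug, Default, Serialize, Deserialize, ProtoBuf)]
// pub struct RichTextTypeOptionPB {
//   #[pb(index = 1)]
//   data: String,
// }
// impl_type_option!(RichTextTypeOptionPB, FieldType::RichText);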

View File

@ -1,21 +1,23 @@
use crate::entities::LayoutTypePB;
use crate::services::grid_editor::{
  DatabaseRevisionEditor, GridRevisionCloudService, GridRevisionMergeable, GridRevisionSerde,
};
use crate::services::persistence::block_index::BlockIndexCache;
use crate::services::persistence::kv::DatabaseKVPersistence;
use crate::services::persistence::migration::DatabaseMigration;
use crate::services::persistence::rev_sqlite::{
  SQLiteDatabaseRevisionPersistence, SQLiteDatabaseRevisionSnapshotPersistence,
};
use crate::services::persistence::GridDatabase;
use crate::services::view_editor::make_database_view_rev_manager;
use bytes::Bytes;
use flowy_client_sync::client_database::{
  make_database_block_operations, make_database_operations, make_grid_view_operations,
};
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{
  RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration, RevisionWebSocket,
};
use flowy_sqlite::ConnectionPool;
use grid_model::{BuildDatabaseContext, DatabaseRevision, DatabaseViewRevision};
use lib_infra::async_trait::async_trait;
@ -28,228 +30,264 @@ use std::sync::Arc;
use tokio::sync::RwLock;
pub trait DatabaseUser: Send + Sync {
  fn user_id(&self) -> Result<String, FlowyError>;
  fn token(&self) -> Result<String, FlowyError>;
  fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError>;
}
pub struct DatabaseManager {
  database_editors: RwLock<RefCountHashMap<Arc<DatabaseRevisionEditor>>>,
  database_user: Arc<dyn DatabaseUser>,
  block_index_cache: Arc<BlockIndexCache>,
  #[allow(dead_code)]
  kv_persistence: Arc<DatabaseKVPersistence>,
  task_scheduler: Arc<RwLock<TaskDispatcher>>,
  migration: DatabaseMigration,
}
impl DatabaseManager {
  pub fn new(
    grid_user: Arc<dyn DatabaseUser>,
    _rev_web_socket: Arc<dyn RevisionWebSocket>,
    task_scheduler: Arc<RwLock<TaskDispatcher>>,
    database: Arc<dyn GridDatabase>,
  ) -> Self {
    let grid_editors = RwLock::new(RefCountHashMap::new());
    let kv_persistence = Arc::new(DatabaseKVPersistence::new(database.clone()));
    let block_index_cache = Arc::new(BlockIndexCache::new(database.clone()));
    let migration = DatabaseMigration::new(grid_user.clone(), database);
    Self {
      database_editors: grid_editors,
      database_user: grid_user,
      kv_persistence,
      block_index_cache,
      task_scheduler,
      migration,
    }
  }

  pub async fn initialize_with_new_user(&self, _user_id: &str, _token: &str) -> FlowyResult<()> {
    Ok(())
  }

  pub async fn initialize(&self, _user_id: &str, _token: &str) -> FlowyResult<()> {
    Ok(())
  }

  #[tracing::instrument(level = "debug", skip_all, err)]
  pub async fn create_database<T: AsRef<str>>(
    &self,
    database_id: T,
    revisions: Vec<Revision>,
  ) -> FlowyResult<()> {
    let database_id = database_id.as_ref();
    let db_pool = self.database_user.db_pool()?;
    let rev_manager = self.make_database_rev_manager(database_id, db_pool)?;
    rev_manager.reset_object(revisions).await?;
    Ok(())
  }

  #[tracing::instrument(level = "debug", skip_all, err)]
  async fn create_database_view<T: AsRef<str>>(
    &self,
    view_id: T,
    revisions: Vec<Revision>,
  ) -> FlowyResult<()> {
    let view_id = view_id.as_ref();
    let rev_manager = make_database_view_rev_manager(&self.database_user, view_id).await?;
    rev_manager.reset_object(revisions).await?;
    Ok(())
  }

  #[tracing::instrument(level = "debug", skip_all, err)]
  pub async fn create_database_block<T: AsRef<str>>(
    &self,
    block_id: T,
    revisions: Vec<Revision>,
  ) -> FlowyResult<()> {
    let block_id = block_id.as_ref();
    let rev_manager = make_database_block_rev_manager(&self.database_user, block_id)?;
    rev_manager.reset_object(revisions).await?;
    Ok(())
  }

  pub async fn open_database<T: AsRef<str>>(
    &self,
    database_id: T,
  ) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
    let database_id = database_id.as_ref();
    let _ = self.migration.run_v1_migration(database_id).await;
    self.get_or_create_database_editor(database_id).await
  }

  #[tracing::instrument(level = "debug", skip_all, fields(database_id), err)]
  pub async fn close_database<T: AsRef<str>>(&self, database_id: T) -> FlowyResult<()> {
    let database_id = database_id.as_ref();
    tracing::Span::current().record("database_id", database_id);
    self
      .database_editors
      .write()
      .await
      .remove(database_id)
      .await;
    Ok(())
  }

  // #[tracing::instrument(level = "debug", skip(self), err)]
  pub async fn get_database_editor(
    &self,
    database_id: &str,
  ) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
    let read_guard = self.database_editors.read().await;
    let editor = read_guard.get(database_id);
    match editor {
      None => {
        // Drop the read_guard ASAP in case of the following read/write lock
        drop(read_guard);
        self.open_database(database_id).await
      },
      Some(editor) => Ok(editor),
    }
  }

  async fn get_or_create_database_editor(
    &self,
    database_id: &str,
  ) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
    if let Some(editor) = self.database_editors.read().await.get(database_id) {
      return Ok(editor);
    }

    let mut database_editors = self.database_editors.write().await;
    let db_pool = self.database_user.db_pool()?;
    let editor = self.make_database_rev_editor(database_id, db_pool).await?;
    tracing::trace!("Open database: {}", database_id);
    database_editors.insert(database_id.to_string(), editor.clone());
    Ok(editor)
  }

  #[tracing::instrument(level = "trace", skip(self, pool), err)]
  async fn make_database_rev_editor(
    &self,
    database_id: &str,
    pool: Arc<ConnectionPool>,
  ) -> Result<Arc<DatabaseRevisionEditor>, FlowyError> {
    let user = self.database_user.clone();
    let token = user.token()?;
    let cloud = Arc::new(GridRevisionCloudService::new(token));
    let mut rev_manager = self.make_database_rev_manager(database_id, pool.clone())?;
    let database_pad = Arc::new(RwLock::new(
      rev_manager
        .initialize::<GridRevisionSerde>(Some(cloud))
        .await?,
    ));
    let database_editor = DatabaseRevisionEditor::new(
      database_id,
      user,
      database_pad,
      rev_manager,
      self.block_index_cache.clone(),
      self.task_scheduler.clone(),
    )
    .await?;
    Ok(database_editor)
  }

  #[tracing::instrument(level = "trace", skip(self, pool), err)]
  pub fn make_database_rev_manager(
    &self,
    database_id: &str,
    pool: Arc<ConnectionPool>,
  ) -> FlowyResult<RevisionManager<Arc<ConnectionPool>>> {
    let user_id = self.database_user.user_id()?;

    // Create revision persistence
    let disk_cache = SQLiteDatabaseRevisionPersistence::new(&user_id, pool.clone());
    let configuration = RevisionPersistenceConfiguration::new(6, false);
    let rev_persistence =
      RevisionPersistence::new(&user_id, database_id, disk_cache, configuration);

    // Create snapshot persistence
    let snapshot_object_id = format!("grid:{}", database_id);
    let snapshot_persistence =
      SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);

    let rev_compress = GridRevisionMergeable();
    let rev_manager = RevisionManager::new(
      &user_id,
      database_id,
      rev_persistence,
      rev_compress,
      snapshot_persistence,
    );
    Ok(rev_manager)
  }
}
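// Editorial sketch (not part of this commit): get_or_create_database_editor
// above uses the check-under-read-lock, then insert-under-write-lock pattern.
// A self-contained demo of the same pattern on tokio's RwLock, with
// hypothetical names:
use std::{collections::HashMap, sync::Arc as StdArc};

async fn get_or_insert(
  map: &tokio::sync::RwLock<HashMap<String, StdArc<String>>>,
  key: &str,
) -> StdArc<String> {
  if let Some(value) = map.read().await.get(key) {
    return value.clone();
  }
  // Another task may have raced us between the two locks, so insert idempotently.
  let mut write_guard = map.write().await;
  write_guard
    .entry(key.to_string())
    .or_insert_with(|| StdArc::new(format!("editor for {}", key)))
    .clone()
}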
pub async fn make_database_view_data(
  _user_id: &str,
  view_id: &str,
  layout: LayoutTypePB,
  database_manager: Arc<DatabaseManager>,
  build_context: BuildDatabaseContext,
) -> FlowyResult<Bytes> {
  let BuildDatabaseContext {
    field_revs,
    block_metas,
    blocks,
    grid_view_revision_data,
  } = build_context;

  for block_meta_data in &blocks {
    let block_id = &block_meta_data.block_id;
    // Indexing the block's rows
    block_meta_data.rows.iter().for_each(|row| {
      let _ = database_manager
        .block_index_cache
        .insert(&row.block_id, &row.id);
    });

    // Create grid's block
    let grid_block_delta = make_database_block_operations(block_meta_data);
    let block_delta_data = grid_block_delta.json_bytes();
    let revision = Revision::initial_revision(block_id, block_delta_data);
    database_manager
      .create_database_block(&block_id, vec![revision])
      .await?;
  }

  // Will replace the grid_id with the value returned by the gen_grid_id()
  let grid_id = view_id.to_owned();
  let grid_rev = DatabaseRevision::from_build_context(&grid_id, field_revs, block_metas);

  // Create grid
  let grid_rev_delta = make_database_operations(&grid_rev);
  let grid_rev_delta_bytes = grid_rev_delta.json_bytes();
  let revision = Revision::initial_revision(&grid_id, grid_rev_delta_bytes.clone());
  database_manager
    .create_database(&grid_id, vec![revision])
    .await?;

  // Create grid view
  let grid_view = if grid_view_revision_data.is_empty() {
    DatabaseViewRevision::new(grid_id, view_id.to_owned(), layout.into())
  } else {
    DatabaseViewRevision::from_json(grid_view_revision_data)?
  };
  let grid_view_delta = make_grid_view_operations(&grid_view);
  let grid_view_delta_bytes = grid_view_delta.json_bytes();
  let revision = Revision::initial_revision(view_id, grid_view_delta_bytes);
  database_manager
    .create_database_view(view_id, vec![revision])
    .await?;

  Ok(grid_rev_delta_bytes)
}
#[async_trait]
impl RefCountValue for DatabaseRevisionEditor {
  async fn did_remove(&self) {
    self.close().await;
  }
}

View File

@ -4,48 +4,48 @@ const OBSERVABLE_CATEGORY: &str = "Grid";
#[derive(ProtoBuf_Enum, Debug)]
pub enum DatabaseNotification {
  Unknown = 0,

  /// Trigger after inserting/deleting/updating a row
  DidUpdateViewRows = 20,

  /// Trigger when the visibility of the row was changed. For example, updating the filter will trigger the notification
  DidUpdateViewRowsVisibility = 21,

  /// Trigger after inserting/deleting/updating a field
  DidUpdateFields = 22,

  /// Trigger after editing a cell
  DidUpdateCell = 40,

  /// Trigger after editing a field's properties, including renaming it, updating its type option, etc.
  DidUpdateField = 50,

  /// Trigger after the number of groups is changed
  DidUpdateGroups = 60,

  /// Trigger after inserting/deleting/updating/moving a row
  DidUpdateGroupRow = 61,

  /// Trigger when setting a new grouping field
  DidGroupByField = 62,

  /// Trigger after inserting/deleting/updating a filter
  DidUpdateFilter = 63,

  /// Trigger after inserting/deleting/updating a sort
  DidUpdateSort = 64,

  /// Trigger after the sort configurations are changed
  DidReorderRows = 65,

  /// Trigger after editing the row that hit the sort rule
  DidReorderSingleRow = 66,

  /// Trigger when the settings of the database are changed
  DidUpdateSettings = 70,
}
impl std::default::Default for DatabaseNotification {
  fn default() -> Self {
    DatabaseNotification::Unknown
  }
}
impl std::convert::From<DatabaseNotification> for i32 {
  fn from(notification: DatabaseNotification) -> Self {
    notification as i32
  }
}
#[tracing::instrument(level = "trace")]
pub fn send_notification(id: &str, ty: DatabaseNotification) -> NotificationBuilder {
  NotificationBuilder::new(id, ty, OBSERVABLE_CATEGORY)
}
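// Editorial sketch (not part of this commit): a typical call site pairs the
// builder with a payload and fires it. The payload()/send() methods are
// assumed from the flowy-notification NotificationBuilder API, which this
// diff does not show:
//
// send_notification(&cell_id, DatabaseNotification::DidUpdateCell)
//   .payload(cell_changeset_pb)
//   .send();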

View File

@ -4,7 +4,8 @@ use flowy_client_sync::client_database::{GridBlockRevisionChangeset, GridBlockRe
use flowy_client_sync::make_operations_from_revisions;
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{
  RevisionCloudService, RevisionManager, RevisionMergeable, RevisionObjectDeserializer,
  RevisionObjectSerializer,
};
use flowy_sqlite::ConnectionPool;
use grid_model::{CellRevision, DatabaseBlockRevision, RowChangeset, RowRevision};
@ -17,201 +18,218 @@ use std::sync::Arc;
use tokio::sync::RwLock;
pub struct DatabaseBlockRevisionEditor {
  #[allow(dead_code)]
  user_id: String,
  pub block_id: String,
  pad: Arc<RwLock<GridBlockRevisionPad>>,
  rev_manager: Arc<RevisionManager<Arc<ConnectionPool>>>,
}
impl DatabaseBlockRevisionEditor {
  pub async fn new(
    user_id: &str,
    token: &str,
    block_id: &str,
    mut rev_manager: RevisionManager<Arc<ConnectionPool>>,
  ) -> FlowyResult<Self> {
    let cloud = Arc::new(GridBlockRevisionCloudService {
      token: token.to_owned(),
    });
    let block_revision_pad = rev_manager
      .initialize::<DatabaseBlockRevisionSerde>(Some(cloud))
      .await?;
    let pad = Arc::new(RwLock::new(block_revision_pad));
    let rev_manager = Arc::new(rev_manager);
    let user_id = user_id.to_owned();
    let block_id = block_id.to_owned();
    Ok(Self {
      user_id,
      block_id,
      pad,
      rev_manager,
    })
  }

  pub async fn close(&self) {
    self.rev_manager.generate_snapshot().await;
    self.rev_manager.close().await;
  }

  pub async fn duplicate_block(&self, duplicated_block_id: &str) -> DatabaseBlockRevision {
    self.pad.read().await.duplicate_data(duplicated_block_id)
  }

  /// Create a row after the row with prev_row_id. If prev_row_id is None, the row will be appended to the list
  pub(crate) async fn create_row(
    &self,
    row: RowRevision,
    prev_row_id: Option<String>,
  ) -> FlowyResult<(i32, Option<i32>)> {
    let mut row_count = 0;
    let mut row_index = None;
    self
      .modify(|block_pad| {
        if let Some(start_row_id) = prev_row_id.as_ref() {
          match block_pad.index_of_row(start_row_id) {
            None => {},
            Some(index) => row_index = Some(index as i32 + 1),
          }
        }

        let change = block_pad.add_row_rev(row, prev_row_id)?;
        row_count = block_pad.number_of_rows();

        if row_index.is_none() {
          row_index = Some(row_count - 1);
        }
        Ok(change)
      })
      .await?;

    Ok((row_count, row_index))
  }

  pub async fn delete_rows(&self, ids: Vec<Cow<'_, String>>) -> FlowyResult<i32> {
    let mut row_count = 0;
    self
      .modify(|block_pad| {
        let changeset = block_pad.delete_rows(ids)?;
        row_count = block_pad.number_of_rows();
        Ok(changeset)
      })
      .await?;
    Ok(row_count)
  }

  pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
    self
      .modify(|block_pad| Ok(block_pad.update_row(changeset)?))
      .await?;
    Ok(())
  }

  pub async fn move_row(&self, row_id: &str, from: usize, to: usize) -> FlowyResult<()> {
    self
      .modify(|block_pad| Ok(block_pad.move_row(row_id, from, to)?))
      .await?;
    Ok(())
  }

  pub async fn index_of_row(&self, row_id: &str) -> Option<usize> {
    self.pad.read().await.index_of_row(row_id)
  }

  pub async fn number_of_rows(&self) -> i32 {
    self.pad.read().await.rows.len() as i32
  }

  pub async fn get_row_rev(&self, row_id: &str) -> FlowyResult<Option<(usize, Arc<RowRevision>)>> {
    if let Ok(pad) = self.pad.try_read() {
      Ok(pad.get_row_rev(row_id))
    } else {
      tracing::error!("Required grid block read lock failed, retrying");
      let retry = GetRowDataRetryAction {
        row_id: row_id.to_owned(),
        pad: self.pad.clone(),
      };
      match spawn_retry(3, 300, retry).await {
        Ok(value) => Ok(value),
        Err(err) => {
          tracing::error!("Read row revision failed with: {}", err);
          Ok(None)
        },
      }
    }
  }

  pub async fn get_row_revs<T>(
    &self,
    row_ids: Option<Vec<Cow<'_, T>>>,
  ) -> FlowyResult<Vec<Arc<RowRevision>>>
  where
    T: AsRef<str> + ToOwned + ?Sized,
  {
    let row_revs = self.pad.read().await.get_row_revs(row_ids)?;
    Ok(row_revs)
  }

  pub async fn get_cell_revs(
    &self,
    field_id: &str,
    row_ids: Option<Vec<Cow<'_, String>>>,
  ) -> FlowyResult<Vec<CellRevision>> {
    let cell_revs = self.pad.read().await.get_cell_revs(field_id, row_ids)?;
    Ok(cell_revs)
  }

  async fn modify<F>(&self, f: F) -> FlowyResult<()>
  where
    F: for<'a> FnOnce(
      &'a mut GridBlockRevisionPad,
    ) -> FlowyResult<Option<GridBlockRevisionChangeset>>,
  {
    let mut write_guard = self.pad.write().await;
    let changeset = f(&mut write_guard)?;
    match changeset {
      None => {},
      Some(changeset) => {
        self.apply_change(changeset).await?;
      },
    }
    Ok(())
  }

  async fn apply_change(&self, change: GridBlockRevisionChangeset) -> FlowyResult<()> {
    let GridBlockRevisionChangeset {
      operations: delta,
      md5,
    } = change;
    let data = delta.json_bytes();
    let _ = self.rev_manager.add_local_revision(data, md5).await?;
    Ok(())
  }
}
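// Editorial sketch (not part of this commit): every mutation above funnels
// through modify(): the closure edits the pad and returns an optional
// changeset, and apply_change() persists it as a local revision. A new
// mutation would follow the same recipe; set_row_height here is hypothetical:
//
// pub async fn set_row_height(&self, row_id: &str, height: i32) -> FlowyResult<()> {
//   self
//     .modify(|block_pad| Ok(block_pad.set_row_height(row_id, height)?))
//     .await?;
//   Ok(())
// }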
struct GridBlockRevisionCloudService {
  #[allow(dead_code)]
  token: String,
}
impl RevisionCloudService for GridBlockRevisionCloudService {
  #[tracing::instrument(level = "trace", skip(self))]
  fn fetch_object(
    &self,
    _user_id: &str,
    _object_id: &str,
  ) -> FutureResult<Vec<Revision>, FlowyError> {
    FutureResult::new(async move { Ok(vec![]) })
  }
}
struct DatabaseBlockRevisionSerde();
impl RevisionObjectDeserializer for DatabaseBlockRevisionSerde {
  type Output = GridBlockRevisionPad;
  fn deserialize_revisions(object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
    let pad = GridBlockRevisionPad::from_revisions(object_id, revisions)?;
    Ok(pad)
  }
  fn recover_from_revisions(_revisions: Vec<Revision>) -> Option<(Self::Output, i64)> {
    None
  }
}
impl RevisionObjectSerializer for DatabaseBlockRevisionSerde {
  fn combine_revisions(revisions: Vec<Revision>) -> FlowyResult<Bytes> {
    let operations = make_operations_from_revisions::<EmptyAttributes>(revisions)?;
    Ok(operations.json_bytes())
  }
}
pub struct GridBlockRevisionMergeable();
impl RevisionMergeable for GridBlockRevisionMergeable {
  fn combine_revisions(&self, revisions: Vec<Revision>) -> FlowyResult<Bytes> {
    DatabaseBlockRevisionSerde::combine_revisions(revisions)
  }
}

View File

@ -4,14 +4,16 @@ use crate::notification::{send_notification, DatabaseNotification};
use crate::services::block_editor::{DatabaseBlockRevisionEditor, GridBlockRevisionMergeable};
use crate::services::persistence::block_index::BlockIndexCache;
use crate::services::persistence::rev_sqlite::{
  SQLiteDatabaseBlockRevisionPersistence, SQLiteDatabaseRevisionSnapshotPersistence,
};
use crate::services::row::{make_row_from_row_rev, DatabaseBlockRow, DatabaseBlockRowRevision};
use dashmap::DashMap;
use flowy_error::FlowyResult;
use flowy_revision::{RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration};
use flowy_sqlite::ConnectionPool;
use grid_model::{
  GridBlockMetaRevision, GridBlockMetaRevisionChangeset, RowChangeset, RowRevision,
};
use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;
@ -19,301 +21,335 @@ use tokio::sync::broadcast;
#[derive(Debug, Clone)]
pub enum DatabaseBlockEvent {
  InsertRow {
    block_id: String,
    row: InsertedRowPB,
  },
  UpdateRow {
    block_id: String,
    row: UpdatedRowPB,
  },
  DeleteRow {
    block_id: String,
    row_id: String,
  },
  Move {
    block_id: String,
    deleted_row_id: String,
    inserted_row: InsertedRowPB,
  },
}
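// Editorial sketch (not part of this commit): DatabaseBlockEvent fans out over
// a tokio broadcast channel (see the event_notifier field below), so an
// observer is a plain recv() loop:
async fn observe_block_events(mut rx: tokio::sync::broadcast::Receiver<DatabaseBlockEvent>) {
  while let Ok(event) = rx.recv().await {
    match event {
      DatabaseBlockEvent::InsertRow { block_id, .. } => {
        tracing::trace!("row inserted into block {}", block_id)
      },
      _ => {},
    }
  }
}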
type BlockId = String;
pub(crate) struct DatabaseBlockManager {
  user: Arc<dyn DatabaseUser>,
  persistence: Arc<BlockIndexCache>,
  block_editors: DashMap<BlockId, Arc<DatabaseBlockRevisionEditor>>,
  event_notifier: broadcast::Sender<DatabaseBlockEvent>,
}
impl DatabaseBlockManager {
pub(crate) async fn new(
user: &Arc<dyn DatabaseUser>,
block_meta_revs: Vec<Arc<GridBlockMetaRevision>>,
persistence: Arc<BlockIndexCache>,
event_notifier: broadcast::Sender<DatabaseBlockEvent>,
) -> FlowyResult<Self> {
let block_editors = make_block_editors(user, block_meta_revs).await?;
let user = user.clone();
let manager = Self {
user,
block_editors,
persistence,
event_notifier,
};
Ok(manager)
}
pub(crate) async fn new(
user: &Arc<dyn DatabaseUser>,
block_meta_revs: Vec<Arc<GridBlockMetaRevision>>,
persistence: Arc<BlockIndexCache>,
event_notifier: broadcast::Sender<DatabaseBlockEvent>,
) -> FlowyResult<Self> {
let block_editors = make_block_editors(user, block_meta_revs).await?;
let user = user.clone();
let manager = Self {
user,
block_editors,
persistence,
event_notifier,
};
Ok(manager)
}
pub async fn close(&self) {
for block_editor in self.block_editors.iter() {
block_editor.close().await;
}
pub async fn close(&self) {
for block_editor in self.block_editors.iter() {
block_editor.close().await;
}
}
// #[tracing::instrument(level = "trace", skip(self))]
pub(crate) async fn get_block_editor(&self, block_id: &str) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
debug_assert!(!block_id.is_empty());
match self.block_editors.get(block_id) {
None => {
tracing::error!("This is a fatal error, block with id:{} is not exist", block_id);
let editor = Arc::new(make_database_block_editor(&self.user, block_id).await?);
self.block_editors.insert(block_id.to_owned(), editor.clone());
Ok(editor)
}
Some(editor) => Ok(editor.clone()),
}
// #[tracing::instrument(level = "trace", skip(self))]
pub(crate) async fn get_block_editor(
&self,
block_id: &str,
) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
debug_assert!(!block_id.is_empty());
match self.block_editors.get(block_id) {
None => {
tracing::error!(
"This is a fatal error, block with id:{} is not exist",
block_id
);
let editor = Arc::new(make_database_block_editor(&self.user, block_id).await?);
self
.block_editors
.insert(block_id.to_owned(), editor.clone());
Ok(editor)
},
Some(editor) => Ok(editor.clone()),
}
}
pub(crate) async fn get_editor_from_row_id(&self, row_id: &str) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
let block_id = self.persistence.get_block_id(row_id)?;
self.get_block_editor(&block_id).await
}
pub(crate) async fn get_editor_from_row_id(
&self,
row_id: &str,
) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
let block_id = self.persistence.get_block_id(row_id)?;
self.get_block_editor(&block_id).await
}
#[tracing::instrument(level = "trace", skip(self, start_row_id), err)]
pub(crate) async fn create_row(&self, row_rev: RowRevision, start_row_id: Option<String>) -> FlowyResult<i32> {
let block_id = row_rev.block_id.clone();
#[tracing::instrument(level = "trace", skip(self, start_row_id), err)]
pub(crate) async fn create_row(
&self,
row_rev: RowRevision,
start_row_id: Option<String>,
) -> FlowyResult<i32> {
let block_id = row_rev.block_id.clone();
self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
let editor = self.get_block_editor(&row_rev.block_id).await?;
let mut row = InsertedRowPB::from(&row_rev);
let (number_of_rows, index) = editor.create_row(row_rev, start_row_id).await?;
row.index = index;
let _ = self
.event_notifier
.send(DatabaseBlockEvent::InsertRow { block_id, row });
Ok(number_of_rows)
}
pub(crate) async fn insert_row(
  &self,
  rows_by_block_id: HashMap<String, Vec<RowRevision>>,
) -> FlowyResult<Vec<GridBlockMetaRevisionChangeset>> {
  let mut changesets = vec![];
  for (block_id, row_revs) in rows_by_block_id {
    let editor = self.get_block_editor(&block_id).await?;
    for row_rev in row_revs {
      self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
      let mut row = InsertedRowPB::from(&row_rev);
      row.index = editor.create_row(row_rev, None).await?.1;
      let _ = self.event_notifier.send(DatabaseBlockEvent::InsertRow {
        block_id: block_id.clone(),
        row,
      });
    }
    changesets.push(GridBlockMetaRevisionChangeset::from_row_count(
      block_id.clone(),
      editor.number_of_rows().await,
    ));
  }

  Ok(changesets)
}
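// Usage sketch (not part of the diff): group loose RowRevisions by block id before
// calling insert_row above. `BlockManager` stands in for the surrounding type, whose
// name lies outside this hunk; the rows are assumed to be built elsewhere.
async fn insert_rows_example(
  manager: &BlockManager,
  row_revs: Vec<RowRevision>,
) -> FlowyResult<()> {
  let mut rows_by_block_id: HashMap<String, Vec<RowRevision>> = HashMap::new();
  for row_rev in row_revs {
    rows_by_block_id
      .entry(row_rev.block_id.clone())
      .or_default()
      .push(row_rev);
  }
  let _changesets = manager.insert_row(rows_by_block_id).await?;
  Ok(())
}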
pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
  let editor = self.get_editor_from_row_id(&changeset.row_id).await?;
  editor.update_row(changeset.clone()).await?;
  match editor.get_row_rev(&changeset.row_id).await? {
    None => tracing::error!(
      "Update row failed, can't find the row with id: {}",
      changeset.row_id
    ),
    Some((_, row_rev)) => {
      let changed_field_ids = changeset
        .cell_by_field_id
        .keys()
        .cloned()
        .collect::<Vec<String>>();
      let row = UpdatedRowPB {
        row: make_row_from_row_rev(row_rev),
        field_ids: changed_field_ids,
      };
      let _ = self.event_notifier.send(DatabaseBlockEvent::UpdateRow {
        block_id: editor.block_id.clone(),
        row,
      });
    },
  }
  Ok(())
}

#[tracing::instrument(level = "trace", skip_all, err)]
pub async fn delete_row(&self, row_id: &str) -> FlowyResult<Option<Arc<RowRevision>>> {
  let row_id = row_id.to_owned();
  let block_id = self.persistence.get_block_id(&row_id)?;
  let editor = self.get_block_editor(&block_id).await?;
  match editor.get_row_rev(&row_id).await? {
    None => Ok(None),
    Some((_, row_rev)) => {
      let _ = editor.delete_rows(vec![Cow::Borrowed(&row_id)]).await?;
      let _ = self.event_notifier.send(DatabaseBlockEvent::DeleteRow {
        block_id: editor.block_id.clone(),
        row_id: row_rev.id.clone(),
      });
      Ok(Some(row_rev))
    },
  }
}

pub(crate) async fn delete_rows(
  &self,
  block_rows: Vec<DatabaseBlockRow>,
) -> FlowyResult<Vec<GridBlockMetaRevisionChangeset>> {
  let mut changesets = vec![];
  for block_row in block_rows {
    let editor = self.get_block_editor(&block_row.block_id).await?;
    let row_ids = block_row
      .row_ids
      .into_iter()
      .map(Cow::Owned)
      .collect::<Vec<Cow<String>>>();
    let row_count = editor.delete_rows(row_ids).await?;
    let changeset = GridBlockMetaRevisionChangeset::from_row_count(block_row.block_id, row_count);
    changesets.push(changeset);
  }

  Ok(changesets)
}

// This function will be moved to GridViewRevisionEditor
pub(crate) async fn move_row(
  &self,
  row_rev: Arc<RowRevision>,
  from: usize,
  to: usize,
) -> FlowyResult<()> {
  let editor = self.get_editor_from_row_id(&row_rev.id).await?;
  editor.move_row(&row_rev.id, from, to).await?;

  let delete_row_id = row_rev.id.clone();
  let insert_row = InsertedRowPB {
    index: Some(to as i32),
    row: make_row_from_row_rev(row_rev),
    is_new: false,
  };

  let _ = self.event_notifier.send(DatabaseBlockEvent::Move {
    block_id: editor.block_id.clone(),
    deleted_row_id: delete_row_id,
    inserted_row: insert_row,
  });

  Ok(())
}
// This function will be moved to GridViewRevisionEditor.
pub async fn index_of_row(&self, row_id: &str) -> Option<usize> {
match self.get_editor_from_row_id(row_id).await {
Ok(editor) => editor.index_of_row(row_id).await,
Err(_) => None,
}
}
pub async fn update_cell(&self, changeset: CellChangesetPB) -> FlowyResult<()> {
let row_changeset: RowChangeset = changeset.clone().into();
self.update_row(row_changeset).await?;
self.notify_did_update_cell(changeset).await?;
Ok(())
}
pub async fn get_row_rev(&self, row_id: &str) -> FlowyResult<Option<(usize, Arc<RowRevision>)>> {
let editor = self.get_editor_from_row_id(row_id).await?;
editor.get_row_rev(row_id).await
}
#[allow(dead_code)]
pub async fn get_row_revs(&self) -> FlowyResult<Vec<Arc<RowRevision>>> {
let mut row_revs = vec![];
for iter in self.block_editors.iter() {
let editor = iter.value();
row_revs.extend(editor.get_row_revs::<&str>(None).await?);
}
Ok(row_revs)
}
pub(crate) async fn get_blocks(
  &self,
  block_ids: Option<Vec<String>>,
) -> FlowyResult<Vec<DatabaseBlockRowRevision>> {
  let mut blocks = vec![];
  match block_ids {
    None => {
      for iter in self.block_editors.iter() {
        let editor = iter.value();
        let block_id = editor.block_id.clone();
        let row_revs = editor.get_row_revs::<&str>(None).await?;
        blocks.push(DatabaseBlockRowRevision { block_id, row_revs });
      }
    },
    Some(block_ids) => {
      for block_id in block_ids {
        let editor = self.get_block_editor(&block_id).await?;
        let row_revs = editor.get_row_revs::<&str>(None).await?;
        blocks.push(DatabaseBlockRowRevision { block_id, row_revs });
      }
    },
  }
  Ok(blocks)
}
async fn notify_did_update_cell(&self, changeset: CellChangesetPB) -> FlowyResult<()> {
  let id = format!("{}:{}", changeset.row_id, changeset.field_id);
  send_notification(&id, DatabaseNotification::DidUpdateCell).send();
  Ok(())
}
}
/// Initialize each block editor
async fn make_block_editors(
  user: &Arc<dyn DatabaseUser>,
  block_meta_revs: Vec<Arc<GridBlockMetaRevision>>,
) -> FlowyResult<DashMap<String, Arc<DatabaseBlockRevisionEditor>>> {
  let editor_map = DashMap::new();
  for block_meta_rev in block_meta_revs {
    let editor = make_database_block_editor(user, &block_meta_rev.block_id).await?;
    editor_map.insert(block_meta_rev.block_id.clone(), Arc::new(editor));
  }

  Ok(editor_map)
}
async fn make_database_block_editor(
  user: &Arc<dyn DatabaseUser>,
  block_id: &str,
) -> FlowyResult<DatabaseBlockRevisionEditor> {
  tracing::trace!("Open block:{} editor", block_id);
  let token = user.token()?;
  let user_id = user.user_id()?;
  let rev_manager = make_database_block_rev_manager(user, block_id)?;
  DatabaseBlockRevisionEditor::new(&user_id, &token, block_id, rev_manager).await
}
pub fn make_database_block_rev_manager(
  user: &Arc<dyn DatabaseUser>,
  block_id: &str,
) -> FlowyResult<RevisionManager<Arc<ConnectionPool>>> {
  let user_id = user.user_id()?;

  // Create revision persistence
  let pool = user.db_pool()?;
  let disk_cache = SQLiteDatabaseBlockRevisionPersistence::new(&user_id, pool.clone());
  let configuration = RevisionPersistenceConfiguration::new(4, false);
  let rev_persistence = RevisionPersistence::new(&user_id, block_id, disk_cache, configuration);

  // Create snapshot persistence
  let snapshot_object_id = format!("grid_block:{}", block_id);
  let snapshot_persistence =
    SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);

  let rev_compress = GridBlockRevisionMergeable();
  let rev_manager = RevisionManager::new(
    &user_id,
    block_id,
    rev_persistence,
    rev_compress,
    snapshot_persistence,
  );
  Ok(rev_manager)
}
@@ -16,90 +16,97 @@ pub struct AnyTypeCache<TypeValueKey>(HashMap<TypeValueKey, TypeValue>);
impl<TypeValueKey> AnyTypeCache<TypeValueKey>
where
TypeValueKey: Clone + Hash + Eq,
{
pub fn new() -> Arc<RwLock<AnyTypeCache<TypeValueKey>>> {
  Arc::new(RwLock::new(AnyTypeCache(HashMap::default())))
}
pub fn insert<T>(&mut self, key: &TypeValueKey, val: T) -> Option<T>
where
  T: 'static + Send + Sync,
{
  self
    .0
    .insert(key.clone(), TypeValue::new(val))
    .and_then(downcast_owned)
}
pub fn remove(&mut self, key: &TypeValueKey) {
  self.0.remove(key);
}
// pub fn remove<T, K: AsRef<TypeValueKey>>(&mut self, key: K) -> Option<T>
// where
//   T: 'static + Send + Sync,
// {
//   self.0.remove(key.as_ref()).and_then(downcast_owned)
// }
pub fn get<T>(&self, key: &TypeValueKey) -> Option<&T>
where
  T: 'static + Send + Sync,
{
  self
    .0
    .get(key)
    .and_then(|type_value| type_value.boxed.downcast_ref())
}
pub fn get_mut<T>(&mut self, key: &TypeValueKey) -> Option<&mut T>
where
  T: 'static + Send + Sync,
{
  self
    .0
    .get_mut(key)
    .and_then(|type_value| type_value.boxed.downcast_mut())
}
pub fn contains(&self, key: &TypeValueKey) -> bool {
  self.0.contains_key(key)
}
pub fn is_empty(&self) -> bool {
  self.0.is_empty()
}
}
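// Usage sketch (not part of the diff): a minimal round trip through AnyTypeCache.
// Assumes the RwLock above is std::sync::RwLock; if the crate aliases a different
// lock (e.g. parking_lot), drop the .unwrap() calls.
fn any_type_cache_example() {
  let cache = AnyTypeCache::<&'static str>::new();
  // Store a u32 under the key "answer"; insert returns any displaced value of type T.
  cache.write().unwrap().insert(&"answer", 42_u32);
  // Read it back with the matching type parameter; a mismatched T yields None.
  let value = cache.read().unwrap().get::<u32>(&"answer").copied();
  assert_eq!(value, Some(42));
}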
fn downcast_owned<T: 'static + Send + Sync>(type_value: TypeValue) -> Option<T> {
type_value.boxed.downcast().ok().map(|boxed| *boxed)
}
#[derive(Debug)]
struct TypeValue {
boxed: Box<dyn Any + Send + Sync + 'static>,
#[allow(dead_code)]
ty: &'static str,
}
impl TypeValue {
pub fn new<T>(value: T) -> Self
where
  T: Send + Sync + 'static,
{
  Self {
    boxed: Box::new(value),
    ty: type_name::<T>(),
  }
}
}
impl std::ops::Deref for TypeValue {
type Target = Box<dyn Any + Send + Sync + 'static>;

fn deref(&self) -> &Self::Target {
  &self.boxed
}
}
impl std::ops::DerefMut for TypeValue {
fn deref_mut(&mut self) -> &mut Self::Target {
  &mut self.boxed
}
}
// #[cfg(test)]
@@ -9,41 +9,41 @@ use std::fmt::Debug;
/// Decode the opaque cell data into content in a readable format
pub trait CellDataDecoder: TypeOption {
///
/// Tries to decode the opaque cell string to `decoded_field_type`'s cell data. Sometimes, the `field_type`
/// of the `FieldRevision` is not equal to the `decoded_field_type` (this happens when switching
/// the field type of the `FieldRevision` to another field type), so the cell data needs some
/// transformation.
///
/// For example, the current field type of the `FieldRevision` is a checkbox. When switching the field
/// type from checkbox to single select, two new options, `Yes` and `No`, are created if they don't exist,
/// but the data of the cell doesn't change. We can't iterate over all the rows to transform the cell
/// data into a form that can be parsed by the current field type. One approach is to transform the cell
/// data when it gets read. For the moment, the cell data is a string, `Yes` or `No`. It needs to be
/// compared with each option's name; if one matches, return the id of that option.
fn decode_cell_str(
  &self,
  cell_str: String,
  decoded_field_type: &FieldType,
  field_rev: &FieldRevision,
) -> FlowyResult<<Self as TypeOption>::CellData>;
/// Same as `decode_cell_data`, but decodes the cell data to a readable `String`.
/// For example, the string of a Multi-Select cell will be a list of the options' names
/// separated by commas.
fn decode_cell_data_to_str(&self, cell_data: <Self as TypeOption>::CellData) -> String;
}
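// Illustration only (not from the diff): the checkbox -> single-select transformation
// described in the doc comment above, reduced to the name-to-id lookup. `options` is
// a stand-in for the single-select type option's real storage of (id, name) pairs.
fn checkbox_str_to_option_id(cell_str: &str, options: &[(String, String)]) -> Option<String> {
  // cell_str is "Yes" or "No"; return the id of the option whose name matches it.
  options
    .iter()
    .find(|(_, name)| name == cell_str)
    .map(|(id, _)| id.clone())
}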
pub trait CellDataChangeset: TypeOption {
/// The changeset can be parsed into the concrete data struct if `TypeOption::CellChangeset`
/// implements the `FromCellChangesetString` trait.
/// For example, the SelectOptionCellChangeset, the DateCellChangeset, etc.
///
fn apply_changeset(
  &self,
  changeset: <Self as TypeOption>::CellChangeset,
  type_cell_data: Option<TypeCellData>,
) -> FlowyResult<(String, <Self as TypeOption>::CellData)>;
}
/// changeset: It will be deserialized into specific data based on the FieldType.
@@ -53,78 +53,90 @@ pub trait CellDataChangeset: TypeOption {
///
/// cell_rev: It will be None if the cell does not contain any data.
pub fn apply_cell_data_changeset<C: ToCellChangesetString, T: AsRef<FieldRevision>>(
changeset: C,
cell_rev: Option<CellRevision>,
field_rev: T,
cell_data_cache: Option<AtomicCellDataCache>,
) -> Result<String, FlowyError> {
  let field_rev = field_rev.as_ref();
  let changeset = changeset.to_cell_changeset_str();
  let field_type: FieldType = field_rev.ty.into();

  let type_cell_data = cell_rev.and_then(|cell_rev| match TypeCellData::try_from(cell_rev) {
    Ok(type_cell_data) => Some(type_cell_data),
    Err(_) => None,
  });

  let cell_str = match TypeOptionCellExt::new_with_cell_data_cache(field_rev, cell_data_cache)
    .get_type_option_cell_data_handler(&field_type)
  {
    None => "".to_string(),
    Some(handler) => handler.handle_cell_changeset(changeset, type_cell_data, field_rev)?,
  };
  Ok(TypeCellData::new(cell_str, field_type).to_json())
}
pub fn get_type_cell_protobuf<T: TryInto<TypeCellData, Error = FlowyError> + Debug>(
data: T,
field_rev: &FieldRevision,
cell_data_cache: Option<AtomicCellDataCache>,
) -> (FieldType, CellProtobufBlob) {
  let to_field_type = field_rev.ty.into();
  match data.try_into() {
    Ok(type_cell_data) => {
      let TypeCellData {
        cell_str,
        field_type,
      } = type_cell_data;
      match try_decode_cell_str_to_cell_protobuf(
        cell_str,
        &field_type,
        &to_field_type,
        field_rev,
        cell_data_cache,
      ) {
        Ok(cell_bytes) => (field_type, cell_bytes),
        Err(e) => {
          tracing::error!("Decode cell data failed, {:?}", e);
          (field_type, CellProtobufBlob::default())
        },
      }
    },
    Err(_err) => {
      // It's okay to ignore this error, because it's okay that the current cell can't
      // display the existing cell data. For example, the UI of the text cell will be blank if
      // the type of the data of the cell is Number.
      (to_field_type, CellProtobufBlob::default())
    },
  }
}
pub fn get_type_cell_data<CellData, Output>(
data: CellData,
field_rev: &FieldRevision,
cell_data_cache: Option<AtomicCellDataCache>,
) -> Option<Output>
where
  CellData: TryInto<TypeCellData, Error = FlowyError> + Debug,
  Output: Default + 'static,
{
  let to_field_type = field_rev.ty.into();
  match data.try_into() {
    Ok(type_cell_data) => {
      let TypeCellData {
        cell_str,
        field_type,
      } = type_cell_data;
      try_decode_cell_str_to_cell_data(
        cell_str,
        &field_type,
        &to_field_type,
        field_rev,
        cell_data_cache,
      )
    },
    Err(_err) => None,
  }
}
/// Decode the opaque cell data from one field type to another using the corresponding `TypeOption`
@@ -145,33 +157,33 @@ where
/// returns: CellBytes
///
pub fn try_decode_cell_str_to_cell_protobuf(
cell_str: String,
from_field_type: &FieldType,
to_field_type: &FieldType,
field_rev: &FieldRevision,
cell_data_cache: Option<AtomicCellDataCache>,
) -> FlowyResult<CellProtobufBlob> {
  match TypeOptionCellExt::new_with_cell_data_cache(field_rev, cell_data_cache)
    .get_type_option_cell_data_handler(to_field_type)
  {
    None => Ok(CellProtobufBlob::default()),
    Some(handler) => handler.handle_cell_str(cell_str, from_field_type, field_rev),
  }
}
pub fn try_decode_cell_str_to_cell_data<T: Default + 'static>(
cell_str: String,
from_field_type: &FieldType,
to_field_type: &FieldType,
field_rev: &FieldRevision,
cell_data_cache: Option<AtomicCellDataCache>,
) -> Option<T> {
  let handler = TypeOptionCellExt::new_with_cell_data_cache(field_rev, cell_data_cache)
    .get_type_option_cell_data_handler(to_field_type)?;
  handler
    .get_cell_data(cell_str, from_field_type, field_rev)
    .ok()?
    .unbox_or_none::<T>()
}
/// Returns a string that represents the current field_type's cell data.
/// For example, the string of a Multi-Select cell will be a list of the options' names
@@ -187,123 +199,133 @@ pub fn try_decode_cell_str_to_cell_data<T: Default + 'static>(
///
/// returns: String
pub fn stringify_cell_data(
cell_str: String,
decoded_field_type: &FieldType,
field_type: &FieldType,
field_rev: &FieldRevision,
) -> String {
  match TypeOptionCellExt::new_with_cell_data_cache(field_rev, None)
    .get_type_option_cell_data_handler(field_type)
  {
    None => "".to_string(),
    Some(handler) => handler.stringify_cell_str(cell_str, decoded_field_type, field_rev),
  }
}
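// Illustration only (not from the diff): per the doc comments around stringify_cell_data,
// a multi-select cell stringifies to its selected options' names joined by commas.
fn join_selected_option_names(names: &[&str]) -> String {
  names.join(",")
}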
pub fn insert_text_cell(s: String, field_rev: &FieldRevision) -> CellRevision {
let data = apply_cell_data_changeset(s, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_number_cell(num: i64, field_rev: &FieldRevision) -> CellRevision {
let data = apply_cell_data_changeset(num.to_string(), None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_url_cell(url: String, field_rev: &FieldRevision) -> CellRevision {
let data = apply_cell_data_changeset(url, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_checkbox_cell(is_check: bool, field_rev: &FieldRevision) -> CellRevision {
let s = if is_check {
  CHECK.to_string()
} else {
  UNCHECK.to_string()
};
let data = apply_cell_data_changeset(s, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_date_cell(timestamp: i64, field_rev: &FieldRevision) -> CellRevision {
let cell_data = serde_json::to_string(&DateCellChangeset {
  date: Some(timestamp.to_string()),
  time: None,
  is_utc: true,
})
.unwrap();
let data = apply_cell_data_changeset(cell_data, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn insert_select_option_cell(
  option_ids: Vec<String>,
  field_rev: &FieldRevision,
) -> CellRevision {
  let changeset =
    SelectOptionCellChangeset::from_insert_options(option_ids).to_cell_changeset_str();
  let data = apply_cell_data_changeset(changeset, None, field_rev, None).unwrap();
  CellRevision::new(data)
}
pub fn delete_select_option_cell(
  option_ids: Vec<String>,
  field_rev: &FieldRevision,
) -> CellRevision {
  let changeset =
    SelectOptionCellChangeset::from_delete_options(option_ids).to_cell_changeset_str();
  let data = apply_cell_data_changeset(changeset, None, field_rev, None).unwrap();
  CellRevision::new(data)
}
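// Usage sketch (hypothetical): seeding a new row's cells with the helpers above.
// `text_field`, `done_field`, `select_field`, and `option_id` are supplied by the
// caller; the values are illustrative only.
fn seed_cells_example(
  text_field: &FieldRevision,
  done_field: &FieldRevision,
  select_field: &FieldRevision,
  option_id: String,
) -> Vec<CellRevision> {
  vec![
    insert_text_cell("Write release notes".to_string(), text_field),
    insert_checkbox_cell(false, done_field),
    insert_select_option_cell(vec![option_id], select_field),
  ]
}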
/// Deserialize the String into cell specific data type.
pub trait FromCellString {
fn from_cell_str(s: &str) -> FlowyResult<Self>
where
  Self: Sized;
}
/// If the changeset applying to the cell is not String type, it should impl this trait.
/// Deserialize the string into cell specific changeset.
pub trait FromCellChangesetString {
fn from_changeset(changeset: String) -> FlowyResult<Self>
where
  Self: Sized;
}
impl FromCellChangesetString for String {
fn from_changeset(changeset: String) -> FlowyResult<Self>
where
  Self: Sized,
{
  Ok(changeset)
}
}
pub trait ToCellChangesetString: Debug {
fn to_cell_changeset_str(&self) -> String;
}
impl ToCellChangesetString for String {
fn to_cell_changeset_str(&self) -> String {
  self.clone()
}
}
pub struct AnyCellChangeset<T>(pub Option<T>);
impl<T> AnyCellChangeset<T> {
pub fn try_into_inner(self) -> FlowyResult<T> {
  match self.0 {
    None => Err(ErrorCode::InvalidData.into()),
    Some(data) => Ok(data),
  }
}
}
impl<T, C: ToString> std::convert::From<C> for AnyCellChangeset<T>
where
T: FromCellChangesetString,
{
fn from(changeset: C) -> Self {
  match T::from_changeset(changeset.to_string()) {
    Ok(data) => AnyCellChangeset(Some(data)),
    Err(e) => {
      tracing::error!("Deserialize CellDataChangeset failed: {}", e);
      AnyCellChangeset(None)
    },
  }
}
}
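// Usage sketch (illustration only): any ToString value converts into an
// AnyCellChangeset through the blanket impl above, and String round-trips
// losslessly via its FromCellChangesetString impl.
fn any_cell_changeset_example() {
  let changeset: AnyCellChangeset<String> = AnyCellChangeset::from("hello");
  assert_eq!(changeset.try_into_inner().unwrap(), "hello");
}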
// impl std::convert::From<String> for AnyCellChangeset<String> {
// fn from(s: String) -> Self {