feat: run rustfmt with custom defined fmt configuration (#1848)

* chore: update rustfmt

* chore: apply rustfmt format
Nathan.fooo 2023-02-13 09:29:49 +08:00 committed by GitHub
parent e2496e734c
commit 6bb1c4e89c
459 changed files with 50,554 additions and 46,600 deletions

@ -0,0 +1,12 @@
# https://rust-lang.github.io/rustfmt/?version=master&search=
max_width = 100
tab_spaces = 2
newline_style = "Auto"
match_block_trailing_comma = true
use_field_init_shorthand = true
use_try_shorthand = true
reorder_imports = true
reorder_modules = true
remove_nested_parens = true
merge_derives = true
edition = "2021"

@ -27,7 +27,8 @@ impl TSNotificationSender {
impl NotificationSender for TSNotificationSender {
fn send_subject(&self, subject: SubscribeObject) -> Result<(), String> {
self.handler
self
.handler
.emit_all(AF_NOTIFICATION, subject)
.map_err(|e| format!("{:?}", e))
}

@ -30,7 +30,8 @@ pub extern "C" fn init_sdk(path: *mut c_char) -> i64 {
let server_config = get_client_server_configuration().unwrap();
let log_crates = vec!["flowy-ffi".to_string()];
let config = AppFlowyCoreConfig::new(path, "appflowy".to_string(), server_config).log_filter("info", log_crates);
let config = AppFlowyCoreConfig::new(path, "appflowy".to_string(), server_config)
.log_filter("info", log_crates);
*APPFLOWY_CORE.write() = Some(AppFlowyCore::new(config));
0
@ -50,13 +51,17 @@ pub extern "C" fn async_event(port: i64, input: *const u8, len: usize) {
None => {
log::error!("sdk not init yet.");
return;
}
},
Some(e) => e.event_dispatcher.clone(),
};
let _ = AFPluginDispatcher::async_send_with_callback(dispatcher, request, move |resp: AFPluginEventResponse| {
let _ = AFPluginDispatcher::async_send_with_callback(
dispatcher,
request,
move |resp: AFPluginEventResponse| {
log::trace!("[FFI]: Post data to dart through {} port", port);
Box::pin(post_to_flutter(resp, port))
});
},
);
}
#[no_mangle]
@ -68,7 +73,7 @@ pub extern "C" fn sync_event(input: *const u8, len: usize) -> *const u8 {
None => {
log::error!("sdk not init yet.");
return forget_rust(Vec::default());
}
},
Some(e) => e.event_dispatcher.clone(),
};
let _response = AFPluginDispatcher::sync_send(dispatcher, request);
@ -101,14 +106,14 @@ async fn post_to_flutter(response: AFPluginEventResponse, port: i64) {
{
Ok(_success) => {
log::trace!("[FFI]: Post data to dart success");
}
},
Err(e) => {
if let Some(msg) = e.downcast_ref::<&str>() {
log::error!("[FFI]: {:?}", msg);
} else {
log::error!("[FFI]: allo_isolate post panic");
}
}
},
}
}

@ -4,7 +4,9 @@
use crate::event_attrs::EventEnumAttrs;
use crate::node_attrs::NodeStructAttrs;
use crate::{is_recognizable_field, ty_ext::*, ASTResult, PBAttrsContainer, PBStructAttrs, NODE_TYPE};
use crate::{
is_recognizable_field, ty_ext::*, ASTResult, PBAttrsContainer, PBStructAttrs, NODE_TYPE,
};
use proc_macro2::Ident;
use syn::Meta::NameValue;
use syn::{self, punctuated::Punctuated};
@ -32,15 +34,20 @@ impl<'a> ASTContainer<'a> {
// https://docs.rs/syn/1.0.48/syn/struct.DataStruct.html
let (style, fields) = struct_from_ast(ast_result, &data.fields);
ASTData::Struct(style, fields)
}
},
syn::Data::Union(_) => {
ast_result.error_spanned_by(ast, "Does not support derive for unions");
return None;
}
},
syn::Data::Enum(data) => {
// https://docs.rs/syn/1.0.48/syn/struct.DataEnum.html
ASTData::Enum(enum_from_ast(ast_result, &ast.ident, &data.variants, &ast.attrs))
}
ASTData::Enum(enum_from_ast(
ast_result,
&ast.ident,
&data.variants,
&ast.attrs,
))
},
};
let ident = ast.ident.clone();
@ -63,7 +70,9 @@ pub enum ASTData<'a> {
impl<'a> ASTData<'a> {
pub fn all_fields(&'a self) -> Box<dyn Iterator<Item = &'a ASTField<'a>> + 'a> {
match self {
ASTData::Enum(variants) => Box::new(variants.iter().flat_map(|variant| variant.fields.iter())),
ASTData::Enum(variants) => {
Box::new(variants.iter().flat_map(|variant| variant.fields.iter()))
},
ASTData::Struct(_, fields) => Box::new(fields.iter()),
}
}
@ -73,11 +82,11 @@ impl<'a> ASTData<'a> {
ASTData::Enum(variants) => {
let iter = variants.iter().map(|variant| &variant.attrs);
Box::new(iter)
}
},
ASTData::Struct(_, fields) => {
let iter = fields.iter().flat_map(|_| None);
Box::new(iter)
}
},
}
}
@ -90,7 +99,7 @@ impl<'a> ASTData<'a> {
_ => None,
});
Box::new(iter)
}
},
}
}
}
@ -140,36 +149,36 @@ impl<'a> ASTField<'a> {
match inner.primitive_ty {
PrimitiveTy::Map(map_info) => {
bracket_category = Some(BracketCategory::Map((map_info.key.clone(), map_info.value)))
}
},
PrimitiveTy::Vec => {
bracket_category = Some(BracketCategory::Vec);
}
},
PrimitiveTy::Opt => {
bracket_category = Some(BracketCategory::Opt);
}
},
PrimitiveTy::Other => {
bracket_category = Some(BracketCategory::Other);
}
},
}
match *inner.bracket_ty_info {
Some(bracketed_inner_ty) => {
bracket_inner_ty = Some(bracketed_inner_ty.ident.clone());
bracket_ty = Some(inner.ident.clone());
}
},
None => {
bracket_ty = Some(inner.ident.clone());
},
}
}
}
},
Ok(None) => {
let msg = format!("Fail to get the ty inner type: {:?}", field);
return Err(msg);
}
},
Err(e) => {
eprintln!("ASTField parser failed: {:?} with error: {}", field, e);
return Err(e);
}
},
}
Ok(ASTField {
@ -214,12 +223,15 @@ pub enum ASTStyle {
Unit,
}
pub fn struct_from_ast<'a>(cx: &ASTResult, fields: &'a syn::Fields) -> (ASTStyle, Vec<ASTField<'a>>) {
pub fn struct_from_ast<'a>(
cx: &ASTResult,
fields: &'a syn::Fields,
) -> (ASTStyle, Vec<ASTField<'a>>) {
match fields {
syn::Fields::Named(fields) => (ASTStyle::Struct, fields_from_ast(cx, &fields.named)),
syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
(ASTStyle::NewType, fields_from_ast(cx, &fields.unnamed))
}
},
syn::Fields::Unnamed(fields) => (ASTStyle::Tuple, fields_from_ast(cx, &fields.unnamed)),
syn::Fields::Unit => (ASTStyle::Unit, Vec::new()),
}
@ -247,7 +259,10 @@ pub fn enum_from_ast<'a>(
.collect()
}
fn fields_from_ast<'a>(cx: &ASTResult, fields: &'a Punctuated<syn::Field, Token![,]>) -> Vec<ASTField<'a>> {
fn fields_from_ast<'a>(
cx: &ASTResult,
fields: &'a Punctuated<syn::Field, Token![,]>,
) -> Vec<ASTField<'a>> {
fields
.iter()
.enumerate()
@ -261,7 +276,11 @@ fn fields_from_ast<'a>(cx: &ASTResult, fields: &'a Punctuated<syn::Field, Token!
.collect()
}
fn get_node_type(ast_result: &ASTResult, struct_name: &Ident, attrs: &[syn::Attribute]) -> Option<String> {
fn get_node_type(
ast_result: &ASTResult,
struct_name: &Ident,
attrs: &[syn::Attribute],
) -> Option<String> {
let mut node_type = None;
attrs
.iter()

@ -14,7 +14,8 @@ impl ASTResult {
}
pub fn error_spanned_by<A: ToTokens, T: Display>(&self, obj: A, msg: T) {
self.errors
self
.errors
.borrow_mut()
.as_mut()
.unwrap()

@ -91,14 +91,17 @@ fn get_event_attrs_from(
}
});
let mut extract_event_attr = |attr: &syn::Attribute, meta_item: &syn::NestedMeta| match &meta_item {
let mut extract_event_attr = |attr: &syn::Attribute, meta_item: &syn::NestedMeta| match &meta_item
{
Meta(NameValue(name_value)) => {
if name_value.path == EVENT_INPUT {
if let syn::Lit::Str(s) = &name_value.lit {
let input_type = parse_lit_str(s)
.map_err(|_| {
ast_result
.error_spanned_by(s, format!("failed to parse request deserializer {:?}", s.value()))
ast_result.error_spanned_by(
s,
format!("failed to parse request deserializer {:?}", s.value()),
)
})
.unwrap();
event_attrs.input = Some(input_type);
@ -109,19 +112,21 @@ fn get_event_attrs_from(
if let syn::Lit::Str(s) = &name_value.lit {
let output_type = parse_lit_str(s)
.map_err(|_| {
ast_result
.error_spanned_by(s, format!("failed to parse response deserializer {:?}", s.value()))
ast_result.error_spanned_by(
s,
format!("failed to parse response deserializer {:?}", s.value()),
)
})
.unwrap();
event_attrs.output = Some(output_type);
}
}
}
},
Meta(Path(word)) => {
if word == EVENT_IGNORE && attr.path == EVENT {
event_attrs.ignore = true;
}
}
},
Lit(s) => ast_result.error_spanned_by(s, "unexpected attribute"),
_ => ast_result.error_spanned_by(meta_item, "unexpected attribute"),
};

@ -38,51 +38,58 @@ impl NodeStructAttrs {
if let syn::Lit::Str(lit) = &m.lit {
rename.set(&m.path, lit.clone());
}
}
},
// Parse '#[node(child_name = x)]'
Meta(NameValue(m)) if m.path == CHILD_NODE_NAME => {
if let syn::Lit::Str(lit) = &m.lit {
child_name.set(&m.path, lit.clone());
}
}
},
// Parse '#[node(child_index = x)]'
Meta(NameValue(m)) if m.path == CHILD_NODE_INDEX => {
if let syn::Lit::Int(lit) = &m.lit {
child_index.set(&m.path, lit.clone());
}
}
},
// Parse `#[node(get_node_value_with = "...")]`
Meta(NameValue(m)) if m.path == GET_NODE_VALUE_WITH => {
if let Ok(path) = parse_lit_into_expr_path(ast_result, GET_NODE_VALUE_WITH, &m.lit) {
get_node_value_with.set(&m.path, path);
}
}
},
// Parse `#[node(set_node_value_with= "...")]`
Meta(NameValue(m)) if m.path == SET_NODE_VALUE_WITH => {
if let Ok(path) = parse_lit_into_expr_path(ast_result, SET_NODE_VALUE_WITH, &m.lit) {
set_node_value_with.set(&m.path, path);
}
}
},
// Parse `#[node(with_children= "...")]`
Meta(NameValue(m)) if m.path == WITH_CHILDREN => {
if let Ok(path) = parse_lit_into_expr_path(ast_result, WITH_CHILDREN, &m.lit) {
with_children.set(&m.path, path);
}
}
},
Meta(meta_item) => {
let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
ast_result.error_spanned_by(meta_item.path(), format!("unknown node field attribute `{}`", path));
}
let path = meta_item
.path()
.into_token_stream()
.to_string()
.replace(' ', "");
ast_result.error_spanned_by(
meta_item.path(),
format!("unknown node field attribute `{}`", path),
);
},
Lit(lit) => {
ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
}
},
}
}
let child_name = child_name.get();

@ -34,33 +34,40 @@ impl PBAttrsContainer {
if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_STRUCT, &m.lit) {
pb_struct_type.set_opt(&m.path, Some(into_ty));
}
}
},
// Parse `#[pb(enum = "Type")]
Meta(NameValue(m)) if m.path == PB_ENUM => {
if let Ok(into_ty) = parse_lit_into_ty(ast_result, PB_ENUM, &m.lit) {
pb_enum_type.set_opt(&m.path, Some(into_ty));
}
}
},
Meta(meta_item) => {
let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
ast_result.error_spanned_by(meta_item.path(), format!("unknown container attribute `{}`", path));
}
let path = meta_item
.path()
.into_token_stream()
.to_string()
.replace(' ', "");
ast_result.error_spanned_by(
meta_item.path(),
format!("unknown container attribute `{}`", path),
);
},
Lit(lit) => {
ast_result.error_spanned_by(lit, "unexpected literal in container attribute");
}
},
}
}
match &item.data {
syn::Data::Struct(_) => {
pb_struct_type.set_if_none(default_pb_type(&ast_result, &item.ident));
}
},
syn::Data::Enum(_) => {
pb_enum_type.set_if_none(default_pb_type(&ast_result, &item.ident));
}
_ => {}
},
_ => {},
}
PBAttrsContainer {
@ -100,7 +107,8 @@ impl<'c, T> ASTAttr<'c, T> {
let tokens = obj.into_token_stream();
if self.value.is_some() {
self.ast_result
self
.ast_result
.error_spanned_by(tokens, format!("duplicate attribute `{}`", self.name));
} else {
self.tokens = tokens;
@ -145,7 +153,10 @@ pub struct PBStructAttrs {
}
pub fn is_recognizable_field(field: &syn::Field) -> bool {
field.attrs.iter().any(|attr| is_recognizable_attribute(attr))
field
.attrs
.iter()
.any(|attr| is_recognizable_attribute(attr))
}
impl PBStructAttrs {
@ -174,42 +185,49 @@ impl PBStructAttrs {
Meta(Path(word)) if word == SKIP => {
skip_pb_serializing.set_true(word);
skip_pb_deserializing.set_true(word);
}
},
// Parse '#[pb(index = x)]'
Meta(NameValue(m)) if m.path == PB_INDEX => {
if let syn::Lit::Int(lit) = &m.lit {
pb_index.set(&m.path, lit.clone());
}
}
},
// Parse `#[pb(one_of)]`
Meta(Path(path)) if path == PB_ONE_OF => {
pb_one_of.set_true(path);
}
},
// Parse `#[pb(serialize_pb_with = "...")]`
Meta(NameValue(m)) if m.path == SERIALIZE_PB_WITH => {
if let Ok(path) = parse_lit_into_expr_path(ast_result, SERIALIZE_PB_WITH, &m.lit) {
serialize_pb_with.set(&m.path, path);
}
}
},
// Parse `#[pb(deserialize_pb_with = "...")]`
Meta(NameValue(m)) if m.path == DESERIALIZE_PB_WITH => {
if let Ok(path) = parse_lit_into_expr_path(ast_result, DESERIALIZE_PB_WITH, &m.lit) {
deserialize_pb_with.set(&m.path, path);
}
}
},
Meta(meta_item) => {
let path = meta_item.path().into_token_stream().to_string().replace(' ', "");
ast_result.error_spanned_by(meta_item.path(), format!("unknown pb field attribute `{}`", path));
}
let path = meta_item
.path()
.into_token_stream()
.to_string()
.replace(' ', "");
ast_result.error_spanned_by(
meta_item.path(),
format!("unknown pb field attribute `{}`", path),
);
},
Lit(lit) => {
ast_result.error_spanned_by(lit, "unexpected literal in field attribute");
}
},
}
}
@ -226,7 +244,10 @@ impl PBStructAttrs {
#[allow(dead_code)]
pub fn pb_index(&self) -> Option<String> {
self.pb_index.as_ref().map(|lit| lit.base10_digits().to_string())
self
.pb_index
.as_ref()
.map(|lit| lit.base10_digits().to_string())
}
pub fn is_one_of(&self) -> bool {
@ -263,7 +284,10 @@ pub fn is_recognizable_attribute(attr: &syn::Attribute) -> bool {
attr.path == PB_ATTRS || attr.path == EVENT || attr.path == NODE_ATTRS || attr.path == NODES_ATTRS
}
pub fn get_pb_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
pub fn get_pb_meta_items(
cx: &ASTResult,
attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
// Only handle the attribute that we have defined
if attr.path != PB_ATTRS {
return Ok(vec![]);
@ -275,16 +299,19 @@ pub fn get_pb_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<sy
Ok(other) => {
cx.error_spanned_by(other, "expected #[pb(...)]");
Err(())
}
},
Err(err) => {
cx.error_spanned_by(attr, "attribute must be str, e.g. #[pb(xx = \"xxx\")]");
cx.syn_error(err);
Err(())
}
},
}
}
pub fn get_node_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
pub fn get_node_meta_items(
cx: &ASTResult,
attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
// Only handle the attribute that we have defined
if attr.path != NODE_ATTRS && attr.path != NODES_ATTRS {
return Ok(vec![]);
@ -298,11 +325,14 @@ pub fn get_node_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<
cx.error_spanned_by(attr, "attribute must be str, e.g. #[node(xx = \"xxx\")]");
cx.syn_error(err);
Err(())
}
},
}
}
pub fn get_event_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec<syn::NestedMeta>, ()> {
pub fn get_event_meta_items(
cx: &ASTResult,
attr: &syn::Attribute,
) -> Result<Vec<syn::NestedMeta>, ()> {
// Only handle the attribute that we have defined
if attr.path != EVENT {
return Ok(vec![]);
@ -314,12 +344,12 @@ pub fn get_event_meta_items(cx: &ASTResult, attr: &syn::Attribute) -> Result<Vec
Ok(other) => {
cx.error_spanned_by(other, "expected #[event(...)]");
Err(())
}
},
Err(err) => {
cx.error_spanned_by(attr, "attribute must be str, e.g. #[event(xx = \"xxx\")]");
cx.syn_error(err);
Err(())
}
},
}
}
@ -329,11 +359,16 @@ pub fn parse_lit_into_expr_path(
lit: &syn::Lit,
) -> Result<syn::ExprPath, ()> {
let string = get_lit_str(ast_result, attr_name, lit)?;
parse_lit_str(string)
.map_err(|_| ast_result.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value())))
parse_lit_str(string).map_err(|_| {
ast_result.error_spanned_by(lit, format!("failed to parse path: {:?}", string.value()))
})
}
fn get_lit_str<'a>(ast_result: &ASTResult, attr_name: Symbol, lit: &'a syn::Lit) -> Result<&'a syn::LitStr, ()> {
fn get_lit_str<'a>(
ast_result: &ASTResult,
attr_name: Symbol,
lit: &'a syn::Lit,
) -> Result<&'a syn::LitStr, ()> {
if let syn::Lit::Str(lit) = lit {
Ok(lit)
} else {
@ -348,7 +383,11 @@ fn get_lit_str<'a>(ast_result: &ASTResult, attr_name: Symbol, lit: &'a syn::Lit)
}
}
fn parse_lit_into_ty(ast_result: &ASTResult, attr_name: Symbol, lit: &syn::Lit) -> Result<syn::Type, ()> {
fn parse_lit_into_ty(
ast_result: &ASTResult,
attr_name: Symbol,
lit: &syn::Lit,
) -> Result<syn::Type, ()> {
let string = get_lit_str(ast_result, attr_name, lit)?;
parse_lit_str(string).map_err(|_| {
@ -373,7 +412,10 @@ fn spanned_tokens(s: &syn::LitStr) -> parse::Result<TokenStream> {
}
fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
stream.into_iter().map(|token| respan_token_tree(token, span)).collect()
stream
.into_iter()
.map(|token| respan_token_tree(token, span))
.collect()
}
fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
@ -392,7 +434,10 @@ fn default_pb_type(ast_result: &ASTResult, ident: &syn::Ident) -> syn::Type {
return pb_struct_ty;
}
}
ast_result.error_spanned_by(ident, format!("❌ Can't find {} protobuf struct", take_ident));
ast_result.error_spanned_by(
ident,
format!("❌ Can't find {} protobuf struct", take_ident),
);
panic!()
}
@ -402,19 +447,19 @@ pub fn is_option(ty: &syn::Type) -> bool {
syn::Type::Path(ty) => &ty.path,
_ => {
return false;
}
},
};
let seg = match path.segments.last() {
Some(seg) => seg,
None => {
return false;
}
},
};
let args = match &seg.arguments {
syn::PathArguments::AngleBracketed(bracketed) => &bracketed.args,
_ => {
return false;
}
},
};
seg.ident == "Option" && args.len() == 1
}

@ -36,12 +36,15 @@ impl<'a> TyInfo<'a> {
Some(b_ty) => b_ty.ident,
None => {
panic!()
}
},
}
}
}
pub fn parse_ty<'a>(ast_result: &ASTResult, ty: &'a syn::Type) -> Result<Option<TyInfo<'a>>, String> {
pub fn parse_ty<'a>(
ast_result: &ASTResult,
ty: &'a syn::Type,
) -> Result<Option<TyInfo<'a>>, String> {
// Type -> TypePath -> Path -> PathSegment -> PathArguments ->
// AngleBracketedGenericArguments -> GenericArgument -> Type.
if let syn::Type::Path(ref p) = ty {
@ -65,7 +68,7 @@ pub fn parse_ty<'a>(ast_result: &ASTResult, ty: &'a syn::Type) -> Result<Option<
let msg = format!("Unsupported type: {}", seg.ident);
ast_result.error_spanned_by(&seg.ident, &msg);
return Err(msg);
}
},
}
} else {
return Ok(Some(TyInfo {

@ -1,7 +1,9 @@
use crate::errors::{SyncError, SyncResult};
use crate::util::cal_diff;
use flowy_sync::util::make_operations_from_revisions;
use grid_model::{gen_block_id, gen_row_id, CellRevision, DatabaseBlockRevision, RowChangeset, RowRevision};
use grid_model::{
gen_block_id, gen_row_id, CellRevision, DatabaseBlockRevision, RowChangeset, RowRevision,
};
use lib_infra::util::md5;
use lib_ot::core::{DeltaBuilder, DeltaOperations, EmptyAttributes, OperationTransform};
use revision_model::Revision;
@ -84,7 +86,10 @@ impl GridBlockRevisionPad {
})
}
pub fn delete_rows(&mut self, row_ids: Vec<Cow<'_, String>>) -> SyncResult<Option<GridBlockRevisionChangeset>> {
pub fn delete_rows(
&mut self,
row_ids: Vec<Cow<'_, String>>,
) -> SyncResult<Option<GridBlockRevisionChangeset>> {
self.modify(|rows| {
rows.retain(|row| !row_ids.contains(&Cow::Borrowed(&row.id)));
Ok(Some(()))
@ -100,7 +105,10 @@ impl GridBlockRevisionPad {
None
}
pub fn get_row_revs<T>(&self, row_ids: Option<Vec<Cow<'_, T>>>) -> SyncResult<Vec<Arc<RowRevision>>>
pub fn get_row_revs<T>(
&self,
row_ids: Option<Vec<Cow<'_, T>>>,
) -> SyncResult<Vec<Arc<RowRevision>>>
where
T: AsRef<str> + ToOwned + ?Sized,
{
@ -114,7 +122,8 @@ impl GridBlockRevisionPad {
.map(|row| (row.id.as_str(), row.clone()))
.collect::<HashMap<&str, Arc<RowRevision>>>();
Ok(row_ids
Ok(
row_ids
.iter()
.flat_map(|row_id| {
let row_id = row_id.as_ref().as_ref();
@ -122,12 +131,13 @@ impl GridBlockRevisionPad {
None => {
tracing::error!("Can't find the row with id: {}", row_id);
None
}
},
Some(row) => Some(row.clone()),
}
})
.collect::<Vec<_>>())
}
.collect::<Vec<_>>(),
)
},
}
}
@ -155,7 +165,10 @@ impl GridBlockRevisionPad {
self.block.rows.iter().position(|row| row.id == row_id)
}
pub fn update_row(&mut self, changeset: RowChangeset) -> SyncResult<Option<GridBlockRevisionChangeset>> {
pub fn update_row(
&mut self,
changeset: RowChangeset,
) -> SyncResult<Option<GridBlockRevisionChangeset>> {
let row_id = changeset.row_id.clone();
self.modify_row(&row_id, |row| {
let mut is_changed = None;
@ -171,7 +184,10 @@ impl GridBlockRevisionPad {
if !changeset.cell_by_field_id.is_empty() {
is_changed = Some(());
changeset.cell_by_field_id.into_iter().for_each(|(field_id, cell)| {
changeset
.cell_by_field_id
.into_iter()
.for_each(|(field_id, cell)| {
row.cells.insert(field_id, cell);
})
}
@ -180,7 +196,12 @@ impl GridBlockRevisionPad {
})
}
pub fn move_row(&mut self, row_id: &str, from: usize, to: usize) -> SyncResult<Option<GridBlockRevisionChangeset>> {
pub fn move_row(
&mut self,
row_id: &str,
from: usize,
to: usize,
) -> SyncResult<Option<GridBlockRevisionChangeset>> {
self.modify(|row_revs| {
if let Some(position) = row_revs.iter().position(|row_rev| row_rev.id == row_id) {
debug_assert_eq!(from, position);
@ -210,15 +231,18 @@ impl GridBlockRevisionPad {
match cal_diff::<EmptyAttributes>(old, new) {
None => Ok(None),
Some(operations) => {
tracing::trace!("[GridBlockRevision] Composing operations {}", operations.json_str());
tracing::trace!(
"[GridBlockRevision] Composing operations {}",
operations.json_str()
);
self.operations = self.operations.compose(&operations)?;
Ok(Some(GridBlockRevisionChangeset {
operations,
md5: md5(&self.operations.json_bytes()),
}))
},
}
}
}
},
}
}
@ -257,7 +281,10 @@ pub fn make_database_block_operations(block_rev: &DatabaseBlockRevision) -> Grid
GridBlockOperationsBuilder::new().insert(&json).build()
}
pub fn make_grid_block_revisions(_user_id: &str, grid_block_meta_data: &DatabaseBlockRevision) -> Vec<Revision> {
pub fn make_grid_block_revisions(
_user_id: &str,
grid_block_meta_data: &DatabaseBlockRevision,
) -> Vec<Revision> {
let operations = make_database_block_operations(grid_block_meta_data);
let bytes = operations.json_bytes();
let revision = Revision::initial_revision(&grid_block_meta_data.block_id, bytes);
@ -324,7 +351,10 @@ mod tests {
r#"[{"retain":90},{"insert":",{\"id\":\"2\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
);
let change = pad.add_row_rev(row_3.clone(), Some("2".to_string())).unwrap().unwrap();
let change = pad
.add_row_rev(row_3.clone(), Some("2".to_string()))
.unwrap()
.unwrap();
assert_eq!(
change.operations.json_str(),
r#"[{"retain":157},{"insert":",{\"id\":\"3\",\"block_id\":\"1\",\"cells\":[],\"height\":0,\"visibility\":false}"},{"retain":2}]"#
@ -354,7 +384,10 @@ mod tests {
let _ = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
let _ = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
let _ = pad.add_row_rev(row_3.clone(), Some("1".to_string())).unwrap().unwrap();
let _ = pad
.add_row_rev(row_3.clone(), Some("1".to_string()))
.unwrap()
.unwrap();
assert_eq!(*pad.rows[0], row_1);
assert_eq!(*pad.rows[1], row_3);
@ -370,7 +403,10 @@ mod tests {
let _ = pad.add_row_rev(row_1.clone(), None).unwrap().unwrap();
let _ = pad.add_row_rev(row_2.clone(), None).unwrap().unwrap();
let _ = pad.add_row_rev(row_3.clone(), Some("".to_string())).unwrap().unwrap();
let _ = pad
.add_row_rev(row_3.clone(), Some("".to_string()))
.unwrap()
.unwrap();
assert_eq!(*pad.rows[0], row_1);
assert_eq!(*pad.rows[1], row_2);
@ -390,7 +426,10 @@ mod tests {
};
let _ = pad.add_row_rev(row.clone(), None).unwrap().unwrap();
let change = pad.delete_rows(vec![Cow::Borrowed(&row.id)]).unwrap().unwrap();
let change = pad
.delete_rows(vec![Cow::Borrowed(&row.id)])
.unwrap()
.unwrap();
assert_eq!(
change.operations.json_str(),
r#"[{"retain":24},{"delete":66},{"retain":2}]"#
@ -432,7 +471,8 @@ mod tests {
}
fn test_pad() -> GridBlockRevisionPad {
let operations = GridBlockOperations::from_json(r#"[{"insert":"{\"block_id\":\"1\",\"rows\":[]}"}]"#).unwrap();
let operations =
GridBlockOperations::from_json(r#"[{"insert":"{\"block_id\":\"1\",\"rows\":[]}"}]"#).unwrap();
GridBlockRevisionPad::from_operations(operations).unwrap()
}
}

@ -1,5 +1,7 @@
use crate::errors::{SyncError, SyncResult};
use grid_model::{BuildDatabaseContext, DatabaseBlockRevision, FieldRevision, GridBlockMetaRevision, RowRevision};
use grid_model::{
BuildDatabaseContext, DatabaseBlockRevision, FieldRevision, GridBlockMetaRevision, RowRevision,
};
use std::sync::Arc;
pub struct DatabaseBuilder {
@ -58,7 +60,10 @@ impl DatabaseBuilder {
#[allow(dead_code)]
fn check_rows(fields: &[FieldRevision], rows: &[RowRevision]) -> SyncResult<()> {
let field_ids = fields.iter().map(|field| &field.id).collect::<Vec<&String>>();
let field_ids = fields
.iter()
.map(|field| &field.id)
.collect::<Vec<&String>>();
for row in rows {
let cell_field_ids = row.cells.keys().into_iter().collect::<Vec<&String>>();
if cell_field_ids != field_ids {

@ -2,8 +2,8 @@ use crate::errors::{internal_sync_error, SyncError, SyncResult};
use crate::util::cal_diff;
use flowy_sync::util::make_operations_from_revisions;
use grid_model::{
gen_block_id, gen_grid_id, DatabaseRevision, FieldRevision, FieldTypeRevision, GridBlockMetaRevision,
GridBlockMetaRevisionChangeset,
gen_block_id, gen_grid_id, DatabaseRevision, FieldRevision, FieldTypeRevision,
GridBlockMetaRevision, GridBlockMetaRevisionChangeset,
};
use lib_infra::util::md5;
use lib_infra::util::move_vec_element;
@ -29,7 +29,9 @@ impl DatabaseRevisionPad {
pub fn grid_id(&self) -> String {
self.grid_rev.grid_id.clone()
}
pub async fn duplicate_grid_block_meta(&self) -> (Vec<FieldRevision>, Vec<GridBlockMetaRevision>) {
pub async fn duplicate_grid_block_meta(
&self,
) -> (Vec<FieldRevision>, Vec<GridBlockMetaRevision>) {
let fields = self
.grid_rev
.fields
@ -89,7 +91,10 @@ impl DatabaseRevisionPad {
let insert_index = match start_field_id {
None => None,
Some(start_field_id) => grid_meta.fields.iter().position(|field| field.id == start_field_id),
Some(start_field_id) => grid_meta
.fields
.iter()
.position(|field| field.id == start_field_id),
};
let new_field_rev = Arc::new(new_field_rev);
match insert_index {
@ -100,9 +105,16 @@ impl DatabaseRevisionPad {
})
}
pub fn delete_field_rev(&mut self, field_id: &str) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(
|grid_meta| match grid_meta.fields.iter().position(|field| field.id == field_id) {
pub fn delete_field_rev(
&mut self,
field_id: &str,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(|grid_meta| {
match grid_meta
.fields
.iter()
.position(|field| field.id == field_id)
{
None => Ok(None),
Some(index) => {
if grid_meta.fields[index].is_primary {
@ -111,9 +123,9 @@ impl DatabaseRevisionPad {
grid_meta.fields.remove(index);
Ok(Some(()))
}
}
},
)
}
})
}
pub fn duplicate_field_rev(
@ -121,18 +133,24 @@ impl DatabaseRevisionPad {
field_id: &str,
duplicated_field_id: &str,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(
|grid_meta| match grid_meta.fields.iter().position(|field| field.id == field_id) {
self.modify_grid(|grid_meta| {
match grid_meta
.fields
.iter()
.position(|field| field.id == field_id)
{
None => Ok(None),
Some(index) => {
let mut duplicate_field_rev = grid_meta.fields[index].as_ref().clone();
duplicate_field_rev.id = duplicated_field_id.to_string();
duplicate_field_rev.name = format!("{} (copy)", duplicate_field_rev.name);
grid_meta.fields.insert(index + 1, Arc::new(duplicate_field_rev));
grid_meta
.fields
.insert(index + 1, Arc::new(duplicate_field_rev));
Ok(Some(()))
}
},
)
}
})
}
/// Modifies the current field type of the [FieldTypeRevision]
@ -159,11 +177,15 @@ impl DatabaseRevisionPad {
{
let new_field_type = new_field_type.into();
self.modify_grid(|grid_meta| {
match grid_meta.fields.iter_mut().find(|field_rev| field_rev.id == field_id) {
match grid_meta
.fields
.iter_mut()
.find(|field_rev| field_rev.id == field_id)
{
None => {
tracing::warn!("Can not find the field with id: {}", field_id);
Ok(None)
}
},
Some(field_rev) => {
let mut_field_rev = Arc::make_mut(field_rev);
let old_field_type_rev = mut_field_rev.ty;
@ -178,19 +200,22 @@ impl DatabaseRevisionPad {
new_field_type_option.to_owned(),
);
mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
}
},
None => {
// If the type-option data isn't exist before, creating the default type-option data.
let new_field_type_option = make_default_type_option();
let transformed_type_option =
type_option_transform(old_field_type_rev, old_field_type_option, new_field_type_option);
let transformed_type_option = type_option_transform(
old_field_type_rev,
old_field_type_option,
new_field_type_option,
);
mut_field_rev.insert_type_option_str(&new_field_type, transformed_type_option);
}
},
}
mut_field_rev.ty = new_field_type;
Ok(Some(()))
}
},
}
})
}
@ -199,16 +224,20 @@ impl DatabaseRevisionPad {
&mut self,
field_rev: Arc<FieldRevision>,
) -> SyncResult<Option<DatabaseRevisionChangeset>> {
self.modify_grid(
|grid_meta| match grid_meta.fields.iter().position(|field| field.id == field_rev.id) {
self.modify_grid(|grid_meta| {
match grid_meta
.fields
.iter()
.position(|field| field.id == field_rev.id)
{
None => Ok(None),
Some(index) => {
grid_meta.fields.remove(index);
grid_meta.fields.insert(index, field_rev);
Ok(Some(()))
}
},
)
}
})
}
pub fn move_field(
@ -233,18 +262,26 @@ impl DatabaseRevisionPad {
}
pub fn contain_field(&self, field_id: &str) -> bool {
self.grid_rev.fields.iter().any(|field| field.id == field_id)
self
.grid_rev
.fields
.iter()
.any(|field| field.id == field_id)
}
pub fn get_field_rev(&self, field_id: &str) -> Option<(usize, &Arc<FieldRevision>)> {
self.grid_rev
self
.grid_rev
.fields
.iter()
.enumerate()
.find(|(_, field)| field.id == field_id)
}
pub fn get_field_revs(&self, field_ids: Option<Vec<String>>) -> SyncResult<Vec<Arc<FieldRevision>>> {
pub fn get_field_revs(
&self,
field_ids: Option<Vec<String>>,
) -> SyncResult<Vec<Arc<FieldRevision>>> {
match field_ids {
None => Ok(self.grid_rev.fields.clone()),
Some(field_ids) => {
@ -261,12 +298,12 @@ impl DatabaseRevisionPad {
None => {
tracing::error!("Can't find the field with id: {}", field_id);
None
}
},
Some(field) => Some((*field).clone()),
})
.collect::<Vec<Arc<FieldRevision>>>();
Ok(fields)
}
},
}
}
@ -352,46 +389,62 @@ impl DatabaseRevisionPad {
operations,
md5: self.database_md5(),
}))
},
}
}
}
},
}
}
fn modify_block<F>(&mut self, block_id: &str, f: F) -> SyncResult<Option<DatabaseRevisionChangeset>>
fn modify_block<F>(
&mut self,
block_id: &str,
f: F,
) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
F: FnOnce(&mut GridBlockMetaRevision) -> SyncResult<Option<()>>,
{
self.modify_grid(
|grid_rev| match grid_rev.blocks.iter().position(|block| block.block_id == block_id) {
self.modify_grid(|grid_rev| {
match grid_rev
.blocks
.iter()
.position(|block| block.block_id == block_id)
{
None => {
tracing::warn!("[GridMetaPad]: Can't find any block with id: {}", block_id);
Ok(None)
}
},
Some(index) => {
let block_rev = Arc::make_mut(&mut grid_rev.blocks[index]);
f(block_rev)
}
},
)
}
})
}
pub fn modify_field<F>(&mut self, field_id: &str, f: F) -> SyncResult<Option<DatabaseRevisionChangeset>>
pub fn modify_field<F>(
&mut self,
field_id: &str,
f: F,
) -> SyncResult<Option<DatabaseRevisionChangeset>>
where
F: FnOnce(&mut FieldRevision) -> SyncResult<Option<()>>,
{
self.modify_grid(
|grid_rev| match grid_rev.fields.iter().position(|field| field.id == field_id) {
self.modify_grid(|grid_rev| {
match grid_rev
.fields
.iter()
.position(|field| field.id == field_id)
{
None => {
tracing::warn!("[GridMetaPad]: Can't find any field with id: {}", field_id);
Ok(None)
}
},
Some(index) => {
let mut_field_rev = Arc::make_mut(&mut grid_rev.fields[index]);
f(mut_field_rev)
}
},
)
}
})
}
pub fn json_str(&self) -> SyncResult<String> {

@ -2,8 +2,8 @@ use crate::errors::{internal_sync_error, SyncError, SyncResult};
use crate::util::cal_diff;
use flowy_sync::util::make_operations_from_revisions;
use grid_model::{
DatabaseViewRevision, FieldRevision, FieldTypeRevision, FilterRevision, GroupConfigurationRevision, LayoutRevision,
SortRevision,
DatabaseViewRevision, FieldRevision, FieldTypeRevision, FilterRevision,
GroupConfigurationRevision, LayoutRevision, SortRevision,
};
use lib_infra::util::md5;
use lib_ot::core::{DeltaBuilder, DeltaOperations, EmptyAttributes, OperationTransform};
@ -62,7 +62,10 @@ impl GridViewRevisionPad {
Self::from_operations(view_id, operations)
}
pub fn get_groups_by_field_revs(&self, field_revs: &[Arc<FieldRevision>]) -> Vec<Arc<GroupConfigurationRevision>> {
pub fn get_groups_by_field_revs(
&self,
field_revs: &[Arc<FieldRevision>],
) -> Vec<Arc<GroupConfigurationRevision>> {
self.groups.get_objects_by_field_revs(field_revs)
}
@ -80,7 +83,9 @@ impl GridViewRevisionPad {
self.modify(|view| {
// Only save one group
view.groups.clear();
view.groups.add_object(field_id, field_type, group_configuration_rev);
view
.groups
.add_object(field_id, field_type, group_configuration_rev);
Ok(Some(()))
})
}
@ -98,7 +103,8 @@ impl GridViewRevisionPad {
configuration_id: &str,
mut_configuration_fn: F,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| match view.groups.get_mut_objects(field_id, field_type) {
self.modify(
|view| match view.groups.get_mut_objects(field_id, field_type) {
None => Ok(None),
Some(configurations_revs) => {
for configuration_rev in configurations_revs {
@ -108,8 +114,9 @@ impl GridViewRevisionPad {
}
}
Ok(None)
}
})
},
},
)
}
pub fn delete_group(
@ -133,8 +140,15 @@ impl GridViewRevisionPad {
}
/// For the moment, a field type only have one filter.
pub fn get_sorts(&self, field_id: &str, field_type_rev: &FieldTypeRevision) -> Vec<Arc<SortRevision>> {
self.sorts.get_objects(field_id, field_type_rev).unwrap_or_default()
pub fn get_sorts(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
) -> Vec<Arc<SortRevision>> {
self
.sorts
.get_objects(field_id, field_type_rev)
.unwrap_or_default()
}
pub fn get_sort(
@ -143,7 +157,8 @@ impl GridViewRevisionPad {
field_type_rev: &FieldTypeRevision,
sort_id: &str,
) -> Option<Arc<SortRevision>> {
self.sorts
self
.sorts
.get_object(field_id, field_type_rev, |sort| sort.id == sort_id)
}
@ -167,7 +182,9 @@ impl GridViewRevisionPad {
self.modify(|view| {
if let Some(sort) = view
.sorts
.get_mut_object(field_id, &sort_rev.field_type, |sort| sort.id == sort_rev.id)
.get_mut_object(field_id, &sort_rev.field_type, |sort| {
sort.id == sort_rev.id
})
{
let sort = Arc::make_mut(sort);
sort.condition = sort_rev.condition;
@ -207,8 +224,15 @@ impl GridViewRevisionPad {
}
/// For the moment, a field type only have one filter.
pub fn get_filters(&self, field_id: &str, field_type_rev: &FieldTypeRevision) -> Vec<Arc<FilterRevision>> {
self.filters.get_objects(field_id, field_type_rev).unwrap_or_default()
pub fn get_filters(
&self,
field_id: &str,
field_type_rev: &FieldTypeRevision,
) -> Vec<Arc<FilterRevision>> {
self
.filters
.get_objects(field_id, field_type_rev)
.unwrap_or_default()
}
pub fn get_filter(
@ -217,7 +241,8 @@ impl GridViewRevisionPad {
field_type_rev: &FieldTypeRevision,
filter_id: &str,
) -> Option<Arc<FilterRevision>> {
self.filters
self
.filters
.get_object(field_id, field_type_rev, |filter| filter.id == filter_id)
}
@ -239,9 +264,12 @@ impl GridViewRevisionPad {
filter_rev: FilterRevision,
) -> SyncResult<Option<GridViewRevisionChangeset>> {
self.modify(|view| {
if let Some(filter) = view
if let Some(filter) =
view
.filters
.get_mut_object(field_id, &filter_rev.field_type, |filter| filter.id == filter_rev.id)
.get_mut_object(field_id, &filter_rev.field_type, |filter| {
filter.id == filter_rev.id
})
{
let filter = Arc::make_mut(filter);
filter.condition = filter_rev.condition;
@ -294,9 +322,9 @@ impl GridViewRevisionPad {
self.operations = self.operations.compose(&operations)?;
let md5 = md5(&self.operations.json_bytes());
Ok(Some(GridViewRevisionChangeset { operations, md5 }))
},
}
}
}
},
}
}
}
@ -308,8 +336,9 @@ pub struct GridViewRevisionChangeset {
}
pub fn make_grid_view_rev_json_str(grid_revision: &DatabaseViewRevision) -> SyncResult<String> {
let json = serde_json::to_string(grid_revision)
.map_err(|err| internal_sync_error(format!("Serialize grid view to json str failed. {:?}", err)))?;
let json = serde_json::to_string(grid_revision).map_err(|err| {
internal_sync_error(format!("Serialize grid view to json str failed. {:?}", err))
})?;
Ok(json)
}

@ -30,7 +30,10 @@ impl InitialDocument for NewlineDocument {
}
pub fn initial_delta_document_content() -> String {
DeltaTextOperationBuilder::new().insert("\n").build().json_str()
DeltaTextOperationBuilder::new()
.insert("\n")
.build()
.json_str()
}
pub struct ClientDocument {
@ -92,15 +95,19 @@ impl ClientDocument {
self.operations = operations;
match &self.notify {
None => {}
None => {},
Some(notify) => {
let _ = notify.send(());
}
},
}
}
pub fn compose_operations(&mut self, operations: DeltaTextOperations) -> Result<(), SyncError> {
tracing::trace!("{} compose {}", &self.operations.json_str(), operations.json_str());
tracing::trace!(
"{} compose {}",
&self.operations.json_str(),
operations.json_str()
);
let composed_operations = self.operations.compose(&operations)?;
let mut undo_operations = operations.invert(&self.operations);
@ -125,7 +132,11 @@ impl ClientDocument {
Ok(())
}
pub fn insert<T: ToString>(&mut self, index: usize, data: T) -> Result<DeltaTextOperations, SyncError> {
pub fn insert<T: ToString>(
&mut self,
index: usize,
data: T,
) -> Result<DeltaTextOperations, SyncError> {
let text = data.to_string();
let interval = Interval::new(index, index);
validate_interval(&self.operations, &interval)?;
@ -144,15 +155,26 @@ impl ClientDocument {
Ok(operations)
}
pub fn format(&mut self, interval: Interval, attribute: AttributeEntry) -> Result<DeltaTextOperations, SyncError> {
pub fn format(
&mut self,
interval: Interval,
attribute: AttributeEntry,
) -> Result<DeltaTextOperations, SyncError> {
validate_interval(&self.operations, &interval)?;
tracing::trace!("format {} with {:?}", interval, attribute);
let operations = self.view.format(&self.operations, attribute, interval).unwrap();
let operations = self
.view
.format(&self.operations, attribute, interval)
.unwrap();
self.compose_operations(operations.clone())?;
Ok(operations)
}
pub fn replace<T: ToString>(&mut self, interval: Interval, data: T) -> Result<DeltaTextOperations, SyncError> {
pub fn replace<T: ToString>(
&mut self,
interval: Interval,
data: T,
) -> Result<DeltaTextOperations, SyncError> {
validate_interval(&self.operations, &interval)?;
let mut operations = DeltaTextOperations::default();
let text = data.to_string();
@ -187,7 +209,7 @@ impl ClientDocument {
Ok(UndoResult {
operations: undo_operations,
})
}
},
}
}
@ -201,7 +223,7 @@ impl ClientDocument {
Ok(UndoResult {
operations: redo_operations,
})
}
},
}
}
@ -225,7 +247,10 @@ impl ClientDocument {
}
}
fn validate_interval(operations: &DeltaTextOperations, interval: &Interval) -> Result<(), SyncError> {
fn validate_interval(
operations: &DeltaTextOperations,
interval: &Interval,
) -> Result<(), SyncError> {
if operations.utf16_target_len < interval.end {
tracing::error!(
"{:?} out of bounds. should 0..{}",

@ -1,6 +1,9 @@
use crate::{client_document::DeleteExt, util::is_newline};
use lib_ot::{
core::{DeltaOperationBuilder, Interval, OperationAttributes, OperationIterator, Utf16CodeUnitMetric, NEW_LINE},
core::{
DeltaOperationBuilder, Interval, OperationAttributes, OperationIterator, Utf16CodeUnitMetric,
NEW_LINE,
},
text_delta::{empty_attributes, DeltaTextOperations},
};
@ -39,7 +42,7 @@ impl DeleteExt for PreserveLineFormatOnMerge {
None => {
new_delta.retain(op.len(), empty_attributes());
continue;
}
},
Some(line_break) => {
let mut attributes = op.get_attributes();
attributes.remove_all_value();
@ -51,9 +54,9 @@ impl DeleteExt for PreserveLineFormatOnMerge {
new_delta.retain(line_break, empty_attributes());
new_delta.retain(1, attributes);
break;
},
}
}
}
},
}
}

@ -37,14 +37,16 @@ impl FormatExt for ResolveBlockFormat {
Some(_) => {
let tmp_delta = line_break(&next_op, attribute, AttributeScope::Block);
new_delta.extend(tmp_delta);
}
},
}
start += next_op.len();
}
while iter.has_next() {
let op = iter.next_op().expect("Unexpected None, iter.has_next() must return op");
let op = iter
.next_op()
.expect("Unexpected None, iter.has_next() must return op");
match find_newline(op.get_data()) {
None => new_delta.retain(op.len(), empty_attributes()),
@ -52,7 +54,7 @@ impl FormatExt for ResolveBlockFormat {
new_delta.retain(line_break, empty_attributes());
new_delta.retain(1, attribute.clone().into());
break;
}
},
}
}

@ -37,7 +37,7 @@ impl FormatExt for ResolveInlineFormat {
Some(_) => {
let tmp_delta = line_break(&next_op, attribute, AttributeScope::Inline);
new_delta.extend(tmp_delta);
}
},
}
start += next_op.len();

@ -1,6 +1,8 @@
use crate::util::find_newline;
use lib_ot::core::AttributeEntry;
use lib_ot::text_delta::{empty_attributes, AttributeScope, DeltaTextOperation, DeltaTextOperations};
use lib_ot::text_delta::{
empty_attributes, AttributeScope, DeltaTextOperation, DeltaTextOperations,
};
pub(crate) fn line_break(
op: &DeltaTextOperation,
@ -17,14 +19,14 @@ pub(crate) fn line_break(
AttributeScope::Inline => {
new_delta.retain(line_break - start, attribute.clone().into());
new_delta.retain(1, empty_attributes());
}
},
AttributeScope::Block => {
new_delta.retain(line_break - start, empty_attributes());
new_delta.retain(1, attribute.clone().into());
}
},
_ => {
tracing::error!("Unsupported parser line break for {:?}", scope);
}
},
}
start = line_break + 1;

@ -39,13 +39,13 @@ impl InsertExt for AutoExitBlock {
}
match iter.next_op_with_newline() {
None => {}
None => {},
Some((newline_op, _)) => {
let newline_attributes = attributes_except_header(&newline_op);
if block_attributes == newline_attributes {
return None;
}
}
},
}
attributes.retain_values(&[BuildInTextAttributeKey::Header.as_ref()]);

@ -27,7 +27,7 @@ impl InsertExt for AutoFormatExt {
let mut iter = OperationIterator::new(delta);
if let Some(prev) = iter.next_op_with_len(index) {
match AutoFormat::parse(prev.get_data()) {
None => {}
None => {},
Some(formatter) => {
let mut new_attributes = prev.get_attributes();
@ -54,7 +54,7 @@ impl InsertExt for AutoFormatExt {
.insert_with_attributes(text, next_attributes)
.build(),
);
}
},
}
}

@ -28,7 +28,7 @@ impl InsertExt for DefaultInsertAttribute {
// {"insert":"456"},{"insert":"\n","attributes":{"header":1}}]
if text.ends_with(NEW_LINE) {
match iter.last() {
None => {}
None => {},
Some(op) => {
if op
.get_attributes()
@ -36,7 +36,7 @@ impl InsertExt for DefaultInsertAttribute {
{
attributes.extend(op.get_attributes());
}
}
},
}
}

@ -2,7 +2,9 @@ use crate::{client_document::InsertExt, util::is_newline};
use lib_ot::core::AttributeHashMap;
use lib_ot::{
core::{DeltaOperationBuilder, OperationIterator, NEW_LINE},
text_delta::{attributes_except_header, empty_attributes, BuildInTextAttributeKey, DeltaTextOperations},
text_delta::{
attributes_except_header, empty_attributes, BuildInTextAttributeKey, DeltaTextOperations,
},
};
pub struct PreserveBlockFormatOnInsert {}
@ -24,7 +26,7 @@ impl InsertExt for PreserveBlockFormatOnInsert {
let mut iter = OperationIterator::from_offset(delta, index);
match iter.next_op_with_newline() {
None => {}
None => {},
Some((newline_op, offset)) => {
let newline_attributes = newline_op.get_attributes();
let block_attributes = attributes_except_header(&newline_op);
@ -38,7 +40,9 @@ impl InsertExt for PreserveBlockFormatOnInsert {
}
let lines: Vec<_> = text.split(NEW_LINE).collect();
let mut new_delta = DeltaOperationBuilder::new().retain(index + replace_len).build();
let mut new_delta = DeltaOperationBuilder::new()
.retain(index + replace_len)
.build();
lines.iter().enumerate().for_each(|(i, line)| {
if !line.is_empty() {
new_delta.insert(line, empty_attributes());
@ -60,7 +64,7 @@ impl InsertExt for PreserveBlockFormatOnInsert {
}
return Some(new_delta);
}
},
}
None

@ -47,7 +47,7 @@ impl InsertExt for PreserveInlineFormat {
if OpNewline::parse(next).is_equal() {
attributes = empty_attributes();
}
}
},
}
let new_delta = DeltaOperationBuilder::new()
@ -98,10 +98,10 @@ impl InsertExt for PreserveLineFormatOnSplit {
}
match iter.next_op_with_newline() {
None => {}
None => {},
Some((newline_op, _)) => {
new_delta.insert(NEW_LINE, newline_op.get_attributes());
}
},
}
Some(new_delta)

@ -112,5 +112,8 @@ fn construct_format_exts() -> Vec<FormatExtension> {
}
fn construct_delete_exts() -> Vec<DeleteExtension> {
vec![Box::new(PreserveLineFormatOnMerge {}), Box::new(DefaultDelete {})]
vec![
Box::new(PreserveLineFormatOnMerge {}),
Box::new(DefaultDelete {}),
]
}

@ -56,11 +56,15 @@ impl FolderNodePad {
}
pub fn get_workspace(&self, workspace_id: &str) -> Option<&WorkspaceNode> {
self.workspaces.iter().find(|workspace| workspace.id == workspace_id)
self
.workspaces
.iter()
.find(|workspace| workspace.id == workspace_id)
}
pub fn get_mut_workspace(&mut self, workspace_id: &str) -> Option<&mut WorkspaceNode> {
self.workspaces
self
.workspaces
.iter_mut()
.find(|workspace| workspace.id == workspace_id)
}
@ -79,7 +83,8 @@ impl FolderNodePad {
}
pub fn to_json(&self, pretty: bool) -> SyncResult<String> {
self.tree
self
.tree
.read()
.to_json(pretty)
.map_err(|e| SyncError::serde().context(e))

@ -31,11 +31,15 @@ impl FolderPad {
}
pub fn from_folder_rev(folder_rev: FolderRevision) -> SyncResult<Self> {
let json = serde_json::to_string(&folder_rev)
.map_err(|e| SyncError::internal().context(format!("Serialize to folder json str failed: {}", e)))?;
let json = serde_json::to_string(&folder_rev).map_err(|e| {
SyncError::internal().context(format!("Serialize to folder json str failed: {}", e))
})?;
let operations = FolderOperationsBuilder::new().insert(&json).build();
Ok(Self { folder_rev, operations })
Ok(Self {
folder_rev,
operations,
})
}
pub fn from_revisions(revisions: Vec<Revision>) -> SyncResult<Self> {
@ -51,7 +55,10 @@ impl FolderPad {
SyncError::internal().context(format!("Deserialize operations to folder failed: {}", e))
})?;
Ok(Self { folder_rev, operations })
Ok(Self {
folder_rev,
operations,
})
}
pub fn get_operations(&self) -> &FolderOperations {
@ -76,7 +83,10 @@ impl FolderPad {
}
#[tracing::instrument(level = "trace", skip(self, workspace_rev), fields(workspace_name=%workspace_rev.name), err)]
pub fn create_workspace(&mut self, workspace_rev: WorkspaceRevision) -> SyncResult<Option<FolderChangeset>> {
pub fn create_workspace(
&mut self,
workspace_rev: WorkspaceRevision,
) -> SyncResult<Option<FolderChangeset>> {
let workspace = Arc::new(workspace_rev);
if self.folder_rev.workspaces.contains(&workspace) {
tracing::warn!("[RootFolder]: Duplicate workspace");
@ -107,7 +117,10 @@ impl FolderPad {
})
}
pub fn read_workspaces(&self, workspace_id: Option<String>) -> SyncResult<Vec<WorkspaceRevision>> {
pub fn read_workspaces(
&self,
workspace_id: Option<String>,
) -> SyncResult<Vec<WorkspaceRevision>> {
match workspace_id {
None => {
let workspaces = self
@ -117,7 +130,7 @@ impl FolderPad {
.map(|workspace| workspace.as_ref().clone())
.collect::<Vec<WorkspaceRevision>>();
Ok(workspaces)
}
},
Some(workspace_id) => {
if let Some(workspace) = self
.folder_rev
@ -127,9 +140,12 @@ impl FolderPad {
{
Ok(vec![workspace.as_ref().clone()])
} else {
Err(SyncError::record_not_found().context(format!("Can't find workspace with id {}", workspace_id)))
}
Err(
SyncError::record_not_found()
.context(format!("Can't find workspace with id {}", workspace_id)),
)
}
},
}
}
@ -191,7 +207,12 @@ impl FolderPad {
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn move_app(&mut self, app_id: &str, from: usize, to: usize) -> SyncResult<Option<FolderChangeset>> {
pub fn move_app(
&mut self,
app_id: &str,
from: usize,
to: usize,
) -> SyncResult<Option<FolderChangeset>> {
let app = self.read_app(app_id)?;
self.with_workspace(&app.workspace_id, |workspace| {
match move_vec_element(&mut workspace.apps, |app| app.id == app_id, from, to)
@ -261,7 +282,11 @@ impl FolderPad {
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn delete_view(&mut self, app_id: &str, view_id: &str) -> SyncResult<Option<FolderChangeset>> {
pub fn delete_view(
&mut self,
app_id: &str,
view_id: &str,
) -> SyncResult<Option<FolderChangeset>> {
self.with_app(app_id, |app| {
app.belongings.retain(|view| view.id != view_id);
Ok(Some(()))
@ -269,7 +294,12 @@ impl FolderPad {
}
#[tracing::instrument(level = "trace", skip(self), err)]
pub fn move_view(&mut self, view_id: &str, from: usize, to: usize) -> SyncResult<Option<FolderChangeset>> {
pub fn move_view(
&mut self,
view_id: &str,
from: usize,
to: usize,
) -> SyncResult<Option<FolderChangeset>> {
let view = self.read_view(view_id)?;
self.with_app(&view.app_id, |app| {
match move_vec_element(&mut app.belongings, |view| view.id == view_id, from, to)
@ -286,7 +316,10 @@ impl FolderPad {
let mut new_trash = trash
.into_iter()
.flat_map(|new_trash| {
if original_trash.iter().any(|old_trash| old_trash.id == new_trash.id) {
if original_trash
.iter()
.any(|old_trash| old_trash.id == new_trash.id)
{
None
} else {
Some(Arc::new(new_trash))
@ -313,7 +346,7 @@ impl FolderPad {
}
}
Ok(trash_items)
}
},
Some(trash_id) => match self.folder_rev.trash.iter().find(|t| t.id == trash_id) {
Some(trash) => Ok(vec![trash.as_ref().clone()]),
None => Ok(vec![]),
@ -321,7 +354,10 @@ impl FolderPad {
}
}
pub fn delete_trash(&mut self, trash_ids: Option<Vec<String>>) -> SyncResult<Option<FolderChangeset>> {
pub fn delete_trash(
&mut self,
trash_ids: Option<Vec<String>>,
) -> SyncResult<Option<FolderChangeset>> {
match trash_ids {
None => self.with_trash(|trash| {
trash.clear();
@ -344,8 +380,9 @@ impl FolderPad {
}
pub fn make_folder_rev_json_str(folder_rev: &FolderRevision) -> SyncResult<String> {
let json = serde_json::to_string(folder_rev)
.map_err(|err| internal_sync_error(format!("Serialize folder to json str failed. {:?}", err)))?;
let json = serde_json::to_string(folder_rev).map_err(|err| {
internal_sync_error(format!("Serialize folder to json str failed. {:?}", err))
})?;
Ok(json)
}
@ -368,9 +405,9 @@ impl FolderPad {
operations,
md5: self.folder_md5(),
}))
},
}
}
}
},
}
}
@ -379,10 +416,16 @@ impl FolderPad {
F: FnOnce(&mut WorkspaceRevision) -> SyncResult<Option<()>>,
{
self.modify_workspaces(|workspaces| {
if let Some(workspace) = workspaces.iter_mut().find(|workspace| workspace_id == workspace.id) {
if let Some(workspace) = workspaces
.iter_mut()
.find(|workspace| workspace_id == workspace.id)
{
f(Arc::make_mut(workspace))
} else {
tracing::warn!("[FolderPad]: Can't find any workspace with id: {}", workspace_id);
tracing::warn!(
"[FolderPad]: Can't find any workspace with id: {}",
workspace_id
);
Ok(None)
}
})
@ -406,9 +449,9 @@ impl FolderPad {
operations,
md5: self.folder_md5(),
}))
},
}
}
}
},
}
}
@ -425,17 +468,26 @@ impl FolderPad {
None => {
tracing::warn!("[FolderPad]: Can't find any app with id: {}", app_id);
return Ok(None);
}
},
Some(workspace) => workspace.id.clone(),
};
self.with_workspace(&workspace_id, |workspace| {
// It's ok to unwrap because we get the workspace from the app_id.
f(workspace.apps.iter_mut().find(|app| app_id == app.id).unwrap())
f(workspace
.apps
.iter_mut()
.find(|app| app_id == app.id)
.unwrap())
})
}
fn with_view<F>(&mut self, belong_to_id: &str, view_id: &str, f: F) -> SyncResult<Option<FolderChangeset>>
fn with_view<F>(
&mut self,
belong_to_id: &str,
view_id: &str,
f: F,
) -> SyncResult<Option<FolderChangeset>>
where
F: FnOnce(&mut ViewRevision) -> SyncResult<Option<()>>,
{
@ -444,7 +496,7 @@ impl FolderPad {
None => {
tracing::warn!("[FolderPad]: Can't find any view with id: {}", view_id);
Ok(None)
}
},
Some(view) => f(view),
}
})
@ -486,13 +538,22 @@ mod tests {
let _time = Utc::now();
let mut workspace_1 = WorkspaceRevision::default();
workspace_1.name = "My first workspace".to_owned();
let operations_1 = folder.create_workspace(workspace_1).unwrap().unwrap().operations;
let operations_1 = folder
.create_workspace(workspace_1)
.unwrap()
.unwrap()
.operations;
let mut workspace_2 = WorkspaceRevision::default();
workspace_2.name = "My second workspace".to_owned();
let operations_2 = folder.create_workspace(workspace_2).unwrap().unwrap().operations;
let operations_2 = folder
.create_workspace(workspace_2)
.unwrap()
.unwrap()
.operations;
let folder_from_operations = make_folder_from_operations(initial_operations, vec![operations_1, operations_2]);
let folder_from_operations =
make_folder_from_operations(initial_operations, vec![operations_1, operations_2]);
assert_eq!(folder, folder_from_operations);
}
@ -506,7 +567,10 @@ mod tests {
] {
let mut deserializer = serde_json::Deserializer::from_reader(json.as_bytes());
let folder_rev = FolderRevision::deserialize(&mut deserializer).unwrap();
assert_eq!(folder_rev.workspaces.first().as_ref().unwrap().name, "first workspace");
assert_eq!(
folder_rev.workspaces.first().as_ref().unwrap().name,
"first workspace"
);
}
}
@ -733,7 +797,11 @@ mod tests {
#[test]
fn folder_delete_view() {
let (mut folder, initial_operations, view) = test_view_folder();
let operations = folder.delete_view(&view.app_id, &view.id).unwrap().unwrap().operations;
let operations = folder
.delete_view(&view.app_id, &view.id)
.unwrap()
.unwrap()
.operations;
let new_folder = make_folder_from_operations(initial_operations, vec![operations]);
assert_folder_equal(
@ -791,7 +859,11 @@ mod tests {
#[test]
fn folder_delete_trash() {
let (mut folder, initial_operations, trash) = test_trash();
let operations = folder.delete_trash(Some(vec![trash.id])).unwrap().unwrap().operations;
let operations = folder
.delete_trash(Some(vec![trash.id]))
.unwrap()
.unwrap()
.operations;
assert_folder_equal(
&folder,
&make_folder_from_operations(initial_operations, vec![operations]),
@ -834,7 +906,13 @@ mod tests {
app_rev.name = "😁 my first app".to_owned();
initial_operations = initial_operations
.compose(&folder_rev.create_app(app_rev.clone()).unwrap().unwrap().operations)
.compose(
&folder_rev
.create_app(app_rev.clone())
.unwrap()
.unwrap()
.operations,
)
.unwrap();
(folder_rev, initial_operations, app_rev)
@ -847,7 +925,13 @@ mod tests {
view_rev.name = "🎃 my first view".to_owned();
initial_operations = initial_operations
.compose(&folder.create_view(view_rev.clone()).unwrap().unwrap().operations)
.compose(
&folder
.create_view(view_rev.clone())
.unwrap()
.unwrap()
.operations,
)
.unwrap();
(folder, initial_operations, view_rev)

@ -3,8 +3,13 @@ use crate::errors::SyncResult;
use lib_ot::core::{AttributeHashMap, AttributeValue, Changeset, NodeId, NodeOperation};
use std::sync::Arc;
pub fn get_attributes_str_value(tree: Arc<AtomicNodeTree>, node_id: &NodeId, key: &str) -> Option<String> {
tree.read()
pub fn get_attributes_str_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
) -> Option<String> {
tree
.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
.and_then(|value| value.str_value())
@ -35,20 +40,33 @@ pub fn set_attributes_str_value(
}
#[allow(dead_code)]
pub fn get_attributes_int_value(tree: Arc<AtomicNodeTree>, node_id: &NodeId, key: &str) -> Option<i64> {
tree.read()
pub fn get_attributes_int_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
) -> Option<i64> {
tree
.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
.and_then(|value| value.int_value())
}
pub fn get_attributes(tree: Arc<AtomicNodeTree>, node_id: &NodeId) -> Option<AttributeHashMap> {
tree.read().get_node(*node_id).map(|node| node.attributes.clone())
tree
.read()
.get_node(*node_id)
.map(|node| node.attributes.clone())
}
#[allow(dead_code)]
pub fn get_attributes_value(tree: Arc<AtomicNodeTree>, node_id: &NodeId, key: &str) -> Option<AttributeValue> {
tree.read()
pub fn get_attributes_value(
tree: Arc<AtomicNodeTree>,
node_id: &NodeId,
key: &str,
) -> Option<AttributeValue> {
tree
.read()
.get_node(*node_id)
.and_then(|node| node.attributes.get(key).cloned())
}

@ -48,7 +48,7 @@ where
} else {
break;
}
}
},
Err(_) => break,
}
} else {
@ -110,13 +110,13 @@ pub fn cal_diff<T: OperationAttributes>(old: String, new: String) -> Option<Delt
match chunk {
Chunk::Equal(s) => {
delta_builder = delta_builder.retain(OTString::from(*s).utf16_len());
}
},
Chunk::Delete(s) => {
delta_builder = delta_builder.delete(OTString::from(*s).utf16_len());
}
},
Chunk::Insert(s) => {
delta_builder = delta_builder.insert(s);
}
},
}
}

@ -13,7 +13,11 @@ fn client_folder_create_default_folder_test() {
#[test]
fn client_folder_create_default_folder_with_workspace_test() {
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
let json = folder_pad.to_json(false).unwrap();
assert_eq!(
@ -30,7 +34,11 @@ fn client_folder_create_default_folder_with_workspace_test() {
#[test]
fn client_folder_delete_workspace_test() {
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
folder_pad.workspaces.remove_workspace("1");
let json = folder_pad.to_json(false).unwrap();
@ -43,7 +51,11 @@ fn client_folder_delete_workspace_test() {
#[test]
fn client_folder_update_workspace_name_test() {
let mut folder_pad = FolderNodePad::new();
let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
folder_pad
.workspaces
@ -52,7 +64,12 @@ fn client_folder_update_workspace_name_test() {
.set_name("my first workspace".to_string());
assert_eq!(
folder_pad.workspaces.get_workspace("1").unwrap().get_name().unwrap(),
folder_pad
.workspaces
.get_workspace("1")
.unwrap()
.get_name()
.unwrap(),
"my first workspace"
);
}

View File

@ -3,15 +3,39 @@ use folder_model::AppRevision;
use lib_ot::core::Path;
pub enum FolderNodePadScript {
CreateWorkspace { id: String, name: String },
DeleteWorkspace { id: String },
AssertPathOfWorkspace { id: String, expected_path: Path },
AssertNumberOfWorkspace { expected: usize },
CreateApp { id: String, name: String },
DeleteApp { id: String },
UpdateApp { id: String, name: String },
AssertApp { id: String, expected: Option<AppRevision> },
AssertAppContent { id: String, name: String },
CreateWorkspace {
id: String,
name: String,
},
DeleteWorkspace {
id: String,
},
AssertPathOfWorkspace {
id: String,
expected_path: Path,
},
AssertNumberOfWorkspace {
expected: usize,
},
CreateApp {
id: String,
name: String,
},
DeleteApp {
id: String,
},
UpdateApp {
id: String,
name: String,
},
AssertApp {
id: String,
expected: Option<AppRevision>,
},
AssertAppContent {
id: String,
name: String,
},
// AssertNumberOfApps { expected: usize },
}
@ -22,7 +46,11 @@ pub struct FolderNodePadTest {
impl FolderNodePadTest {
pub fn new() -> FolderNodePadTest {
let mut folder_pad = FolderNodePad::default();
let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
let workspace = WorkspaceNode::new(
folder_pad.tree.clone(),
"1".to_string(),
"workspace name".to_string(),
);
folder_pad.workspaces.add_workspace(workspace).unwrap();
Self { folder_pad }
}
@ -38,32 +66,32 @@ impl FolderNodePadTest {
FolderNodePadScript::CreateWorkspace { id, name } => {
let workspace = WorkspaceNode::new(self.folder_pad.tree.clone(), id, name);
self.folder_pad.workspaces.add_workspace(workspace).unwrap();
}
},
FolderNodePadScript::DeleteWorkspace { id } => {
self.folder_pad.workspaces.remove_workspace(id);
}
},
FolderNodePadScript::AssertPathOfWorkspace { id, expected_path } => {
let workspace_node: &WorkspaceNode = self.folder_pad.workspaces.get_workspace(id).unwrap();
let node_id = workspace_node.node_id.unwrap();
let path = self.folder_pad.tree.read().path_from_node_id(node_id);
assert_eq!(path, expected_path);
}
},
FolderNodePadScript::AssertNumberOfWorkspace { expected } => {
assert_eq!(self.folder_pad.workspaces.len(), expected);
}
},
FolderNodePadScript::CreateApp { id, name } => {
let app_node = AppNode::new(self.folder_pad.tree.clone(), id, name);
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.add_app(app_node).unwrap();
}
},
FolderNodePadScript::DeleteApp { id } => {
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.remove_app(&id);
}
},
FolderNodePadScript::UpdateApp { id, name } => {
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
workspace_node.get_mut_app(&id).unwrap().set_name(name);
}
},
FolderNodePadScript::AssertApp { id, expected } => {
let workspace_node = self.folder_pad.get_workspace("1").unwrap();
let app = workspace_node.get_app(&id);
@ -73,14 +101,14 @@ impl FolderNodePadTest {
let app_node = app.unwrap();
assert_eq!(expected_app.name, app_node.get_name().unwrap());
assert_eq!(expected_app.id, app_node.get_id().unwrap());
},
}
}
}
},
FolderNodePadScript::AssertAppContent { id, name } => {
let workspace_node = self.folder_pad.get_workspace("1").unwrap();
let app = workspace_node.get_app(&id).unwrap();
assert_eq!(app.get_name().unwrap(), name)
} // FolderNodePadScript::AssertNumberOfApps { expected } => {
}, // FolderNodePadScript::AssertNumberOfApps { expected } => {
// let workspace_node = self.folder_pad.get_workspace("1").unwrap();
// assert_eq!(workspace_node.apps.len(), expected);
// }

View File

@ -27,7 +27,9 @@ fn client_folder_create_multi_workspaces_test() {
},
AssertNumberOfWorkspace { expected: 3 },
// The path of the workspace 'b' will be changed after deleting the 'a' workspace.
DeleteWorkspace { id: "a".to_string() },
DeleteWorkspace {
id: "a".to_string(),
},
AssertPathOfWorkspace {
id: "b".to_string(),
expected_path: vec![0, 0, 1].into(),
@ -58,7 +60,9 @@ fn client_folder_delete_app_test() {
id: "1".to_string(),
name: "my first app".to_string(),
},
DeleteApp { id: "1".to_string() },
DeleteApp {
id: "1".to_string(),
},
AssertApp {
id: "1".to_string(),
expected: None,

View File

@ -20,9 +20,16 @@ pub fn gen(crate_name: &str) {
return;
}
let crate_path = std::fs::canonicalize(".").unwrap().as_path().display().to_string();
let crate_path = std::fs::canonicalize(".")
.unwrap()
.as_path()
.display()
.to_string();
let event_crates = parse_dart_event_files(vec![crate_path]);
let event_ast = event_crates.iter().flat_map(parse_event_crate).collect::<Vec<_>>();
let event_ast = event_crates
.iter()
.flat_map(parse_event_crate)
.collect::<Vec<_>>();
let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
let mut render_result = DART_IMPORTED.to_owned();
@ -49,7 +56,8 @@ pub fn gen(crate_name: &str) {
std::fs::create_dir_all(dart_event_folder.as_path()).unwrap();
}
let dart_event_file_path = path_string_with_component(&dart_event_folder, vec!["dart_event.dart"]);
let dart_event_file_path =
path_string_with_component(&dart_event_folder, vec!["dart_event.dart"]);
println!("cargo:rerun-if-changed={}", dart_event_file_path);
match std::fs::OpenOptions::new()
@ -62,10 +70,10 @@ pub fn gen(crate_name: &str) {
Ok(ref mut file) => {
file.write_all(render_result.as_bytes()).unwrap();
File::flush(file).unwrap();
}
},
Err(err) => {
panic!("Failed to open file: {}, {:?}", dart_event_file_path, err);
}
},
}
}
@ -110,11 +118,13 @@ pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
.event_files
.iter()
.flat_map(|event_file| {
let file_path = path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
let file_path =
path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
let file_content = read_file(file_path.as_ref()).unwrap();
let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
ast.items
ast
.items
.iter()
.flat_map(|item| match item {
Item::Enum(item_enum) => {
@ -132,7 +142,7 @@ pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
.enumerate()
.map(|(_index, variant)| EventASTContext::from(&variant.attrs))
.collect::<Vec<_>>()
}
},
_ => vec![],
})
.collect::<Vec<_>>()
@ -141,7 +151,8 @@ pub fn parse_event_crate(event_crate: &DartEventCrate) -> Vec<EventASTContext> {
}
pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContext> {
ast.iter()
ast
.iter()
.map(|event_ast| {
let input_deserializer = event_ast
.event_input

View File

@ -28,7 +28,9 @@ impl EventTemplate {
self.tera_context.insert("event_class", &dart_class_name);
self.tera_context.insert("event", &event);
self.tera_context.insert("has_input", &ctx.input_deserializer.is_some());
self
.tera_context
.insert("has_input", &ctx.input_deserializer.is_some());
match ctx.input_deserializer {
None => self.tera_context.insert("input_deserializer", "Unit"),
Some(ref input) => self.tera_context.insert("input_deserializer", input),
@ -47,7 +49,9 @@ impl EventTemplate {
Some(ref output) => self.tera_context.insert("output_deserializer", output),
}
self.tera_context.insert("error_deserializer", &ctx.error_deserializer);
self
.tera_context
.insert("error_deserializer", &ctx.error_deserializer);
let tera = get_tera("dart_event");
match tera.render("event_template.tera", &self.tera_context) {
@ -55,7 +59,7 @@ impl EventTemplate {
Err(e) => {
log::error!("{:?}", e);
None
}
},
}
}
}

View File

@ -49,7 +49,12 @@ pub fn parse_crate_config_from(entry: &walkdir::DirEntry) -> Option<CrateConfig>
}
let crate_path = entry.path().parent().unwrap().to_path_buf();
let flowy_config = FlowyConfig::from_toml_file(config_path.as_path());
let crate_folder = crate_path.file_stem().unwrap().to_str().unwrap().to_string();
let crate_folder = crate_path
.file_stem()
.unwrap()
.to_str()
.unwrap()
.to_string();
Some(CrateConfig {
crate_path,

View File

@ -121,7 +121,12 @@ pub fn get_ast_structs(ast: &syn::File) -> Vec<Struct> {
if let Item::Struct(item_struct) = item {
let (_, fields) = struct_from_ast(&ast_result, &item_struct.fields);
if fields.iter().filter(|f| f.pb_attrs.pb_index().is_some()).count() > 0 {
if fields
.iter()
.filter(|f| f.pb_attrs.pb_index().is_some())
.count()
> 0
{
proto_structs.push(Struct {
name: item_struct.ident.to_string(),
fields,
@ -140,7 +145,12 @@ pub fn get_ast_enums(ast: &syn::File) -> Vec<FlowyEnum> {
ast.items.iter().for_each(|item| {
// https://docs.rs/syn/1.0.54/syn/enum.Item.html
if let Item::Enum(item_enum) = item {
let attrs = flowy_ast::enum_from_ast(&ast_result, &item_enum.ident, &item_enum.variants, &ast.attrs);
let attrs = flowy_ast::enum_from_ast(
&ast_result,
&item_enum.ident,
&item_enum.variants,
&ast.attrs,
);
flowy_enums.push(FlowyEnum {
name: item_enum.ident.to_string(),
attrs,

View File

@ -18,7 +18,11 @@ use std::process::Command;
use walkdir::WalkDir;
pub fn gen(crate_name: &str) {
let crate_path = std::fs::canonicalize(".").unwrap().as_path().display().to_string();
let crate_path = std::fs::canonicalize(".")
.unwrap()
.as_path()
.display()
.to_string();
// 1. generate the proto files to proto_file_dir
#[cfg(feature = "proto_gen")]
@ -27,8 +31,16 @@ pub fn gen(crate_name: &str) {
for proto_crate in proto_crates {
let mut proto_file_paths = vec![];
let mut file_names = vec![];
let proto_file_output_path = proto_crate.proto_output_path().to_str().unwrap().to_string();
let protobuf_output_path = proto_crate.protobuf_crate_path().to_str().unwrap().to_string();
let proto_file_output_path = proto_crate
.proto_output_path()
.to_str()
.unwrap()
.to_string();
let protobuf_output_path = proto_crate
.protobuf_crate_path()
.to_str()
.unwrap()
.to_string();
for (path, file_name) in WalkDir::new(&proto_file_output_path)
.into_iter()
@ -101,8 +113,8 @@ fn generate_ts_protobuf_files(
protoc_bin_path: &Path,
) {
let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
let tauri_backend_service_path =
std::env::var("TAURI_BACKEND_SERVICE_PATH").unwrap_or("appflowy_tauri/src/services/backend".to_string());
let tauri_backend_service_path = std::env::var("TAURI_BACKEND_SERVICE_PATH")
.unwrap_or("appflowy_tauri/src/services/backend".to_string());
let mut output = PathBuf::new();
output.push(root);
@ -142,10 +154,10 @@ fn generate_ts_protobuf_files(
file.write_all(export.as_bytes()).unwrap();
File::flush(file).unwrap();
}
},
Err(err) => {
panic!("Failed to open file: {}", err);
}
},
}
}
@ -208,10 +220,10 @@ fn generate_dart_protobuf_files(
file.write_all(export.as_bytes()).unwrap();
File::flush(file).unwrap();
}
},
Err(err) => {
panic!("Failed to open file: {}", err);
}
},
}
}
@ -241,7 +253,11 @@ pub fn check_pb_dart_plugin() {
paths.iter().for_each(|s| msg.push_str(&format!("{}\n", s)));
if let Ok(output) = Command::new("sh").arg("-c").arg("which protoc-gen-dart").output() {
if let Ok(output) = Command::new("sh")
.arg("-c")
.arg("which protoc-gen-dart")
.output()
{
msg.push_str(&format!(
"Installed protoc-gen-dart path: {:?}\n",
String::from_utf8(output.stdout).unwrap()
@ -264,7 +280,10 @@ fn gen_proto_files(crate_name: &str, crate_path: &str) -> Vec<ProtobufCrate> {
.map(|info| info.protobuf_crate.clone())
.collect::<Vec<_>>();
crate_context.into_iter().flat_map(|info| info.files).for_each(|file| {
crate_context
.into_iter()
.flat_map(|info| info.files)
.for_each(|file| {
println!("cargo:rerun-if-changed={}", file.file_path);
});

View File

@ -39,10 +39,10 @@ impl ProtoGenerator {
Ok(ref mut file) => {
file.write_all(proto_cache_str.as_bytes()).unwrap();
File::flush(file).unwrap();
}
},
Err(_err) => {
panic!("Failed to open file: {}", protobuf_cache_path);
}
},
}
crate_contexts
@ -53,7 +53,8 @@ fn write_proto_files(crate_contexts: &[ProtobufCrateContext]) {
let file_path_content_map = crate_contexts
.iter()
.flat_map(|ctx| {
ctx.files
ctx
.files
.iter()
.map(|file| {
(
@ -87,7 +88,10 @@ fn write_proto_files(crate_contexts: &[ProtobufCrateContext]) {
}
}
fn gen_import_content(current_file: &ProtoFile, file_path_symbols_map: &HashMap<String, ProtoFileSymbol>) -> String {
fn gen_import_content(
current_file: &ProtoFile,
file_path_symbols_map: &HashMap<String, ProtoFileSymbol>,
) -> String {
let mut import_files: Vec<String> = vec![];
file_path_symbols_map
.iter()
@ -139,10 +143,10 @@ fn write_rust_crate_mod_file(crate_contexts: &[ProtobufCrateContext]) {
},
);
file.write_all(mod_file_content.as_bytes()).unwrap();
}
},
Err(err) => {
panic!("Failed to open file: {}", err);
}
},
}
}
}
@ -154,8 +158,14 @@ impl ProtoCache {
.flat_map(|crate_info| &crate_info.files)
.collect::<Vec<&ProtoFile>>();
let structs: Vec<String> = proto_files.iter().flat_map(|info| info.structs.clone()).collect();
let enums: Vec<String> = proto_files.iter().flat_map(|info| info.enums.clone()).collect();
let structs: Vec<String> = proto_files
.iter()
.flat_map(|info| info.structs.clone())
.collect();
let enums: Vec<String> = proto_files
.iter()
.flat_map(|info| info.enums.clone())
.collect();
Self { structs, enums }
}
}

View File

@ -24,7 +24,8 @@ impl ProtobufCrateContext {
pub fn create_crate_mod_file(&self) {
// mod model;
// pub use model::*;
let mod_file_path = path_string_with_component(&self.protobuf_crate.protobuf_crate_path(), vec!["mod.rs"]);
let mod_file_path =
path_string_with_component(&self.protobuf_crate.protobuf_crate_path(), vec!["mod.rs"]);
let mut content = "#![cfg_attr(rustfmt, rustfmt::skip)]\n".to_owned();
content.push_str("// Auto-generated, do not edit\n");
content.push_str("mod model;\npub use model::*;");
@ -37,10 +38,10 @@ impl ProtobufCrateContext {
{
Ok(ref mut file) => {
file.write_all(content.as_bytes()).unwrap();
}
},
Err(err) => {
panic!("Failed to open protobuf mod file: {}", err);
}
},
}
}
@ -52,7 +53,10 @@ impl ProtobufCrateContext {
#[allow(dead_code)]
pub fn flutter_mod_file(&self, root: &str) -> String {
let crate_module_dir = format!("{}/{}/protobuf.dart", root, self.protobuf_crate.crate_folder);
let crate_module_dir = format!(
"{}/{}/protobuf.dart",
root, self.protobuf_crate.crate_folder
);
crate_module_dir
}
}
@ -75,7 +79,8 @@ impl ProtobufCrate {
// Return the file paths for each Rust file that is used to generate the proto file.
pub fn proto_input_paths(&self) -> Vec<PathBuf> {
self.flowy_config
self
.flowy_config
.proto_input
.iter()
.map(|name| path_buf_with_component(&self.crate_path, vec![name]))

View File

@ -29,7 +29,7 @@ impl ProtobufDeriveMeta {
Err(e) => {
log::error!("{:?}", e);
None
}
},
}
}
}

View File

@ -19,8 +19,10 @@ impl EnumTemplate {
pub fn set_message_enum(&mut self, flowy_enum: &FlowyEnum) {
self.context.insert("enum_name", &flowy_enum.name);
flowy_enum.attrs.iter().for_each(|item| {
self.items
.push(format!("{} = {};", item.attrs.enum_item_name, item.attrs.value))
self.items.push(format!(
"{} = {};",
item.attrs.enum_item_name, item.attrs.value
))
})
}
@ -32,7 +34,7 @@ impl EnumTemplate {
Err(e) => {
log::error!("{:?}", e);
None
}
},
}
}
}

View File

@ -48,19 +48,21 @@ impl StructTemplate {
if let Some(ref category) = field.bracket_category {
match category {
BracketCategory::Opt => match &field.bracket_inner_ty {
None => {}
None => {},
Some(inner_ty) => match inner_ty.to_string().as_str() {
//TODO: support hashmap or something else wrapped by Option
"Vec" => {
self.fields
.push(format!("oneof one_of_{} {{ bytes {} = {}; }};", name, name, index));
}
self.fields.push(format!(
"oneof one_of_{} {{ bytes {} = {}; }};",
name, name, index
));
},
_ => {
self.fields.push(format!(
"oneof one_of_{} {{ {} {} = {}; }};",
name, mapped_ty, name, index
));
}
},
},
},
BracketCategory::Map((k, v)) => {
@ -74,7 +76,7 @@ impl StructTemplate {
name,
index
));
}
},
BracketCategory::Vec => {
let bracket_ty: &str = &field.bracket_ty.as_ref().unwrap().to_string();
// Vec<u8>
@ -86,8 +88,10 @@ impl StructTemplate {
RUST_TYPE_MAP[bracket_ty], mapped_ty, name, index
))
}
}
BracketCategory::Other => self.fields.push(format!("{} {} = {};", mapped_ty, name, index)),
},
BracketCategory::Other => self
.fields
.push(format!("{} {} = {};", mapped_ty, name, index)),
}
}
}
@ -100,7 +104,7 @@ impl StructTemplate {
Err(e) => {
log::error!("{:?}", e);
None
}
},
}
}
}

View File

@ -25,14 +25,19 @@ impl EventTemplate {
pub fn render(&mut self, ctx: EventRenderContext, index: usize) -> Option<String> {
self.tera_context.insert("index", &index);
let event_func_name = format!("{}{}", ctx.event_ty, ctx.event);
self.tera_context.insert("event_func_name", &event_func_name);
self.tera_context
self
.tera_context
.insert("event_func_name", &event_func_name);
self
.tera_context
.insert("event_name", &format!("{}.{}", ctx.prefix, ctx.event_ty));
self.tera_context.insert("event", &ctx.event);
self.tera_context.insert("has_input", &ctx.input_deserializer.is_some());
self
.tera_context
.insert("has_input", &ctx.input_deserializer.is_some());
match ctx.input_deserializer {
None => {}
None => {},
Some(ref input) => self
.tera_context
.insert("input_deserializer", &format!("{}.{}", ctx.prefix, input)),
@ -59,7 +64,7 @@ impl EventTemplate {
Err(e) => {
log::error!("{:?}", e);
None
}
},
}
}
}

View File

@ -14,12 +14,19 @@ use walkdir::WalkDir;
pub fn gen(crate_name: &str) {
let root = std::env::var("CARGO_MAKE_WORKING_DIRECTORY").unwrap_or("../../".to_string());
let tauri_backend_service_path =
std::env::var("TAURI_BACKEND_SERVICE_PATH").unwrap_or("appflowy_tauri/src/services/backend".to_string());
let tauri_backend_service_path = std::env::var("TAURI_BACKEND_SERVICE_PATH")
.unwrap_or("appflowy_tauri/src/services/backend".to_string());
let crate_path = std::fs::canonicalize(".").unwrap().as_path().display().to_string();
let crate_path = std::fs::canonicalize(".")
.unwrap()
.as_path()
.display()
.to_string();
let event_crates = parse_ts_event_files(vec![crate_path]);
let event_ast = event_crates.iter().flat_map(parse_event_crate).collect::<Vec<_>>();
let event_ast = event_crates
.iter()
.flat_map(parse_event_crate)
.collect::<Vec<_>>();
let event_render_ctx = ast_to_event_render_ctx(event_ast.as_ref());
let mut render_result = TS_HEADER.to_string();
@ -41,8 +48,10 @@ pub fn gen(crate_name: &str) {
let event_file = "event";
let event_file_ext = "ts";
let ts_event_file_path =
path_string_with_component(&ts_event_folder, vec![&format!("{}.{}", event_file, event_file_ext)]);
let ts_event_file_path = path_string_with_component(
&ts_event_folder,
vec![&format!("{}.{}", event_file, event_file_ext)],
);
println!("cargo:rerun-if-changed={}", ts_event_file_path);
match std::fs::OpenOptions::new()
@ -55,10 +64,10 @@ pub fn gen(crate_name: &str) {
Ok(ref mut file) => {
file.write_all(render_result.as_bytes()).unwrap();
File::flush(file).unwrap();
}
},
Err(err) => {
panic!("Failed to open file: {}, {:?}", ts_event_file_path, err);
}
},
}
let ts_index = path_string_with_component(&ts_event_folder, vec!["index.ts"]);
@ -76,10 +85,10 @@ pub fn gen(crate_name: &str) {
export.push_str(&format!("export * from './{}';\n", event_file));
file.write_all(export.as_bytes()).unwrap();
File::flush(file).unwrap();
}
},
Err(err) => {
panic!("Failed to open file: {}", err);
}
},
}
}
@ -119,11 +128,13 @@ pub fn parse_event_crate(event_crate: &TsEventCrate) -> Vec<EventASTContext> {
.event_files
.iter()
.flat_map(|event_file| {
let file_path = path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
let file_path =
path_string_with_component(&event_crate.crate_path, vec![event_file.as_str()]);
let file_content = read_file(file_path.as_ref()).unwrap();
let ast = syn::parse_file(file_content.as_ref()).expect("Unable to parse file");
ast.items
ast
.items
.iter()
.flat_map(|item| match item {
Item::Enum(item_enum) => {
@ -141,7 +152,7 @@ pub fn parse_event_crate(event_crate: &TsEventCrate) -> Vec<EventASTContext> {
.enumerate()
.map(|(_index, variant)| EventASTContext::from(&variant.attrs))
.collect::<Vec<_>>()
}
},
_ => vec![],
})
.collect::<Vec<_>>()
@ -160,7 +171,8 @@ pub fn ast_to_event_render_ctx(ast: &[EventASTContext]) -> Vec<EventRenderContex
}
});
ast.iter()
ast
.iter()
.map(|event_ast| {
let input_deserializer = event_ast
.event_input

View File

@ -17,7 +17,7 @@ pub fn read_file(path: &str) -> Option<String> {
Err(e) => {
log::error!("{}, with error: {:?}", path, e);
Some("".to_string())
}
},
}
}
@ -34,17 +34,21 @@ pub fn save_content_to_file_with_diff_prompt(content: &str, output_file: &str) {
{
Ok(ref mut file) => {
file.write_all(new_content.as_bytes()).unwrap();
}
},
Err(err) => {
panic!("Failed to open log file: {}", err);
}
},
};
if new_content != old_content {
print_diff(old_content, new_content.clone());
write_to_file()
}
} else {
match OpenOptions::new().create(true).write(true).open(output_file) {
match OpenOptions::new()
.create(true)
.write(true)
.open(output_file)
{
Ok(ref mut file) => file.write_all(content.as_bytes()).unwrap(),
Err(err) => panic!("Failed to open or create {}: {}", output_file, err),
}
@ -64,11 +68,11 @@ pub fn print_diff(old_content: String, new_content: String) {
match change.tag() {
ChangeTag::Delete => {
print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
}
},
ChangeTag::Insert => {
print!("{}{}", style.apply_to(sign).bold(), style.apply_to(change));
}
ChangeTag::Equal => {}
},
ChangeTag::Equal => {},
};
}
println!("---------------------------------------------------");
@ -91,7 +95,11 @@ pub fn is_proto_file(e: &walkdir::DirEntry) -> bool {
}
pub fn is_hidden(entry: &walkdir::DirEntry) -> bool {
entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
entry
.file_name()
.to_str()
.map(|s| s.starts_with('.'))
.unwrap_or(false)
}
pub fn create_dir_if_not_exist(dir: &Path) {
@ -101,7 +109,10 @@ pub fn create_dir_if_not_exist(dir: &Path) {
}
pub fn path_string_with_component(path: &Path, components: Vec<&str>) -> String {
path_buf_with_component(path, components).to_str().unwrap().to_string()
path_buf_with_component(path, components)
.to_str()
.unwrap()
.to_string()
}
#[allow(dead_code)]
@ -138,7 +149,12 @@ where
pub fn suffix_relative_to_path(path: &str, base: &str) -> String {
let base = Path::new(base);
let path = Path::new(path);
path.strip_prefix(base).unwrap().to_str().unwrap().to_owned()
path
.strip_prefix(base)
.unwrap()
.to_str()
.unwrap()
.to_owned()
}
pub fn get_tera(directory: &str) -> Tera {
@ -149,7 +165,7 @@ pub fn get_tera(directory: &str) -> Tera {
Ok(p) => p.as_path().display().to_string(),
Err(e) => {
panic!("❌ Canonicalize file path {} failed {:?}", root, e);
}
},
};
let mut template_path = format!("{}/**/*.tera", root_absolute_path);
@ -163,7 +179,7 @@ pub fn get_tera(directory: &str) -> Tera {
Err(e) => {
log::error!("Parsing error(s): {}", e);
::std::process::exit(1);
}
},
}
}

View File

@ -49,7 +49,10 @@ impl DocumentDepsResolver {
struct BlockUserImpl(Arc<UserSession>);
impl DocumentUser for BlockUserImpl {
fn user_dir(&self) -> Result<String, FlowyError> {
let dir = self.0.user_dir().map_err(|e| FlowyError::unauthorized().context(e))?;
let dir = self
.0
.user_dir()
.map_err(|e| FlowyError::unauthorized().context(e))?;
let doc_dir = format!("{}/document", dir);
if !Path::new(&doc_dir).exists() {
@ -85,10 +88,10 @@ impl RevisionWebSocket for DocumentRevisionWebSocket {
let ws_conn = self.0.clone();
Box::pin(async move {
match ws_conn.web_socket().await? {
None => {}
None => {},
Some(sender) => {
sender.send(msg).map_err(internal_error)?;
}
},
}
Ok(())
})

View File

@ -46,13 +46,22 @@ impl FolderDepsResolver {
Some(local_server) => local_server,
};
let view_data_processor = make_view_data_processor(text_block_manager.clone(), grid_manager.clone());
let folder_manager =
Arc::new(FolderManager::new(user.clone(), cloud_service, database, view_data_processor, web_socket).await);
let view_data_processor =
make_view_data_processor(text_block_manager.clone(), grid_manager.clone());
let folder_manager = Arc::new(
FolderManager::new(
user.clone(),
cloud_service,
database,
view_data_processor,
web_socket,
)
.await,
);
if let (Ok(user_id), Ok(token)) = (user.user_id(), user.token()) {
match folder_manager.initialize(&user_id, &token).await {
Ok(_) => {}
Ok(_) => {},
Err(e) => tracing::error!("Initialize folder manager failed: {}", e),
}
}
@ -70,12 +79,18 @@ fn make_view_data_processor(
let mut map: HashMap<ViewDataFormatPB, Arc<dyn ViewDataProcessor + Send + Sync>> = HashMap::new();
let document_processor = Arc::new(DocumentViewDataProcessor(document_manager));
document_processor.data_types().into_iter().for_each(|data_type| {
document_processor
.data_types()
.into_iter()
.for_each(|data_type| {
map.insert(data_type, document_processor.clone());
});
let grid_data_impl = Arc::new(GridViewDataProcessor(grid_manager));
grid_data_impl.data_types().into_iter().for_each(|data_type| {
grid_data_impl
.data_types()
.into_iter()
.for_each(|data_type| {
map.insert(data_type, grid_data_impl.clone());
});
@ -85,18 +100,27 @@ fn make_view_data_processor(
struct WorkspaceDatabaseImpl(Arc<UserSession>);
impl WorkspaceDatabase for WorkspaceDatabaseImpl {
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self.0.db_pool().map_err(|e| FlowyError::internal().context(e))
self
.0
.db_pool()
.map_err(|e| FlowyError::internal().context(e))
}
}
struct WorkspaceUserImpl(Arc<UserSession>);
impl WorkspaceUser for WorkspaceUserImpl {
fn user_id(&self) -> Result<String, FlowyError> {
self.0.user_id().map_err(|e| FlowyError::internal().context(e))
self
.0
.user_id()
.map_err(|e| FlowyError::internal().context(e))
}
fn token(&self) -> Result<String, FlowyError> {
self.0.token().map_err(|e| FlowyError::internal().context(e))
self
.0
.token()
.map_err(|e| FlowyError::internal().context(e))
}
}
@ -112,10 +136,10 @@ impl RevisionWebSocket for FolderRevisionWebSocket {
let ws_conn = self.0.clone();
Box::pin(async move {
match ws_conn.web_socket().await? {
None => {}
None => {},
Some(sender) => {
sender.send(msg).map_err(internal_error)?;
}
},
}
Ok(())
})
@ -237,7 +261,9 @@ impl ViewDataProcessor for GridViewDataProcessor {
let view_id = view_id.to_string();
let grid_manager = self.0.clone();
FutureResult::new(async move {
grid_manager.create_database(view_id, vec![revision]).await?;
grid_manager
.create_database(view_id, vec![revision])
.await?;
Ok(())
})
}
@ -277,7 +303,7 @@ impl ViewDataProcessor for GridViewDataProcessor {
return FutureResult::new(async move {
Err(FlowyError::internal().context(format!("Can't handle {:?} layout type", layout)))
});
}
},
};
let user_id = user_id.to_string();
@ -307,7 +333,7 @@ impl ViewDataProcessor for GridViewDataProcessor {
return FutureResult::new(async move {
Err(FlowyError::internal().context(format!("Can't handle {:?} layout type", layout)))
});
}
},
};
FutureResult::new(async move {

View File

@ -34,7 +34,7 @@ impl GridDepsResolver {
if let (Ok(user_id), Ok(token)) = (user.user_id(), user.token()) {
match grid_manager.initialize(&user_id, &token).await {
Ok(_) => {}
Ok(_) => {},
Err(e) => tracing::error!("Initialize grid manager failed: {}", e),
}
}
@ -46,7 +46,10 @@ impl GridDepsResolver {
struct GridDatabaseImpl(Arc<UserSession>);
impl GridDatabase for GridDatabaseImpl {
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError> {
self.0.db_pool().map_err(|e| FlowyError::internal().context(e))
self
.0
.db_pool()
.map_err(|e| FlowyError::internal().context(e))
}
}
@ -77,10 +80,12 @@ impl RevisionWebSocket for GridRevisionWebSocket {
let ws_conn = self.0.clone();
Box::pin(async move {
match ws_conn.web_socket().await? {
None => {}
None => {},
Some(sender) => {
sender.send(msg).map_err(|e| FlowyError::internal().context(e))?;
}
sender
.send(msg)
.map_err(|e| FlowyError::internal().context(e))?;
},
}
Ok(())
})

View File

@ -130,7 +130,8 @@ impl AppFlowyCore {
runtime.spawn(TaskRunner::run(task_dispatcher.clone()));
let (local_server, ws_conn) = mk_local_server(&config.server_config);
let (user_session, document_manager, folder_manager, local_server, grid_manager) = runtime.block_on(async {
let (user_session, document_manager, folder_manager, local_server, grid_manager) = runtime
.block_on(async {
let user_session = mk_user_session(&config, &local_server, &config.server_config);
let document_manager = DocumentDepsResolver::resolve(
local_server.clone(),
@ -140,8 +141,12 @@ impl AppFlowyCore {
&config.document,
);
let grid_manager =
GridDepsResolver::resolve(ws_conn.clone(), user_session.clone(), task_dispatcher.clone()).await;
let grid_manager = GridDepsResolver::resolve(
ws_conn.clone(),
user_session.clone(),
task_dispatcher.clone(),
)
.await;
let folder_manager = FolderDepsResolver::resolve(
local_server.clone(),
@ -244,7 +249,10 @@ fn mk_local_server(
}
}
async fn _listen_network_status(mut subscribe: broadcast::Receiver<NetworkType>, _core: Arc<FolderManager>) {
async fn _listen_network_status(
mut subscribe: broadcast::Receiver<NetworkType>,
_core: Arc<FolderManager>,
) {
while let Ok(_new_type) = subscribe.recv().await {
// core.network_state_changed(new_type);
}
@ -252,7 +260,7 @@ async fn _listen_network_status(mut subscribe: broadcast::Receiver<NetworkType>,
fn init_kv(root: &str) {
match flowy_sqlite::kv::KV::init(root) {
Ok(_) => {}
Ok(_) => {},
Err(e) => tracing::error!("Init kv store failed: {}", e),
}
}
@ -290,7 +298,10 @@ impl UserStatusListener {
self.folder_manager.initialize(user_id, token).await?;
self.document_manager.initialize(user_id).await?;
self.grid_manager.initialize(user_id, token).await?;
self.ws_conn.start(token.to_owned(), user_id.to_owned()).await?;
self
.ws_conn
.start(token.to_owned(), user_id.to_owned())
.await?;
Ok(())
}
@ -299,18 +310,22 @@ impl UserStatusListener {
DocumentVersionPB::V0 => ViewDataFormatPB::DeltaFormat,
DocumentVersionPB::V1 => ViewDataFormatPB::NodeFormat,
};
self.folder_manager
self
.folder_manager
.initialize_with_new_user(&user_profile.id, &user_profile.token, view_data_type)
.await?;
self.document_manager
self
.document_manager
.initialize_with_new_user(&user_profile.id, &user_profile.token)
.await?;
self.grid_manager
self
.grid_manager
.initialize_with_new_user(&user_profile.id, &user_profile.token)
.await?;
self.ws_conn
self
.ws_conn
.start(user_profile.token.clone(), user_profile.id.clone())
.await?;
Ok(())

View File

@ -18,5 +18,11 @@ pub fn make_plugins(
let network_plugin = flowy_net::event_map::init(ws_conn.clone());
let grid_plugin = flowy_database::event_map::init(grid_manager.clone());
let document_plugin = flowy_document::event_map::init(document_manager.clone());
vec![user_plugin, folder_plugin, network_plugin, grid_plugin, document_plugin]
vec![
user_plugin,
folder_plugin,
network_plugin,
grid_plugin,
document_plugin,
]
}

View File

@ -27,8 +27,10 @@ impl TryInto<CreateSelectOptionParams> for CreateSelectOptionPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CreateSelectOptionParams, Self::Error> {
let option_name = NotEmptyStr::parse(self.option_name).map_err(|_| ErrorCode::SelectOptionNameIsEmpty)?;
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let option_name =
NotEmptyStr::parse(self.option_name).map_err(|_| ErrorCode::SelectOptionNameIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(CreateSelectOptionParams {
field_id: field_id.0,
@ -62,7 +64,8 @@ impl TryInto<CellIdParams> for CellIdPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CellIdParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
let row_id = NotEmptyStr::parse(self.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
Ok(CellIdParams {

View File

@ -66,7 +66,9 @@ pub struct FieldIdPB {
impl std::convert::From<&str> for FieldIdPB {
fn from(s: &str) -> Self {
FieldIdPB { field_id: s.to_owned() }
FieldIdPB {
field_id: s.to_owned(),
}
}
}
@ -168,7 +170,8 @@ impl TryInto<CreateFieldParams> for CreateFieldPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CreateFieldParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
Ok(CreateFieldParams {
database_id: database_id.0,
field_type: self.field_type,
@ -202,7 +205,8 @@ impl TryInto<EditFieldParams> for UpdateFieldTypePayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<EditFieldParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(EditFieldParams {
database_id: database_id.0,
@ -234,7 +238,8 @@ impl TryInto<TypeOptionPathParams> for TypeOptionPathPB {
type Error = ErrorCode;
fn try_into(self) -> Result<TypeOptionPathParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(TypeOptionPathParams {
database_id: database_id.0,
@ -333,7 +338,8 @@ impl TryInto<TypeOptionChangesetParams> for TypeOptionChangesetPB {
type Error = ErrorCode;
fn try_into(self) -> Result<TypeOptionChangesetParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let _ = NotEmptyStr::parse(self.field_id.clone()).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(TypeOptionChangesetParams {
@ -362,7 +368,8 @@ impl TryInto<GetFieldParams> for GetFieldPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<GetFieldParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_ids = self.field_ids.map(|repeated| {
repeated
.items
@ -417,7 +424,8 @@ impl TryInto<FieldChangesetParams> for FieldChangesetPB {
type Error = ErrorCode;
fn try_into(self) -> Result<FieldChangesetParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
let field_type = self.field_type.map(FieldTypeRevision::from);
// if let Some(type_option_data) = self.type_option_data.as_ref() {
@ -606,7 +614,7 @@ impl std::convert::From<FieldTypeRevision> for FieldType {
_ => {
tracing::error!("Can't convert FieldTypeRevision: {} to FieldType", ty);
FieldType::RichText
}
},
}
}
}
@ -632,7 +640,8 @@ impl TryInto<FieldIdParams> for DuplicateFieldPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<FieldIdParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(FieldIdParams {
database_id: database_id.0,
@ -654,7 +663,8 @@ impl TryInto<FieldIdParams> for DeleteFieldPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<FieldIdParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let field_id = NotEmptyStr::parse(self.field_id).map_err(|_| ErrorCode::FieldIdIsEmpty)?;
Ok(FieldIdParams {
database_id: database_id.0,

View File

@ -86,7 +86,8 @@ impl FromFilterString for DateFilterPB {
where
Self: Sized,
{
let condition = DateFilterConditionPB::try_from(filter_rev.condition).unwrap_or(DateFilterConditionPB::DateIs);
let condition = DateFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(DateFilterConditionPB::DateIs);
let mut filter = DateFilterPB {
condition,
..Default::default()
@ -103,7 +104,8 @@ impl FromFilterString for DateFilterPB {
}
impl std::convert::From<&FilterRevision> for DateFilterPB {
fn from(rev: &FilterRevision) -> Self {
let condition = DateFilterConditionPB::try_from(rev.condition).unwrap_or(DateFilterConditionPB::DateIs);
let condition =
DateFilterConditionPB::try_from(rev.condition).unwrap_or(DateFilterConditionPB::DateIs);
let mut filter = DateFilterPB {
condition,
..Default::default()

View File

@ -69,7 +69,8 @@ impl FromFilterString for NumberFilterPB {
impl std::convert::From<&FilterRevision> for NumberFilterPB {
fn from(rev: &FilterRevision) -> Self {
NumberFilterPB {
condition: NumberFilterConditionPB::try_from(rev.condition).unwrap_or(NumberFilterConditionPB::Equal),
condition: NumberFilterConditionPB::try_from(rev.condition)
.unwrap_or(NumberFilterConditionPB::Equal),
content: rev.content.clone(),
}
}

View File

@ -65,7 +65,8 @@ impl std::convert::From<&FilterRevision> for SelectOptionFilterPB {
fn from(rev: &FilterRevision) -> Self {
let ids = SelectOptionIds::from(rev.content.clone());
SelectOptionFilterPB {
condition: SelectOptionConditionPB::try_from(rev.condition).unwrap_or(SelectOptionConditionPB::OptionIs),
condition: SelectOptionConditionPB::try_from(rev.condition)
.unwrap_or(SelectOptionConditionPB::OptionIs),
option_ids: ids.into_inner(),
}
}

View File

@ -61,7 +61,8 @@ impl FromFilterString for TextFilterPB {
Self: Sized,
{
TextFilterPB {
condition: TextFilterConditionPB::try_from(filter_rev.condition).unwrap_or(TextFilterConditionPB::Is),
condition: TextFilterConditionPB::try_from(filter_rev.condition)
.unwrap_or(TextFilterConditionPB::Is),
content: filter_rev.content.clone(),
}
}
@ -70,7 +71,8 @@ impl FromFilterString for TextFilterPB {
impl std::convert::From<&FilterRevision> for TextFilterPB {
fn from(rev: &FilterRevision) -> Self {
TextFilterPB {
condition: TextFilterConditionPB::try_from(rev.condition).unwrap_or(TextFilterConditionPB::Is),
condition: TextFilterConditionPB::try_from(rev.condition)
.unwrap_or(TextFilterConditionPB::Is),
content: rev.content.clone(),
}
}

View File

@ -1,7 +1,7 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{
CheckboxFilterPB, ChecklistFilterPB, DateFilterContentPB, DateFilterPB, FieldType, NumberFilterPB,
SelectOptionFilterPB, TextFilterPB,
CheckboxFilterPB, ChecklistFilterPB, DateFilterContentPB, DateFilterPB, FieldType,
NumberFilterPB, SelectOptionFilterPB, TextFilterPB,
};
use crate::services::field::SelectOptionIds;
use crate::services::filter::FilterType;
@ -169,7 +169,11 @@ impl TryInto<AlterFilterParams> for AlterFilterPayloadPB {
.0;
let filter_id = match self.filter_id {
None => None,
Some(filter_id) => Some(NotEmptyStr::parse(filter_id).map_err(|_| ErrorCode::FilterIdIsEmpty)?.0),
Some(filter_id) => Some(
NotEmptyStr::parse(filter_id)
.map_err(|_| ErrorCode::FilterIdIsEmpty)?
.0,
),
};
let condition;
let mut content = "".to_string();
@ -180,16 +184,16 @@ impl TryInto<AlterFilterParams> for AlterFilterPayloadPB {
let filter = TextFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = filter.content;
}
},
FieldType::Checkbox => {
let filter = CheckboxFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
}
},
FieldType::Number => {
let filter = NumberFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = filter.content;
}
},
FieldType::DateTime => {
let filter = DateFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
@ -199,12 +203,12 @@ impl TryInto<AlterFilterParams> for AlterFilterPayloadPB {
timestamp: filter.timestamp,
}
.to_string();
}
},
FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Checklist => {
let filter = SelectOptionFilterPB::try_from(bytes).map_err(|_| ErrorCode::ProtobufSerde)?;
condition = filter.condition as u8;
content = SelectOptionIds::from(filter.option_ids).to_string();
}
},
}
Ok(AlterFilterParams {

View File

@ -133,11 +133,16 @@ impl TryInto<MoveGroupRowParams> for MoveGroupRowPayloadPB {
fn try_into(self) -> Result<MoveGroupRowParams, Self::Error> {
let view_id = NotEmptyStr::parse(self.view_id).map_err(|_| ErrorCode::DatabaseViewIdIsEmpty)?;
let from_row_id = NotEmptyStr::parse(self.from_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
let to_group_id = NotEmptyStr::parse(self.to_group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
let to_group_id =
NotEmptyStr::parse(self.to_group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
let to_row_id = match self.to_row_id {
None => None,
Some(to_row_id) => Some(NotEmptyStr::parse(to_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?.0),
Some(to_row_id) => Some(
NotEmptyStr::parse(to_row_id)
.map_err(|_| ErrorCode::RowIdIsEmpty)?
.0,
),
};
Ok(MoveGroupRowParams {

View File

@ -19,7 +19,9 @@ pub struct SelectOptionGroupConfigurationPB {
hide_empty: bool,
}
impl std::convert::From<SelectOptionGroupConfigurationRevision> for SelectOptionGroupConfigurationPB {
impl std::convert::From<SelectOptionGroupConfigurationRevision>
for SelectOptionGroupConfigurationPB
{
fn from(rev: SelectOptionGroupConfigurationRevision) -> Self {
Self {
hide_empty: rev.hide_empty,

View File

@ -23,11 +23,16 @@ impl TryInto<CreateRowParams> for CreateBoardCardPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CreateRowParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let group_id = NotEmptyStr::parse(self.group_id).map_err(|_| ErrorCode::GroupIdIsEmpty)?;
let start_row_id = match self.start_row_id {
None => None,
Some(start_row_id) => Some(NotEmptyStr::parse(start_row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?.0),
Some(start_row_id) => Some(
NotEmptyStr::parse(start_row_id)
.map_err(|_| ErrorCode::RowIdIsEmpty)?
.0,
),
};
Ok(CreateRowParams {
database_id: database_id.0,

View File

@ -151,7 +151,8 @@ impl TryInto<RowIdParams> for RowIdPB {
type Error = ErrorCode;
fn try_into(self) -> Result<RowIdParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let row_id = NotEmptyStr::parse(self.row_id).map_err(|_| ErrorCode::RowIdIsEmpty)?;
Ok(RowIdParams {
@ -191,7 +192,8 @@ impl TryInto<CreateRowParams> for CreateRowPayloadPB {
type Error = ErrorCode;
fn try_into(self) -> Result<CreateRowParams, Self::Error> {
let database_id = NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
let database_id =
NotEmptyStr::parse(self.database_id).map_err(|_| ErrorCode::DatabaseIdIsEmpty)?;
Ok(CreateRowParams {
database_id: database_id.0,

View File

@ -1,8 +1,9 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{
AlterFilterParams, AlterFilterPayloadPB, AlterSortParams, AlterSortPayloadPB, DeleteFilterParams,
DeleteFilterPayloadPB, DeleteGroupParams, DeleteGroupPayloadPB, DeleteSortParams, DeleteSortPayloadPB,
InsertGroupParams, InsertGroupPayloadPB, RepeatedFilterPB, RepeatedGroupConfigurationPB, RepeatedSortPB,
DeleteFilterPayloadPB, DeleteGroupParams, DeleteGroupPayloadPB, DeleteSortParams,
DeleteSortPayloadPB, InsertGroupParams, InsertGroupPayloadPB, RepeatedFilterPB,
RepeatedGroupConfigurationPB, RepeatedSortPB,
};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::ErrorCode;

View File

@ -107,7 +107,11 @@ impl TryInto<AlterSortParams> for AlterSortPayloadPB {
let sort_id = match self.sort_id {
None => None,
Some(sort_id) => Some(NotEmptyStr::parse(sort_id).map_err(|_| ErrorCode::SortIdIsEmpty)?.0),
Some(sort_id) => Some(
NotEmptyStr::parse(sort_id)
.map_err(|_| ErrorCode::SortIdIsEmpty)?
.0,
),
};
Ok(AlterSortParams {

View File

@ -53,7 +53,11 @@ impl ViewRowsChangesetPB {
}
}
pub fn from_move(view_id: String, deleted_rows: Vec<String>, inserted_rows: Vec<InsertedRowPB>) -> Self {
pub fn from_move(
view_id: String,
deleted_rows: Vec<String>,
inserted_rows: Vec<InsertedRowPB>,
) -> Self {
Self {
view_id,
inserted_rows,

View File

@ -2,10 +2,10 @@ use crate::entities::*;
use crate::manager::DatabaseManager;
use crate::services::cell::{FromCellString, ToCellChangesetString, TypeCellData};
use crate::services::field::{
default_type_option_builder_from_type, select_type_option_from_field_rev, type_option_builder_from_json_str,
DateCellChangeset, DateChangesetPB, SelectOptionCellChangeset, SelectOptionCellChangesetPB,
SelectOptionCellChangesetParams, SelectOptionCellDataPB, SelectOptionChangeset, SelectOptionChangesetPB,
SelectOptionIds, SelectOptionPB,
default_type_option_builder_from_type, select_type_option_from_field_rev,
type_option_builder_from_json_str, DateCellChangeset, DateChangesetPB, SelectOptionCellChangeset,
SelectOptionCellChangesetPB, SelectOptionCellChangesetParams, SelectOptionCellDataPB,
SelectOptionChangeset, SelectOptionChangesetPB, SelectOptionIds, SelectOptionPB,
};
use crate::services::row::make_row_from_row_rev;
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
@ -113,7 +113,11 @@ pub(crate) async fn get_fields_handler(
let params: GetFieldParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
let field_revs = editor.get_field_revs(params.field_ids).await?;
let repeated_field: RepeatedFieldPB = field_revs.into_iter().map(FieldPB::from).collect::<Vec<_>>().into();
let repeated_field: RepeatedFieldPB = field_revs
.into_iter()
.map(FieldPB::from)
.collect::<Vec<_>>()
.into();
data_result(repeated_field)
}
@ -179,7 +183,12 @@ pub(crate) async fn switch_to_field_handler(
// Update the type-option data after the field type has been changed
let type_option_data = get_type_option_data(&new_field_rev, &params.field_type).await?;
editor
.update_field_type_option(&params.database_id, &new_field_rev.id, type_option_data, old_field_rev)
.update_field_type_option(
&params.database_id,
&new_field_rev.id,
type_option_data,
old_field_rev,
)
.await?;
Ok(())
@ -215,7 +224,7 @@ pub(crate) async fn get_field_type_option_data_handler(
type_option_data,
};
data_result(data)
}
},
}
}
@ -252,7 +261,10 @@ pub(crate) async fn move_field_handler(
}
/// The [FieldRevision] contains multiple pieces of data, each of which belongs to a specific FieldType.
async fn get_type_option_data(field_rev: &FieldRevision, field_type: &FieldType) -> FlowyResult<Vec<u8>> {
async fn get_type_option_data(
field_rev: &FieldRevision,
field_type: &FieldType,
) -> FlowyResult<Vec<u8>> {
let s = field_rev
.get_type_option_str(field_type)
.map(|value| value.to_owned())
@ -275,7 +287,10 @@ pub(crate) async fn get_row_handler(
) -> DataResult<OptionalRowPB, FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.database_id).await?;
let row = editor.get_row_rev(&params.row_id).await?.map(make_row_from_row_rev);
let row = editor
.get_row_rev(&params.row_id)
.await?
.map(make_row_from_row_rev);
data_result(OptionalRowPB { row })
}
@ -319,7 +334,9 @@ pub(crate) async fn create_table_row_handler(
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RowPB, FlowyError> {
let params: CreateRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.database_id.as_ref()).await?;
let editor = manager
.get_database_editor(params.database_id.as_ref())
.await?;
let row = editor.create_row(params).await?;
data_result(row)
}
@ -345,7 +362,11 @@ pub(crate) async fn update_cell_handler(
let changeset: CellChangesetPB = data.into_inner();
let editor = manager.get_database_editor(&changeset.database_id).await?;
editor
.update_cell_with_changeset(&changeset.row_id, &changeset.field_id, changeset.type_cell_data)
.update_cell_with_changeset(
&changeset.row_id,
&changeset.field_id,
changeset.type_cell_data,
)
.await?;
Ok(())
}
@ -363,7 +384,7 @@ pub(crate) async fn new_select_option_handler(
let type_option = select_type_option_from_field_rev(&field_rev)?;
let select_option = type_option.create_option(&params.option_name);
data_result(select_option)
}
},
}
}
@ -373,7 +394,9 @@ pub(crate) async fn update_select_option_handler(
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let changeset: SelectOptionChangeset = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&changeset.cell_path.database_id).await?;
let editor = manager
.get_database_editor(&changeset.cell_path.database_id)
.await?;
let field_id = changeset.cell_path.field_id.clone();
let (tx, rx) = tokio::sync::oneshot::channel();
editor
@ -383,8 +406,9 @@ pub(crate) async fn update_select_option_handler(
let mut is_changed = None;
for option in changeset.insert_options {
cell_changeset_str =
Some(SelectOptionCellChangeset::from_insert_option_id(&option.id).to_cell_changeset_str());
cell_changeset_str = Some(
SelectOptionCellChangeset::from_insert_option_id(&option.id).to_cell_changeset_str(),
);
type_option.insert_option(option);
is_changed = Some(());
}
@ -395,8 +419,9 @@ pub(crate) async fn update_select_option_handler(
}
for option in changeset.delete_options {
cell_changeset_str =
Some(SelectOptionCellChangeset::from_delete_option_id(&option.id).to_cell_changeset_str());
cell_changeset_str = Some(
SelectOptionCellChangeset::from_delete_option_id(&option.id).to_cell_changeset_str(),
);
type_option.delete_option(option);
is_changed = Some(());
}
@ -418,7 +443,7 @@ pub(crate) async fn update_select_option_handler(
)
.await
{
Ok(_) => {}
Ok(_) => {},
Err(e) => tracing::error!("{}", e),
}
}
@ -434,12 +459,17 @@ pub(crate) async fn get_select_option_handler(
let editor = manager.get_database_editor(&params.database_id).await?;
match editor.get_field_rev(&params.field_id).await {
None => {
tracing::error!("Can't find the select option field with id: {}", params.field_id);
tracing::error!(
"Can't find the select option field with id: {}",
params.field_id
);
data_result(SelectOptionCellDataPB::default())
}
},
Some(field_rev) => {
//
let cell_rev = editor.get_cell_rev(&params.row_id, &params.field_id).await?;
let cell_rev = editor
.get_cell_rev(&params.row_id, &params.field_id)
.await?;
let type_option = select_type_option_from_field_rev(&field_rev)?;
let type_cell_data: TypeCellData = match cell_rev {
None => TypeCellData {
@ -451,7 +481,7 @@ pub(crate) async fn get_select_option_handler(
let ids = SelectOptionIds::from_cell_str(&type_cell_data.cell_str)?;
let selected_options = type_option.get_selected_options(ids);
data_result(selected_options)
}
},
}
}
@ -461,7 +491,9 @@ pub(crate) async fn update_select_option_cell_handler(
manager: AFPluginState<Arc<DatabaseManager>>,
) -> Result<(), FlowyError> {
let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(&params.cell_identifier.database_id).await?;
let editor = manager
.get_database_editor(&params.cell_identifier.database_id)
.await?;
let changeset = SelectOptionCellChangeset {
insert_option_ids: params.insert_option_ids,
delete_option_ids: params.delete_option_ids,
@ -514,7 +546,9 @@ pub(crate) async fn create_board_card_handler(
manager: AFPluginState<Arc<DatabaseManager>>,
) -> DataResult<RowPB, FlowyError> {
let params: CreateRowParams = data.into_inner().try_into()?;
let editor = manager.get_database_editor(params.database_id.as_ref()).await?;
let editor = manager
.get_database_editor(params.database_id.as_ref())
.await?;
let row = editor.create_row(params).await?;
data_result(row)
}

View File

@ -6,7 +6,9 @@ use std::sync::Arc;
use strum_macros::Display;
pub fn init(database_manager: Arc<DatabaseManager>) -> AFPlugin {
let mut plugin = AFPlugin::new().name(env!("CARGO_PKG_NAME")).state(database_manager);
let mut plugin = AFPlugin::new()
.name(env!("CARGO_PKG_NAME"))
.state(database_manager);
plugin = plugin
.event(DatabaseEvent::GetDatabase, get_database_data_handler)
// .event(GridEvent::GetGridBlocks, get_grid_blocks_handler)

View File

@ -59,7 +59,7 @@ macro_rules! impl_type_option {
Err(e) => {
tracing::error!("Field type data serialize to json fail, error: {:?}", e);
serde_json::to_string(&$target::default()).unwrap()
}
},
}
}
@ -80,7 +80,7 @@ macro_rules! impl_type_option {
err
);
$target::default()
}
},
}
}

View File

@ -15,7 +15,9 @@ use flowy_client_sync::client_database::{
make_database_block_operations, make_database_operations, make_grid_view_operations,
};
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration, RevisionWebSocket};
use flowy_revision::{
RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration, RevisionWebSocket,
};
use flowy_sqlite::ConnectionPool;
use grid_model::{BuildDatabaseContext, DatabaseRevision, DatabaseViewRevision};
use lib_infra::async_trait::async_trait;
@ -73,7 +75,11 @@ impl DatabaseManager {
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn create_database<T: AsRef<str>>(&self, database_id: T, revisions: Vec<Revision>) -> FlowyResult<()> {
pub async fn create_database<T: AsRef<str>>(
&self,
database_id: T,
revisions: Vec<Revision>,
) -> FlowyResult<()> {
let database_id = database_id.as_ref();
let db_pool = self.database_user.db_pool()?;
let rev_manager = self.make_database_rev_manager(database_id, db_pool)?;
@ -83,7 +89,11 @@ impl DatabaseManager {
}
#[tracing::instrument(level = "debug", skip_all, err)]
async fn create_database_view<T: AsRef<str>>(&self, view_id: T, revisions: Vec<Revision>) -> FlowyResult<()> {
async fn create_database_view<T: AsRef<str>>(
&self,
view_id: T,
revisions: Vec<Revision>,
) -> FlowyResult<()> {
let view_id = view_id.as_ref();
let rev_manager = make_database_view_rev_manager(&self.database_user, view_id).await?;
rev_manager.reset_object(revisions).await?;
@ -91,14 +101,21 @@ impl DatabaseManager {
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn create_database_block<T: AsRef<str>>(&self, block_id: T, revisions: Vec<Revision>) -> FlowyResult<()> {
pub async fn create_database_block<T: AsRef<str>>(
&self,
block_id: T,
revisions: Vec<Revision>,
) -> FlowyResult<()> {
let block_id = block_id.as_ref();
let rev_manager = make_database_block_rev_manager(&self.database_user, block_id)?;
rev_manager.reset_object(revisions).await?;
Ok(())
}
pub async fn open_database<T: AsRef<str>>(&self, database_id: T) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
pub async fn open_database<T: AsRef<str>>(
&self,
database_id: T,
) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
let database_id = database_id.as_ref();
let _ = self.migration.run_v1_migration(database_id).await;
self.get_or_create_database_editor(database_id).await
@ -108,12 +125,20 @@ impl DatabaseManager {
pub async fn close_database<T: AsRef<str>>(&self, database_id: T) -> FlowyResult<()> {
let database_id = database_id.as_ref();
tracing::Span::current().record("database_id", database_id);
self.database_editors.write().await.remove(database_id).await;
self
.database_editors
.write()
.await
.remove(database_id)
.await;
Ok(())
}
// #[tracing::instrument(level = "debug", skip(self), err)]
pub async fn get_database_editor(&self, database_id: &str) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
pub async fn get_database_editor(
&self,
database_id: &str,
) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
let read_guard = self.database_editors.read().await;
let editor = read_guard.get(database_id);
match editor {
@ -121,12 +146,15 @@ impl DatabaseManager {
// Drop the read_guard ASAP so it doesn't block the following read/write lock
drop(read_guard);
self.open_database(database_id).await
}
},
Some(editor) => Ok(editor),
}
}
async fn get_or_create_database_editor(&self, database_id: &str) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
async fn get_or_create_database_editor(
&self,
database_id: &str,
) -> FlowyResult<Arc<DatabaseRevisionEditor>> {
if let Some(editor) = self.database_editors.read().await.get(database_id) {
return Ok(editor);
}
@ -150,7 +178,9 @@ impl DatabaseManager {
let cloud = Arc::new(GridRevisionCloudService::new(token));
let mut rev_manager = self.make_database_rev_manager(database_id, pool.clone())?;
let database_pad = Arc::new(RwLock::new(
rev_manager.initialize::<GridRevisionSerde>(Some(cloud)).await?,
rev_manager
.initialize::<GridRevisionSerde>(Some(cloud))
.await?,
));
let database_editor = DatabaseRevisionEditor::new(
database_id,
@ -175,11 +205,13 @@ impl DatabaseManager {
// Create revision persistence
let disk_cache = SQLiteDatabaseRevisionPersistence::new(&user_id, pool.clone());
let configuration = RevisionPersistenceConfiguration::new(6, false);
let rev_persistence = RevisionPersistence::new(&user_id, database_id, disk_cache, configuration);
let rev_persistence =
RevisionPersistence::new(&user_id, database_id, disk_cache, configuration);
// Create snapshot persistence
let snapshot_object_id = format!("grid:{}", database_id);
let snapshot_persistence = SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);
let snapshot_persistence =
SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);
let rev_compress = GridRevisionMergeable();
let rev_manager = RevisionManager::new(
@ -211,7 +243,9 @@ pub async fn make_database_view_data(
let block_id = &block_meta_data.block_id;
// Indexing the block's rows
block_meta_data.rows.iter().for_each(|row| {
let _ = database_manager.block_index_cache.insert(&row.block_id, &row.id);
let _ = database_manager
.block_index_cache
.insert(&row.block_id, &row.id);
});
// Create grid's block
@ -231,7 +265,9 @@ pub async fn make_database_view_data(
let grid_rev_delta = make_database_operations(&grid_rev);
let grid_rev_delta_bytes = grid_rev_delta.json_bytes();
let revision = Revision::initial_revision(&grid_id, grid_rev_delta_bytes.clone());
database_manager.create_database(&grid_id, vec![revision]).await?;
database_manager
.create_database(&grid_id, vec![revision])
.await?;
// Create grid view
let grid_view = if grid_view_revision_data.is_empty() {
@ -242,7 +278,9 @@ pub async fn make_database_view_data(
let grid_view_delta = make_grid_view_operations(&grid_view);
let grid_view_delta_bytes = grid_view_delta.json_bytes();
let revision = Revision::initial_revision(view_id, grid_view_delta_bytes);
database_manager.create_database_view(view_id, vec![revision]).await?;
database_manager
.create_database_view(view_id, vec![revision])
.await?;
Ok(grid_rev_delta_bytes)
}
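The hunks above repeat two shapes that the reformatting settles on: a call chain that no longer fits on one line breaks right after the receiver (self), one call per line, and a signature that grows too long is split with one parameter per line. A minimal sketch of both shapes, using a hypothetical registry type rather than anything from this crate:

use std::collections::HashMap;
use std::sync::RwLock;

// Hypothetical type, only here to show the wrapped shapes.
struct EditorRegistry {
  editors: RwLock<HashMap<String, String>>,
}

impl EditorRegistry {
  // A chain that exceeds the line width breaks after `self`.
  fn close(&self, id: &str) -> Option<String> {
    self
      .editors
      .write()
      .unwrap()
      .remove(id)
  }

  // A long signature is wrapped with one parameter per line.
  fn create_editor(
    &self,
    editor_id: &str,
    initial_content: String,
  ) -> Option<String> {
    self
      .editors
      .write()
      .unwrap()
      .insert(editor_id.to_string(), initial_content)
  }
}

fn main() {
  let registry = EditorRegistry {
    editors: RwLock::new(HashMap::new()),
  };
  registry.create_editor("doc-1", "hello".to_string());
  assert_eq!(registry.close("doc-1"), Some("hello".to_string()));
}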

View File

@ -4,7 +4,8 @@ use flowy_client_sync::client_database::{GridBlockRevisionChangeset, GridBlockRe
use flowy_client_sync::make_operations_from_revisions;
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{
RevisionCloudService, RevisionManager, RevisionMergeable, RevisionObjectDeserializer, RevisionObjectSerializer,
RevisionCloudService, RevisionManager, RevisionMergeable, RevisionObjectDeserializer,
RevisionObjectSerializer,
};
use flowy_sqlite::ConnectionPool;
use grid_model::{CellRevision, DatabaseBlockRevision, RowChangeset, RowRevision};
@ -66,10 +67,11 @@ impl DatabaseBlockRevisionEditor {
) -> FlowyResult<(i32, Option<i32>)> {
let mut row_count = 0;
let mut row_index = None;
self.modify(|block_pad| {
self
.modify(|block_pad| {
if let Some(start_row_id) = prev_row_id.as_ref() {
match block_pad.index_of_row(start_row_id) {
None => {}
None => {},
Some(index) => row_index = Some(index as i32 + 1),
}
}
@ -89,7 +91,8 @@ impl DatabaseBlockRevisionEditor {
pub async fn delete_rows(&self, ids: Vec<Cow<'_, String>>) -> FlowyResult<i32> {
let mut row_count = 0;
self.modify(|block_pad| {
self
.modify(|block_pad| {
let changeset = block_pad.delete_rows(ids)?;
row_count = block_pad.number_of_rows();
Ok(changeset)
@ -99,12 +102,15 @@ impl DatabaseBlockRevisionEditor {
}
pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
self.modify(|block_pad| Ok(block_pad.update_row(changeset)?)).await?;
self
.modify(|block_pad| Ok(block_pad.update_row(changeset)?))
.await?;
Ok(())
}
pub async fn move_row(&self, row_id: &str, from: usize, to: usize) -> FlowyResult<()> {
self.modify(|block_pad| Ok(block_pad.move_row(row_id, from, to)?))
self
.modify(|block_pad| Ok(block_pad.move_row(row_id, from, to)?))
.await?;
Ok(())
}
@ -131,12 +137,15 @@ impl DatabaseBlockRevisionEditor {
Err(err) => {
tracing::error!("Read row revision failed with: {}", err);
Ok(None)
}
},
}
}
}
pub async fn get_row_revs<T>(&self, row_ids: Option<Vec<Cow<'_, T>>>) -> FlowyResult<Vec<Arc<RowRevision>>>
pub async fn get_row_revs<T>(
&self,
row_ids: Option<Vec<Cow<'_, T>>>,
) -> FlowyResult<Vec<Arc<RowRevision>>>
where
T: AsRef<str> + ToOwned + ?Sized,
{
@ -155,21 +164,26 @@ impl DatabaseBlockRevisionEditor {
async fn modify<F>(&self, f: F) -> FlowyResult<()>
where
F: for<'a> FnOnce(&'a mut GridBlockRevisionPad) -> FlowyResult<Option<GridBlockRevisionChangeset>>,
F: for<'a> FnOnce(
&'a mut GridBlockRevisionPad,
) -> FlowyResult<Option<GridBlockRevisionChangeset>>,
{
let mut write_guard = self.pad.write().await;
let changeset = f(&mut write_guard)?;
match changeset {
None => {}
None => {},
Some(changeset) => {
self.apply_change(changeset).await?;
}
},
}
Ok(())
}
async fn apply_change(&self, change: GridBlockRevisionChangeset) -> FlowyResult<()> {
let GridBlockRevisionChangeset { operations: delta, md5 } = change;
let GridBlockRevisionChangeset {
operations: delta,
md5,
} = change;
let data = delta.json_bytes();
let _ = self.rev_manager.add_local_revision(data, md5).await?;
Ok(())
@ -183,7 +197,11 @@ struct GridBlockRevisionCloudService {
impl RevisionCloudService for GridBlockRevisionCloudService {
#[tracing::instrument(level = "trace", skip(self))]
fn fetch_object(&self, _user_id: &str, _object_id: &str) -> FutureResult<Vec<Revision>, FlowyError> {
fn fetch_object(
&self,
_user_id: &str,
_object_id: &str,
) -> FutureResult<Vec<Revision>, FlowyError> {
FutureResult::new(async move { Ok(vec![]) })
}
}

View File

@ -11,7 +11,9 @@ use dashmap::DashMap;
use flowy_error::FlowyResult;
use flowy_revision::{RevisionManager, RevisionPersistence, RevisionPersistenceConfiguration};
use flowy_sqlite::ConnectionPool;
use grid_model::{GridBlockMetaRevision, GridBlockMetaRevisionChangeset, RowChangeset, RowRevision};
use grid_model::{
GridBlockMetaRevision, GridBlockMetaRevisionChangeset, RowChangeset, RowRevision,
};
use std::borrow::Cow;
use std::collections::HashMap;
use std::sync::Arc;
@ -71,26 +73,41 @@ impl DatabaseBlockManager {
}
// #[tracing::instrument(level = "trace", skip(self))]
pub(crate) async fn get_block_editor(&self, block_id: &str) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
pub(crate) async fn get_block_editor(
&self,
block_id: &str,
) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
debug_assert!(!block_id.is_empty());
match self.block_editors.get(block_id) {
None => {
tracing::error!("This is a fatal error, block with id:{} is not exist", block_id);
tracing::error!(
"This is a fatal error, block with id:{} is not exist",
block_id
);
let editor = Arc::new(make_database_block_editor(&self.user, block_id).await?);
self.block_editors.insert(block_id.to_owned(), editor.clone());
self
.block_editors
.insert(block_id.to_owned(), editor.clone());
Ok(editor)
}
},
Some(editor) => Ok(editor.clone()),
}
}
pub(crate) async fn get_editor_from_row_id(&self, row_id: &str) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
pub(crate) async fn get_editor_from_row_id(
&self,
row_id: &str,
) -> FlowyResult<Arc<DatabaseBlockRevisionEditor>> {
let block_id = self.persistence.get_block_id(row_id)?;
self.get_block_editor(&block_id).await
}
#[tracing::instrument(level = "trace", skip(self, start_row_id), err)]
pub(crate) async fn create_row(&self, row_rev: RowRevision, start_row_id: Option<String>) -> FlowyResult<i32> {
pub(crate) async fn create_row(
&self,
row_rev: RowRevision,
start_row_id: Option<String>,
) -> FlowyResult<i32> {
let block_id = row_rev.block_id.clone();
self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
let editor = self.get_block_editor(&row_rev.block_id).await?;
@ -134,9 +151,16 @@ impl DatabaseBlockManager {
let editor = self.get_editor_from_row_id(&changeset.row_id).await?;
editor.update_row(changeset.clone()).await?;
match editor.get_row_rev(&changeset.row_id).await? {
None => tracing::error!("Update row failed, can't find the row with id: {}", changeset.row_id),
None => tracing::error!(
"Update row failed, can't find the row with id: {}",
changeset.row_id
),
Some((_, row_rev)) => {
let changed_field_ids = changeset.cell_by_field_id.keys().cloned().collect::<Vec<String>>();
let changed_field_ids = changeset
.cell_by_field_id
.keys()
.cloned()
.collect::<Vec<String>>();
let row = UpdatedRowPB {
row: make_row_from_row_rev(row_rev),
field_ids: changed_field_ids,
@ -146,7 +170,7 @@ impl DatabaseBlockManager {
block_id: editor.block_id.clone(),
row,
});
}
},
}
Ok(())
}
@ -166,7 +190,7 @@ impl DatabaseBlockManager {
});
Ok(Some(row_rev))
}
},
}
}
@ -190,7 +214,12 @@ impl DatabaseBlockManager {
Ok(changesets)
}
// This function will be moved to GridViewRevisionEditor
pub(crate) async fn move_row(&self, row_rev: Arc<RowRevision>, from: usize, to: usize) -> FlowyResult<()> {
pub(crate) async fn move_row(
&self,
row_rev: Arc<RowRevision>,
from: usize,
to: usize,
) -> FlowyResult<()> {
let editor = self.get_editor_from_row_id(&row_rev.id).await?;
editor.move_row(&row_rev.id, from, to).await?;
@ -253,14 +282,14 @@ impl DatabaseBlockManager {
let row_revs = editor.get_row_revs::<&str>(None).await?;
blocks.push(DatabaseBlockRowRevision { block_id, row_revs });
}
}
},
Some(block_ids) => {
for block_id in block_ids {
let editor = self.get_block_editor(&block_id).await?;
let row_revs = editor.get_row_revs::<&str>(None).await?;
blocks.push(DatabaseBlockRowRevision { block_id, row_revs });
}
}
},
}
Ok(blocks)
}
@ -311,9 +340,16 @@ pub fn make_database_block_rev_manager(
// Create snapshot persistence
let snapshot_object_id = format!("grid_block:{}", block_id);
let snapshot_persistence = SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);
let snapshot_persistence =
SQLiteDatabaseRevisionSnapshotPersistence::new(&snapshot_object_id, pool);
let rev_compress = GridBlockRevisionMergeable();
let rev_manager = RevisionManager::new(&user_id, block_id, rev_persistence, rev_compress, snapshot_persistence);
let rev_manager = RevisionManager::new(
&user_id,
block_id,
rev_persistence,
rev_compress,
snapshot_persistence,
);
Ok(rev_manager)
}

View File

@ -26,7 +26,10 @@ where
where
T: 'static + Send + Sync,
{
self.0.insert(key.clone(), TypeValue::new(val)).and_then(downcast_owned)
self
.0
.insert(key.clone(), TypeValue::new(val))
.and_then(downcast_owned)
}
pub fn remove(&mut self, key: &TypeValueKey) {
@ -44,14 +47,18 @@ where
where
T: 'static + Send + Sync,
{
self.0.get(key).and_then(|type_value| type_value.boxed.downcast_ref())
self
.0
.get(key)
.and_then(|type_value| type_value.boxed.downcast_ref())
}
pub fn get_mut<T>(&mut self, key: &TypeValueKey) -> Option<&mut T>
where
T: 'static + Send + Sync,
{
self.0
self
.0
.get_mut(key)
.and_then(|type_value| type_value.boxed.downcast_mut())
}

View File

@ -84,7 +84,10 @@ pub fn get_type_cell_protobuf<T: TryInto<TypeCellData, Error = FlowyError> + Deb
let to_field_type = field_rev.ty.into();
match data.try_into() {
Ok(type_cell_data) => {
let TypeCellData { cell_str, field_type } = type_cell_data;
let TypeCellData {
cell_str,
field_type,
} = type_cell_data;
match try_decode_cell_str_to_cell_protobuf(
cell_str,
&field_type,
@ -96,15 +99,15 @@ pub fn get_type_cell_protobuf<T: TryInto<TypeCellData, Error = FlowyError> + Deb
Err(e) => {
tracing::error!("Decode cell data failed, {:?}", e);
(field_type, CellProtobufBlob::default())
},
}
}
}
},
Err(_err) => {
// It's okay to ignore this error, because it's acceptable that the current cell can't
// display the existing cell data. For example, the UI of the text cell will be blank if
// the cell's data type is Number.
(to_field_type, CellProtobufBlob::default())
}
},
}
}
@ -120,9 +123,18 @@ where
let to_field_type = field_rev.ty.into();
match data.try_into() {
Ok(type_cell_data) => {
let TypeCellData { cell_str, field_type } = type_cell_data;
try_decode_cell_str_to_cell_data(cell_str, &field_type, &to_field_type, field_rev, cell_data_cache)
}
let TypeCellData {
cell_str,
field_type,
} = type_cell_data;
try_decode_cell_str_to_cell_data(
cell_str,
&field_type,
&to_field_type,
field_rev,
cell_data_cache,
)
},
Err(_err) => None,
}
}
@ -192,7 +204,9 @@ pub fn stringify_cell_data(
field_type: &FieldType,
field_rev: &FieldRevision,
) -> String {
match TypeOptionCellExt::new_with_cell_data_cache(field_rev, None).get_type_option_cell_data_handler(field_type) {
match TypeOptionCellExt::new_with_cell_data_cache(field_rev, None)
.get_type_option_cell_data_handler(field_type)
{
None => "".to_string(),
Some(handler) => handler.stringify_cell_str(cell_str, decoded_field_type, field_rev),
}
@ -234,14 +248,22 @@ pub fn insert_date_cell(timestamp: i64, field_rev: &FieldRevision) -> CellRevisi
CellRevision::new(data)
}
pub fn insert_select_option_cell(option_ids: Vec<String>, field_rev: &FieldRevision) -> CellRevision {
let changeset = SelectOptionCellChangeset::from_insert_options(option_ids).to_cell_changeset_str();
pub fn insert_select_option_cell(
option_ids: Vec<String>,
field_rev: &FieldRevision,
) -> CellRevision {
let changeset =
SelectOptionCellChangeset::from_insert_options(option_ids).to_cell_changeset_str();
let data = apply_cell_data_changeset(changeset, None, field_rev, None).unwrap();
CellRevision::new(data)
}
pub fn delete_select_option_cell(option_ids: Vec<String>, field_rev: &FieldRevision) -> CellRevision {
let changeset = SelectOptionCellChangeset::from_delete_options(option_ids).to_cell_changeset_str();
pub fn delete_select_option_cell(
option_ids: Vec<String>,
field_rev: &FieldRevision,
) -> CellRevision {
let changeset =
SelectOptionCellChangeset::from_delete_options(option_ids).to_cell_changeset_str();
let data = apply_cell_data_changeset(changeset, None, field_rev, None).unwrap();
CellRevision::new(data)
}
@ -301,7 +323,7 @@ where
Err(e) => {
tracing::error!("Deserialize CellDataChangeset failed: {}", e);
AnyCellChangeset(None)
}
},
}
}
}

View File

@ -74,7 +74,10 @@ impl std::convert::TryFrom<CellRevision> for TypeCellData {
impl TypeCellData {
pub fn new(cell_str: String, field_type: FieldType) -> Self {
TypeCellData { cell_str, field_type }
TypeCellData {
cell_str,
field_type,
}
}
pub fn to_json(&self) -> String {
@ -192,7 +195,7 @@ impl ToString for CellProtobufBlob {
Err(e) => {
tracing::error!("DecodedCellData to string failed: {:?}", e);
"".to_string()
}
},
}
}
}

View File

@ -26,7 +26,10 @@ pub fn default_type_option_builder_from_type(field_type: &FieldType) -> Box<dyn
type_option_builder_from_json_str(&s, field_type)
}
pub fn type_option_builder_from_json_str(s: &str, field_type: &FieldType) -> Box<dyn TypeOptionBuilder> {
pub fn type_option_builder_from_json_str(
s: &str,
field_type: &FieldType,
) -> Box<dyn TypeOptionBuilder> {
match field_type {
FieldType::RichText => Box::new(RichTextTypeOptionBuilder::from_json_str(s)),
FieldType::Number => Box::new(NumberTypeOptionBuilder::from_json_str(s)),
@ -39,7 +42,10 @@ pub fn type_option_builder_from_json_str(s: &str, field_type: &FieldType) -> Box
}
}
pub fn type_option_builder_from_bytes<T: Into<Bytes>>(bytes: T, field_type: &FieldType) -> Box<dyn TypeOptionBuilder> {
pub fn type_option_builder_from_bytes<T: Into<Bytes>>(
bytes: T,
field_type: &FieldType,
) -> Box<dyn TypeOptionBuilder> {
let bytes = bytes.into();
match field_type {
FieldType::RichText => Box::new(RichTextTypeOptionBuilder::from_protobuf_bytes(bytes)),

View File

@ -22,7 +22,12 @@ mod tests {
let checkbox_filter = CheckboxFilterPB {
condition: CheckboxFilterConditionPB::IsChecked,
};
for (value, visible) in [("true", true), ("yes", true), ("false", false), ("no", false)] {
for (value, visible) in [
("true", true),
("yes", true),
("false", false),
("no", false),
] {
let data = CheckboxCellData::from_str(value).unwrap();
assert_eq!(checkbox_filter.is_visible(&data), visible);
}
@ -33,7 +38,12 @@ mod tests {
let checkbox_filter = CheckboxFilterPB {
condition: CheckboxFilterConditionPB::IsUnChecked,
};
for (value, visible) in [("false", true), ("no", true), ("true", false), ("yes", false)] {
for (value, visible) in [
("false", true),
("no", true),
("true", false),
("yes", false),
] {
let data = CheckboxCellData::from_str(value).unwrap();
assert_eq!(checkbox_filter.is_visible(&data), visible);
}

View File

@ -2,8 +2,8 @@ use crate::entities::{CheckboxFilterPB, FieldType};
use crate::impl_type_option;
use crate::services::cell::{CellDataChangeset, CellDataDecoder, FromCellString, TypeCellData};
use crate::services::field::{
default_order, BoxTypeOptionBuilder, CheckboxCellData, TypeOption, TypeOptionBuilder, TypeOptionCellData,
TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionTransform,
default_order, BoxTypeOptionBuilder, CheckboxCellData, TypeOption, TypeOptionBuilder,
TypeOptionCellData, TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionTransform,
};
use bytes::Bytes;
use flowy_derive::ProtoBuf;
@ -54,7 +54,12 @@ impl TypeOptionTransform for CheckboxTypeOptionPB {
true
}
fn transform_type_option(&mut self, _old_type_option_field_type: FieldType, _old_type_option_data: String) {}
fn transform_type_option(
&mut self,
_old_type_option_field_type: FieldType,
_old_type_option_data: String,
) {
}
fn transform_type_option_cell_str(
&self,
@ -74,11 +79,17 @@ impl TypeOptionTransform for CheckboxTypeOptionPB {
}
impl TypeOptionCellData for CheckboxTypeOptionPB {
fn convert_to_protobuf(&self, cell_data: <Self as TypeOption>::CellData) -> <Self as TypeOption>::CellProtobufType {
fn convert_to_protobuf(
&self,
cell_data: <Self as TypeOption>::CellData,
) -> <Self as TypeOption>::CellProtobufType {
cell_data
}
fn decode_type_option_cell_str(&self, cell_str: String) -> FlowyResult<<Self as TypeOption>::CellData> {
fn decode_type_option_cell_str(
&self,
cell_str: String,
) -> FlowyResult<<Self as TypeOption>::CellData> {
CheckboxCellData::from_cell_str(&cell_str)
}
}

View File

@ -9,11 +9,11 @@ impl DateFilterPB {
match self.condition {
DateFilterConditionPB::DateIsNotEmpty => {
return true;
}
},
DateFilterConditionPB::DateIsEmpty => {
return false;
}
_ => {}
},
_ => {},
}
let cell_time = NaiveDateTime::from_timestamp_opt(timestamp, 0);
@ -35,7 +35,7 @@ impl DateFilterPB {
let end_date = end_time.map(|time| time.date());
cell_date >= start_date && cell_date <= end_date
}
},
Some(timestamp) => {
let expected_timestamp = NaiveDateTime::from_timestamp_opt(timestamp, 0);
let expected_date = expected_timestamp.map(|time| time.date());
@ -49,9 +49,9 @@ impl DateFilterPB {
DateFilterConditionPB::DateOnOrAfter => cell_date >= expected_date,
_ => true,
}
},
}
}
}
},
}
}
}

View File

@ -19,16 +19,16 @@ mod tests {
match date_format {
DateFormat::Friendly => {
assert_date(&type_option, 1647251762, None, "Mar 14,2022", &field_rev);
}
},
DateFormat::US => {
assert_date(&type_option, 1647251762, None, "2022/03/14", &field_rev);
}
},
DateFormat::ISO => {
assert_date(&type_option, 1647251762, None, "2022-03-14", &field_rev);
}
},
DateFormat::Local => {
assert_date(&type_option, 1647251762, None, "03/14/2022", &field_rev);
}
},
}
}
}
@ -52,7 +52,7 @@ mod tests {
"May 27,2022 23:00",
&field_rev,
);
}
},
TimeFormat::TwelveHour => {
assert_date(&type_option, 1653609600, None, "May 27,2022", &field_rev);
assert_date(
@ -62,7 +62,7 @@ mod tests {
"May 27,2022 11:23 PM",
&field_rev,
);
}
},
}
}
}
@ -97,7 +97,13 @@ mod tests {
type_option.include_time = true;
let field_rev = FieldBuilder::from_field_type(&FieldType::DateTime).build();
assert_date(&type_option, 1653609600, Some("".to_owned()), "May 27,2022", &field_rev);
assert_date(
&type_option,
1653609600,
Some("".to_owned()),
"May 27,2022",
&field_rev,
);
}
/// The default time format is TwentyFourHour, so the include_time_str in twelve_hours_format will cause a parser error.
@ -135,7 +141,10 @@ mod tests {
// Mon Mar 14 2022 17:56:02 GMT+0800 (China Standard Time)
let gmt_8_offset = FixedOffset::east_opt(8 * 3600).unwrap();
let china_local = chrono::DateTime::<chrono::Local>::from_utc(native, gmt_8_offset);
let china_local_time = format!("{}", china_local.format_with_items(StrftimeItems::new(&format)));
let china_local_time = format!(
"{}",
china_local.format_with_items(StrftimeItems::new(&format))
);
assert_eq!(china_local_time, "03/14/2022 05:56 PM");
}
@ -160,7 +169,11 @@ mod tests {
);
}
fn decode_cell_data(cell_str: String, type_option: &DateTypeOptionPB, field_rev: &FieldRevision) -> String {
fn decode_cell_data(
cell_str: String,
type_option: &DateTypeOptionPB,
field_rev: &FieldRevision,
) -> String {
let decoded_data = type_option
.decode_cell_str(cell_str, &FieldType::DateTime, field_rev)
.unwrap();

View File

@ -2,9 +2,9 @@ use crate::entities::{DateFilterPB, FieldType};
use crate::impl_type_option;
use crate::services::cell::{CellDataChangeset, CellDataDecoder, FromCellString, TypeCellData};
use crate::services::field::{
default_order, BoxTypeOptionBuilder, DateCellChangeset, DateCellData, DateCellDataPB, DateFormat, TimeFormat,
TypeOption, TypeOptionBuilder, TypeOptionCellData, TypeOptionCellDataCompare, TypeOptionCellDataFilter,
TypeOptionTransform,
default_order, BoxTypeOptionBuilder, DateCellChangeset, DateCellData, DateCellDataPB, DateFormat,
TimeFormat, TypeOption, TypeOptionBuilder, TypeOptionCellData, TypeOptionCellDataCompare,
TypeOptionCellDataFilter, TypeOptionTransform,
};
use bytes::Bytes;
use chrono::format::strftime::StrftimeItems;
@ -37,11 +37,17 @@ impl TypeOption for DateTypeOptionPB {
}
impl TypeOptionCellData for DateTypeOptionPB {
fn convert_to_protobuf(&self, cell_data: <Self as TypeOption>::CellData) -> <Self as TypeOption>::CellProtobufType {
fn convert_to_protobuf(
&self,
cell_data: <Self as TypeOption>::CellData,
) -> <Self as TypeOption>::CellProtobufType {
self.today_desc_from_timestamp(cell_data)
}
fn decode_type_option_cell_str(&self, cell_str: String) -> FlowyResult<<Self as TypeOption>::CellData> {
fn decode_type_option_cell_str(
&self,
cell_str: String,
) -> FlowyResult<<Self as TypeOption>::CellData> {
DateCellData::from_cell_str(&cell_str)
}
}
@ -72,12 +78,20 @@ impl DateTypeOptionPB {
let mut time = "".to_string();
if has_time && self.include_time {
let fmt = format!("{}{}", self.date_format.format_str(), self.time_format.format_str());
let fmt = format!(
"{}{}",
self.date_format.format_str(),
self.time_format.format_str()
);
time = format!("{}", utc.format_with_items(StrftimeItems::new(&fmt))).replace(&date, "");
}
let timestamp = native.timestamp();
DateCellDataPB { date, time, timestamp }
DateCellDataPB {
date,
time,
timestamp,
}
}
fn date_fmt(&self, time: &Option<String>) -> String {
@ -88,9 +102,13 @@ impl DateTypeOptionPB {
if time_str.is_empty() {
self.date_format.format_str().to_string()
} else {
format!("{} {}", self.date_format.format_str(), self.time_format.format_str())
}
format!(
"{} {}",
self.date_format.format_str(),
self.time_format.format_str()
)
}
},
}
} else {
self.date_format.format_str().to_string()
@ -114,11 +132,11 @@ impl DateTypeOptionPB {
Ok(native) => {
let utc = self.utc_date_time_from_native(native);
Ok(utc.timestamp())
}
},
Err(_e) => {
let msg = format!("Parse {} failed", date_str);
Err(FlowyError::new(ErrorCode::InvalidDateTimeFormat, &msg))
}
},
};
}
}
@ -126,7 +144,10 @@ impl DateTypeOptionPB {
Ok(utc.timestamp())
}
fn utc_date_time_from_native(&self, naive: chrono::NaiveDateTime) -> chrono::DateTime<chrono::Utc> {
fn utc_date_time_from_native(
&self,
naive: chrono::NaiveDateTime,
) -> chrono::DateTime<chrono::Utc> {
chrono::DateTime::<chrono::Utc>::from_utc(naive, chrono::Utc)
}
}
@ -174,7 +195,7 @@ impl CellDataChangeset for DateTypeOptionPB {
} else {
date_timestamp
}
}
},
_ => date_timestamp,
},
};

View File

@ -1,6 +1,7 @@
use crate::entities::CellIdPB;
use crate::services::cell::{
CellProtobufBlobParser, DecodedCellData, FromCellChangesetString, FromCellString, ToCellChangesetString,
CellProtobufBlobParser, DecodedCellData, FromCellChangesetString, FromCellString,
ToCellChangesetString,
};
use bytes::Bytes;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
@ -127,7 +128,7 @@ impl std::convert::From<i32> for DateFormat {
_ => {
tracing::error!("Unsupported date format, fallback to friendly");
DateFormat::Friendly
}
},
}
}
}
@ -147,7 +148,9 @@ impl DateFormat {
}
}
#[derive(Clone, Copy, PartialEq, Eq, EnumIter, Debug, Hash, Serialize, Deserialize, ProtoBuf_Enum)]
#[derive(
Clone, Copy, PartialEq, Eq, EnumIter, Debug, Hash, Serialize, Deserialize, ProtoBuf_Enum,
)]
pub enum TimeFormat {
TwelveHour = 0,
TwentyFourHour = 1,
@ -161,7 +164,7 @@ impl std::convert::From<i32> for TimeFormat {
_ => {
tracing::error!("Unsupported time format, fallback to TwentyFourHour");
TimeFormat::TwentyFourHour
}
},
}
}
}

View File

@ -12,11 +12,11 @@ impl NumberFilterPB {
match self.condition {
NumberFilterConditionPB::NumberIsEmpty => {
return num_cell_data.is_empty();
}
},
NumberFilterConditionPB::NumberIsNotEmpty => {
return !num_cell_data.is_empty();
}
_ => {}
},
_ => {},
}
}
match num_cell_data.decimal().as_ref() {
@ -32,7 +32,7 @@ impl NumberFilterPB {
NumberFilterConditionPB::LessThanOrEqualTo => cell_decimal <= &decimal,
_ => true,
}
}
},
}
}
}

View File

@ -43,70 +43,114 @@ mod tests {
match format {
NumberFormat::Num => {
assert_number(&type_option, "18443", "18443", &field_type, &field_rev);
}
},
NumberFormat::USD => {
assert_number(&type_option, "18443", "$18,443", &field_type, &field_rev);
}
},
NumberFormat::CanadianDollar => {
assert_number(&type_option, "18443", "CA$18,443", &field_type, &field_rev)
}
NumberFormat::EUR => assert_number(&type_option, "18443", "€18.443", &field_type, &field_rev),
NumberFormat::Pound => assert_number(&type_option, "18443", "£18,443", &field_type, &field_rev),
},
NumberFormat::EUR => {
assert_number(&type_option, "18443", "€18.443", &field_type, &field_rev)
},
NumberFormat::Pound => {
assert_number(&type_option, "18443", "£18,443", &field_type, &field_rev)
},
NumberFormat::Yen => {
assert_number(&type_option, "18443", "¥18,443", &field_type, &field_rev);
}
NumberFormat::Ruble => assert_number(&type_option, "18443", "18.443RUB", &field_type, &field_rev),
NumberFormat::Rupee => assert_number(&type_option, "18443", "₹18,443", &field_type, &field_rev),
NumberFormat::Won => assert_number(&type_option, "18443", "₩18,443", &field_type, &field_rev),
},
NumberFormat::Ruble => {
assert_number(&type_option, "18443", "18.443RUB", &field_type, &field_rev)
},
NumberFormat::Rupee => {
assert_number(&type_option, "18443", "₹18,443", &field_type, &field_rev)
},
NumberFormat::Won => {
assert_number(&type_option, "18443", "₩18,443", &field_type, &field_rev)
},
NumberFormat::Yuan => {
assert_number(&type_option, "18443", "CN¥18,443", &field_type, &field_rev);
}
},
NumberFormat::Real => {
assert_number(&type_option, "18443", "R$18,443", &field_type, &field_rev);
}
NumberFormat::Lira => assert_number(&type_option, "18443", "TRY18.443", &field_type, &field_rev),
NumberFormat::Rupiah => assert_number(&type_option, "18443", "IDR18,443", &field_type, &field_rev),
NumberFormat::Franc => assert_number(&type_option, "18443", "CHF18,443", &field_type, &field_rev),
},
NumberFormat::Lira => {
assert_number(&type_option, "18443", "TRY18.443", &field_type, &field_rev)
},
NumberFormat::Rupiah => {
assert_number(&type_option, "18443", "IDR18,443", &field_type, &field_rev)
},
NumberFormat::Franc => {
assert_number(&type_option, "18443", "CHF18,443", &field_type, &field_rev)
},
NumberFormat::HongKongDollar => {
assert_number(&type_option, "18443", "HZ$18,443", &field_type, &field_rev)
}
},
NumberFormat::NewZealandDollar => {
assert_number(&type_option, "18443", "NZ$18,443", &field_type, &field_rev)
}
NumberFormat::Krona => assert_number(&type_option, "18443", "18 443SEK", &field_type, &field_rev),
},
NumberFormat::Krona => {
assert_number(&type_option, "18443", "18 443SEK", &field_type, &field_rev)
},
NumberFormat::NorwegianKrone => {
assert_number(&type_option, "18443", "18,443NOK", &field_type, &field_rev)
}
NumberFormat::MexicanPeso => assert_number(&type_option, "18443", "MX$18,443", &field_type, &field_rev),
NumberFormat::Rand => assert_number(&type_option, "18443", "ZAR18,443", &field_type, &field_rev),
},
NumberFormat::MexicanPeso => {
assert_number(&type_option, "18443", "MX$18,443", &field_type, &field_rev)
},
NumberFormat::Rand => {
assert_number(&type_option, "18443", "ZAR18,443", &field_type, &field_rev)
},
NumberFormat::NewTaiwanDollar => {
assert_number(&type_option, "18443", "NT$18,443", &field_type, &field_rev)
}
NumberFormat::DanishKrone => assert_number(&type_option, "18443", "18.443DKK", &field_type, &field_rev),
NumberFormat::Baht => assert_number(&type_option, "18443", "THB18,443", &field_type, &field_rev),
NumberFormat::Forint => assert_number(&type_option, "18443", "18 443HUF", &field_type, &field_rev),
NumberFormat::Koruna => assert_number(&type_option, "18443", "18 443CZK", &field_type, &field_rev),
NumberFormat::Shekel => assert_number(&type_option, "18443", "18 443Kč", &field_type, &field_rev),
NumberFormat::ChileanPeso => assert_number(&type_option, "18443", "CLP18.443", &field_type, &field_rev),
},
NumberFormat::DanishKrone => {
assert_number(&type_option, "18443", "18.443DKK", &field_type, &field_rev)
},
NumberFormat::Baht => {
assert_number(&type_option, "18443", "THB18,443", &field_type, &field_rev)
},
NumberFormat::Forint => {
assert_number(&type_option, "18443", "18 443HUF", &field_type, &field_rev)
},
NumberFormat::Koruna => {
assert_number(&type_option, "18443", "18 443CZK", &field_type, &field_rev)
},
NumberFormat::Shekel => {
assert_number(&type_option, "18443", "18 443Kč", &field_type, &field_rev)
},
NumberFormat::ChileanPeso => {
assert_number(&type_option, "18443", "CLP18.443", &field_type, &field_rev)
},
NumberFormat::PhilippinePeso => {
assert_number(&type_option, "18443", "₱18,443", &field_type, &field_rev)
}
NumberFormat::Dirham => assert_number(&type_option, "18443", "18,443AED", &field_type, &field_rev),
},
NumberFormat::Dirham => {
assert_number(&type_option, "18443", "18,443AED", &field_type, &field_rev)
},
NumberFormat::ColombianPeso => {
assert_number(&type_option, "18443", "COP18.443", &field_type, &field_rev)
}
NumberFormat::Riyal => assert_number(&type_option, "18443", "SAR18,443", &field_type, &field_rev),
NumberFormat::Ringgit => assert_number(&type_option, "18443", "MYR18,443", &field_type, &field_rev),
NumberFormat::Leu => assert_number(&type_option, "18443", "18.443RON", &field_type, &field_rev),
},
NumberFormat::Riyal => {
assert_number(&type_option, "18443", "SAR18,443", &field_type, &field_rev)
},
NumberFormat::Ringgit => {
assert_number(&type_option, "18443", "MYR18,443", &field_type, &field_rev)
},
NumberFormat::Leu => {
assert_number(&type_option, "18443", "18.443RON", &field_type, &field_rev)
},
NumberFormat::ArgentinePeso => {
assert_number(&type_option, "18443", "ARS18.443", &field_type, &field_rev)
}
},
NumberFormat::UruguayanPeso => {
assert_number(&type_option, "18443", "UYU18.443", &field_type, &field_rev)
}
NumberFormat::Percent => assert_number(&type_option, "18443", "18,443%", &field_type, &field_rev),
},
NumberFormat::Percent => {
assert_number(&type_option, "18443", "18,443%", &field_type, &field_rev)
},
}
}
}
@ -126,217 +170,355 @@ mod tests {
assert_number(&type_option, "0.2", "0.2", &field_type, &field_rev);
assert_number(&type_option, "", "", &field_type, &field_rev);
assert_number(&type_option, "abc", "", &field_type, &field_rev);
}
},
NumberFormat::USD => {
assert_number(&type_option, "$18,44", "$1,844", &field_type, &field_rev);
assert_number(&type_option, "$0.2", "$0.2", &field_type, &field_rev);
assert_number(&type_option, "$1844", "$1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "$1,844", &field_type, &field_rev);
}
},
NumberFormat::CanadianDollar => {
assert_number(&type_option, "CA$18,44", "CA$1,844", &field_type, &field_rev);
assert_number(
&type_option,
"CA$18,44",
"CA$1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "CA$0.2", "CA$0.2", &field_type, &field_rev);
assert_number(&type_option, "CA$1844", "CA$1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "CA$1,844", &field_type, &field_rev);
}
},
NumberFormat::EUR => {
assert_number(&type_option, "€18.44", "€18,44", &field_type, &field_rev);
assert_number(&type_option, "€0.5", "€0,5", &field_type, &field_rev);
assert_number(&type_option, "€1844", "€1.844", &field_type, &field_rev);
assert_number(&type_option, "1844", "€1.844", &field_type, &field_rev);
}
},
NumberFormat::Pound => {
assert_number(&type_option, "£18,44", "£1,844", &field_type, &field_rev);
assert_number(&type_option, "£0.2", "£0.2", &field_type, &field_rev);
assert_number(&type_option, "£1844", "£1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "£1,844", &field_type, &field_rev);
}
},
NumberFormat::Yen => {
assert_number(&type_option, "¥18,44", "¥1,844", &field_type, &field_rev);
assert_number(&type_option, "¥0.2", "¥0.2", &field_type, &field_rev);
assert_number(&type_option, "¥1844", "¥1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "¥1,844", &field_type, &field_rev);
}
},
NumberFormat::Ruble => {
assert_number(&type_option, "RUB18.44", "18,44RUB", &field_type, &field_rev);
assert_number(
&type_option,
"RUB18.44",
"18,44RUB",
&field_type,
&field_rev,
);
assert_number(&type_option, "0.5", "0,5RUB", &field_type, &field_rev);
assert_number(&type_option, "RUB1844", "1.844RUB", &field_type, &field_rev);
assert_number(&type_option, "1844", "1.844RUB", &field_type, &field_rev);
}
},
NumberFormat::Rupee => {
assert_number(&type_option, "₹18,44", "₹1,844", &field_type, &field_rev);
assert_number(&type_option, "₹0.2", "₹0.2", &field_type, &field_rev);
assert_number(&type_option, "₹1844", "₹1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "₹1,844", &field_type, &field_rev);
}
},
NumberFormat::Won => {
assert_number(&type_option, "₩18,44", "₩1,844", &field_type, &field_rev);
assert_number(&type_option, "₩0.3", "₩0", &field_type, &field_rev);
assert_number(&type_option, "₩1844", "₩1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "₩1,844", &field_type, &field_rev);
}
},
NumberFormat::Yuan => {
assert_number(&type_option, "CN¥18,44", "CN¥1,844", &field_type, &field_rev);
assert_number(
&type_option,
"CN¥18,44",
"CN¥1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "CN¥0.2", "CN¥0.2", &field_type, &field_rev);
assert_number(&type_option, "CN¥1844", "CN¥1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "CN¥1,844", &field_type, &field_rev);
}
},
NumberFormat::Real => {
assert_number(&type_option, "R$18,44", "R$1,844", &field_type, &field_rev);
assert_number(&type_option, "R$0.2", "R$0.2", &field_type, &field_rev);
assert_number(&type_option, "R$1844", "R$1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "R$1,844", &field_type, &field_rev);
}
},
NumberFormat::Lira => {
assert_number(&type_option, "TRY18.44", "TRY18,44", &field_type, &field_rev);
assert_number(
&type_option,
"TRY18.44",
"TRY18,44",
&field_type,
&field_rev,
);
assert_number(&type_option, "TRY0.5", "TRY0,5", &field_type, &field_rev);
assert_number(&type_option, "TRY1844", "TRY1.844", &field_type, &field_rev);
assert_number(&type_option, "1844", "TRY1.844", &field_type, &field_rev);
}
},
NumberFormat::Rupiah => {
assert_number(&type_option, "IDR18,44", "IDR1,844", &field_type, &field_rev);
assert_number(
&type_option,
"IDR18,44",
"IDR1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "IDR0.2", "IDR0.2", &field_type, &field_rev);
assert_number(&type_option, "IDR1844", "IDR1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "IDR1,844", &field_type, &field_rev);
}
},
NumberFormat::Franc => {
assert_number(&type_option, "CHF18,44", "CHF1,844", &field_type, &field_rev);
assert_number(
&type_option,
"CHF18,44",
"CHF1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "CHF0.2", "CHF0.2", &field_type, &field_rev);
assert_number(&type_option, "CHF1844", "CHF1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "CHF1,844", &field_type, &field_rev);
}
},
NumberFormat::HongKongDollar => {
assert_number(&type_option, "HZ$18,44", "HZ$1,844", &field_type, &field_rev);
assert_number(
&type_option,
"HZ$18,44",
"HZ$1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "HZ$0.2", "HZ$0.2", &field_type, &field_rev);
assert_number(&type_option, "HZ$1844", "HZ$1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "HZ$1,844", &field_type, &field_rev);
}
},
NumberFormat::NewZealandDollar => {
assert_number(&type_option, "NZ$18,44", "NZ$1,844", &field_type, &field_rev);
assert_number(
&type_option,
"NZ$18,44",
"NZ$1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "NZ$0.2", "NZ$0.2", &field_type, &field_rev);
assert_number(&type_option, "NZ$1844", "NZ$1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "NZ$1,844", &field_type, &field_rev);
}
},
NumberFormat::Krona => {
assert_number(&type_option, "SEK18,44", "18,44SEK", &field_type, &field_rev);
assert_number(
&type_option,
"SEK18,44",
"18,44SEK",
&field_type,
&field_rev,
);
assert_number(&type_option, "SEK0.2", "0,2SEK", &field_type, &field_rev);
assert_number(&type_option, "SEK1844", "1 844SEK", &field_type, &field_rev);
assert_number(&type_option, "1844", "1 844SEK", &field_type, &field_rev);
}
},
NumberFormat::NorwegianKrone => {
assert_number(&type_option, "NOK18,44", "1,844NOK", &field_type, &field_rev);
assert_number(
&type_option,
"NOK18,44",
"1,844NOK",
&field_type,
&field_rev,
);
assert_number(&type_option, "NOK0.2", "0.2NOK", &field_type, &field_rev);
assert_number(&type_option, "NOK1844", "1,844NOK", &field_type, &field_rev);
assert_number(&type_option, "1844", "1,844NOK", &field_type, &field_rev);
}
},
NumberFormat::MexicanPeso => {
assert_number(&type_option, "MX$18,44", "MX$1,844", &field_type, &field_rev);
assert_number(
&type_option,
"MX$18,44",
"MX$1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "MX$0.2", "MX$0.2", &field_type, &field_rev);
assert_number(&type_option, "MX$1844", "MX$1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "MX$1,844", &field_type, &field_rev);
}
},
NumberFormat::Rand => {
assert_number(&type_option, "ZAR18,44", "ZAR1,844", &field_type, &field_rev);
assert_number(
&type_option,
"ZAR18,44",
"ZAR1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "ZAR0.2", "ZAR0.2", &field_type, &field_rev);
assert_number(&type_option, "ZAR1844", "ZAR1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "ZAR1,844", &field_type, &field_rev);
}
},
NumberFormat::NewTaiwanDollar => {
assert_number(&type_option, "NT$18,44", "NT$1,844", &field_type, &field_rev);
assert_number(
&type_option,
"NT$18,44",
"NT$1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "NT$0.2", "NT$0.2", &field_type, &field_rev);
assert_number(&type_option, "NT$1844", "NT$1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "NT$1,844", &field_type, &field_rev);
}
},
NumberFormat::DanishKrone => {
assert_number(&type_option, "DKK18.44", "18,44DKK", &field_type, &field_rev);
assert_number(
&type_option,
"DKK18.44",
"18,44DKK",
&field_type,
&field_rev,
);
assert_number(&type_option, "DKK0.5", "0,5DKK", &field_type, &field_rev);
assert_number(&type_option, "DKK1844", "1.844DKK", &field_type, &field_rev);
assert_number(&type_option, "1844", "1.844DKK", &field_type, &field_rev);
}
},
NumberFormat::Baht => {
assert_number(&type_option, "THB18,44", "THB1,844", &field_type, &field_rev);
assert_number(
&type_option,
"THB18,44",
"THB1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "THB0.2", "THB0.2", &field_type, &field_rev);
assert_number(&type_option, "THB1844", "THB1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "THB1,844", &field_type, &field_rev);
}
},
NumberFormat::Forint => {
assert_number(&type_option, "HUF18,44", "18HUF", &field_type, &field_rev);
assert_number(&type_option, "HUF0.3", "0HUF", &field_type, &field_rev);
assert_number(&type_option, "HUF1844", "1 844HUF", &field_type, &field_rev);
assert_number(&type_option, "1844", "1 844HUF", &field_type, &field_rev);
}
},
NumberFormat::Koruna => {
assert_number(&type_option, "CZK18,44", "18,44CZK", &field_type, &field_rev);
assert_number(
&type_option,
"CZK18,44",
"18,44CZK",
&field_type,
&field_rev,
);
assert_number(&type_option, "CZK0.2", "0,2CZK", &field_type, &field_rev);
assert_number(&type_option, "CZK1844", "1 844CZK", &field_type, &field_rev);
assert_number(&type_option, "1844", "1 844CZK", &field_type, &field_rev);
}
},
NumberFormat::Shekel => {
assert_number(&type_option, "Kč18,44", "18,44Kč", &field_type, &field_rev);
assert_number(&type_option, "Kč0.2", "0,2Kč", &field_type, &field_rev);
assert_number(&type_option, "Kč1844", "1 844Kč", &field_type, &field_rev);
assert_number(&type_option, "1844", "1 844Kč", &field_type, &field_rev);
}
},
NumberFormat::ChileanPeso => {
assert_number(&type_option, "CLP18.44", "CLP18", &field_type, &field_rev);
assert_number(&type_option, "0.5", "CLP0", &field_type, &field_rev);
assert_number(&type_option, "CLP1844", "CLP1.844", &field_type, &field_rev);
assert_number(&type_option, "1844", "CLP1.844", &field_type, &field_rev);
}
},
NumberFormat::PhilippinePeso => {
assert_number(&type_option, "₱18,44", "₱1,844", &field_type, &field_rev);
assert_number(&type_option, "₱0.2", "₱0.2", &field_type, &field_rev);
assert_number(&type_option, "₱1844", "₱1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "₱1,844", &field_type, &field_rev);
}
},
NumberFormat::Dirham => {
assert_number(&type_option, "AED18,44", "1,844AED", &field_type, &field_rev);
assert_number(
&type_option,
"AED18,44",
"1,844AED",
&field_type,
&field_rev,
);
assert_number(&type_option, "AED0.2", "0.2AED", &field_type, &field_rev);
assert_number(&type_option, "AED1844", "1,844AED", &field_type, &field_rev);
assert_number(&type_option, "1844", "1,844AED", &field_type, &field_rev);
}
},
NumberFormat::ColombianPeso => {
assert_number(&type_option, "COP18.44", "COP18,44", &field_type, &field_rev);
assert_number(
&type_option,
"COP18.44",
"COP18,44",
&field_type,
&field_rev,
);
assert_number(&type_option, "0.5", "COP0,5", &field_type, &field_rev);
assert_number(&type_option, "COP1844", "COP1.844", &field_type, &field_rev);
assert_number(&type_option, "1844", "COP1.844", &field_type, &field_rev);
}
},
NumberFormat::Riyal => {
assert_number(&type_option, "SAR18,44", "SAR1,844", &field_type, &field_rev);
assert_number(
&type_option,
"SAR18,44",
"SAR1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "SAR0.2", "SAR0.2", &field_type, &field_rev);
assert_number(&type_option, "SAR1844", "SAR1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "SAR1,844", &field_type, &field_rev);
}
},
NumberFormat::Ringgit => {
assert_number(&type_option, "MYR18,44", "MYR1,844", &field_type, &field_rev);
assert_number(
&type_option,
"MYR18,44",
"MYR1,844",
&field_type,
&field_rev,
);
assert_number(&type_option, "MYR0.2", "MYR0.2", &field_type, &field_rev);
assert_number(&type_option, "MYR1844", "MYR1,844", &field_type, &field_rev);
assert_number(&type_option, "1844", "MYR1,844", &field_type, &field_rev);
}
},
NumberFormat::Leu => {
assert_number(&type_option, "RON18.44", "18,44RON", &field_type, &field_rev);
assert_number(
&type_option,
"RON18.44",
"18,44RON",
&field_type,
&field_rev,
);
assert_number(&type_option, "0.5", "0,5RON", &field_type, &field_rev);
assert_number(&type_option, "RON1844", "1.844RON", &field_type, &field_rev);
assert_number(&type_option, "1844", "1.844RON", &field_type, &field_rev);
}
},
NumberFormat::ArgentinePeso => {
assert_number(&type_option, "ARS18.44", "ARS18,44", &field_type, &field_rev);
assert_number(
&type_option,
"ARS18.44",
"ARS18,44",
&field_type,
&field_rev,
);
assert_number(&type_option, "0.5", "ARS0,5", &field_type, &field_rev);
assert_number(&type_option, "ARS1844", "ARS1.844", &field_type, &field_rev);
assert_number(&type_option, "1844", "ARS1.844", &field_type, &field_rev);
}
},
NumberFormat::UruguayanPeso => {
assert_number(&type_option, "UYU18.44", "UYU18,44", &field_type, &field_rev);
assert_number(
&type_option,
"UYU18.44",
"UYU18,44",
&field_type,
&field_rev,
);
assert_number(&type_option, "0.5", "UYU0,5", &field_type, &field_rev);
assert_number(&type_option, "UYU1844", "UYU1.844", &field_type, &field_rev);
assert_number(&type_option, "1844", "UYU1.844", &field_type, &field_rev);
}
},
NumberFormat::Percent => {
assert_number(&type_option, "1", "1%", &field_type, &field_rev);
assert_number(&type_option, "10.1", "10.1%", &field_type, &field_rev);
assert_number(&type_option, "100", "100%", &field_type, &field_rev);
}
},
}
}
}
@ -356,76 +538,114 @@ mod tests {
match format {
NumberFormat::Num => {
assert_number(&type_option, "18443", "18443", &field_type, &field_rev);
}
},
NumberFormat::USD => {
assert_number(&type_option, "18443", "-$18,443", &field_type, &field_rev);
}
},
NumberFormat::CanadianDollar => {
assert_number(&type_option, "18443", "-CA$18,443", &field_type, &field_rev)
}
NumberFormat::EUR => assert_number(&type_option, "18443", "-€18.443", &field_type, &field_rev),
NumberFormat::Pound => assert_number(&type_option, "18443", "-£18,443", &field_type, &field_rev),
},
NumberFormat::EUR => {
assert_number(&type_option, "18443", "-€18.443", &field_type, &field_rev)
},
NumberFormat::Pound => {
assert_number(&type_option, "18443", "-£18,443", &field_type, &field_rev)
},
NumberFormat::Yen => {
assert_number(&type_option, "18443", "-¥18,443", &field_type, &field_rev);
}
NumberFormat::Ruble => assert_number(&type_option, "18443", "-18.443RUB", &field_type, &field_rev),
NumberFormat::Rupee => assert_number(&type_option, "18443", "-₹18,443", &field_type, &field_rev),
NumberFormat::Won => assert_number(&type_option, "18443", "-₩18,443", &field_type, &field_rev),
},
NumberFormat::Ruble => {
assert_number(&type_option, "18443", "-18.443RUB", &field_type, &field_rev)
},
NumberFormat::Rupee => {
assert_number(&type_option, "18443", "-₹18,443", &field_type, &field_rev)
},
NumberFormat::Won => {
assert_number(&type_option, "18443", "-₩18,443", &field_type, &field_rev)
},
NumberFormat::Yuan => {
assert_number(&type_option, "18443", "-CN¥18,443", &field_type, &field_rev);
}
},
NumberFormat::Real => {
assert_number(&type_option, "18443", "-R$18,443", &field_type, &field_rev);
}
NumberFormat::Lira => assert_number(&type_option, "18443", "-TRY18.443", &field_type, &field_rev),
NumberFormat::Rupiah => assert_number(&type_option, "18443", "-IDR18,443", &field_type, &field_rev),
NumberFormat::Franc => assert_number(&type_option, "18443", "-CHF18,443", &field_type, &field_rev),
},
NumberFormat::Lira => {
assert_number(&type_option, "18443", "-TRY18.443", &field_type, &field_rev)
},
NumberFormat::Rupiah => {
assert_number(&type_option, "18443", "-IDR18,443", &field_type, &field_rev)
},
NumberFormat::Franc => {
assert_number(&type_option, "18443", "-CHF18,443", &field_type, &field_rev)
},
NumberFormat::HongKongDollar => {
assert_number(&type_option, "18443", "-HZ$18,443", &field_type, &field_rev)
}
},
NumberFormat::NewZealandDollar => {
assert_number(&type_option, "18443", "-NZ$18,443", &field_type, &field_rev)
}
NumberFormat::Krona => assert_number(&type_option, "18443", "-18 443SEK", &field_type, &field_rev),
},
NumberFormat::Krona => {
assert_number(&type_option, "18443", "-18 443SEK", &field_type, &field_rev)
},
NumberFormat::NorwegianKrone => {
assert_number(&type_option, "18443", "-18,443NOK", &field_type, &field_rev)
}
},
NumberFormat::MexicanPeso => {
assert_number(&type_option, "18443", "-MX$18,443", &field_type, &field_rev)
}
NumberFormat::Rand => assert_number(&type_option, "18443", "-ZAR18,443", &field_type, &field_rev),
},
NumberFormat::Rand => {
assert_number(&type_option, "18443", "-ZAR18,443", &field_type, &field_rev)
},
NumberFormat::NewTaiwanDollar => {
assert_number(&type_option, "18443", "-NT$18,443", &field_type, &field_rev)
}
},
NumberFormat::DanishKrone => {
assert_number(&type_option, "18443", "-18.443DKK", &field_type, &field_rev)
}
NumberFormat::Baht => assert_number(&type_option, "18443", "-THB18,443", &field_type, &field_rev),
NumberFormat::Forint => assert_number(&type_option, "18443", "-18 443HUF", &field_type, &field_rev),
NumberFormat::Koruna => assert_number(&type_option, "18443", "-18 443CZK", &field_type, &field_rev),
NumberFormat::Shekel => assert_number(&type_option, "18443", "-18 443Kč", &field_type, &field_rev),
},
NumberFormat::Baht => {
assert_number(&type_option, "18443", "-THB18,443", &field_type, &field_rev)
},
NumberFormat::Forint => {
assert_number(&type_option, "18443", "-18 443HUF", &field_type, &field_rev)
},
NumberFormat::Koruna => {
assert_number(&type_option, "18443", "-18 443CZK", &field_type, &field_rev)
},
NumberFormat::Shekel => {
assert_number(&type_option, "18443", "-18 443Kč", &field_type, &field_rev)
},
NumberFormat::ChileanPeso => {
assert_number(&type_option, "18443", "-CLP18.443", &field_type, &field_rev)
}
},
NumberFormat::PhilippinePeso => {
assert_number(&type_option, "18443", "-₱18,443", &field_type, &field_rev)
}
NumberFormat::Dirham => assert_number(&type_option, "18443", "-18,443AED", &field_type, &field_rev),
},
NumberFormat::Dirham => {
assert_number(&type_option, "18443", "-18,443AED", &field_type, &field_rev)
},
NumberFormat::ColombianPeso => {
assert_number(&type_option, "18443", "-COP18.443", &field_type, &field_rev)
}
NumberFormat::Riyal => assert_number(&type_option, "18443", "-SAR18,443", &field_type, &field_rev),
NumberFormat::Ringgit => assert_number(&type_option, "18443", "-MYR18,443", &field_type, &field_rev),
NumberFormat::Leu => assert_number(&type_option, "18443", "-18.443RON", &field_type, &field_rev),
},
NumberFormat::Riyal => {
assert_number(&type_option, "18443", "-SAR18,443", &field_type, &field_rev)
},
NumberFormat::Ringgit => {
assert_number(&type_option, "18443", "-MYR18,443", &field_type, &field_rev)
},
NumberFormat::Leu => {
assert_number(&type_option, "18443", "-18.443RON", &field_type, &field_rev)
},
NumberFormat::ArgentinePeso => {
assert_number(&type_option, "18443", "-ARS18.443", &field_type, &field_rev)
}
},
NumberFormat::UruguayanPeso => {
assert_number(&type_option, "18443", "-UYU18.443", &field_type, &field_rev)
}
NumberFormat::Percent => assert_number(&type_option, "18443", "-18,443%", &field_type, &field_rev),
},
NumberFormat::Percent => {
assert_number(&type_option, "18443", "-18,443%", &field_type, &field_rev)
},
}
}
}
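A second pattern runs through every match in the hunks above: arms whose body is a block now end with a trailing comma after the closing brace, while single-expression arms keep their usual comma. A small sketch of the resulting shape, with an illustrative enum rather than the crate's own types:

enum Format {
  Plain,
  Currency,
}

fn describe(format: Format, raw: &str) -> String {
  match format {
    Format::Plain => raw.to_string(),
    Format::Currency => {
      // Block-bodied arms keep a comma after the closing brace.
      format!("${}", raw)
    },
  }
}

fn main() {
  assert_eq!(describe(Format::Currency, "18443"), "$18443");
  println!("{}", describe(Format::Plain, "18443"));
}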

View File

@ -3,8 +3,8 @@ use crate::impl_type_option;
use crate::services::cell::{CellDataChangeset, CellDataDecoder, TypeCellData};
use crate::services::field::type_options::number_type_option::format::*;
use crate::services::field::{
BoxTypeOptionBuilder, NumberCellData, StrCellData, TypeOption, TypeOptionBuilder, TypeOptionCellData,
TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionTransform,
BoxTypeOptionBuilder, NumberCellData, StrCellData, TypeOption, TypeOptionBuilder,
TypeOptionCellData, TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionTransform,
};
use bytes::Bytes;
use fancy_regex::Regex;
@ -83,11 +83,17 @@ impl TypeOption for NumberTypeOptionPB {
}
impl TypeOptionCellData for NumberTypeOptionPB {
fn convert_to_protobuf(&self, cell_data: <Self as TypeOption>::CellData) -> <Self as TypeOption>::CellProtobufType {
fn convert_to_protobuf(
&self,
cell_data: <Self as TypeOption>::CellData,
) -> <Self as TypeOption>::CellProtobufType {
cell_data
}
fn decode_type_option_cell_str(&self, cell_str: String) -> FlowyResult<<Self as TypeOption>::CellData> {
fn decode_type_option_cell_str(
&self,
cell_str: String,
) -> FlowyResult<<Self as TypeOption>::CellData> {
Ok(cell_str.into())
}
}
@ -122,7 +128,7 @@ impl NumberTypeOptionPB {
Err(_) => Ok(NumberCellData::new()),
}
}
}
},
_ => NumberCellData::from_format_str(s, self.sign_positive, &self.format),
}
}
@ -182,7 +188,10 @@ impl CellDataChangeset for NumberTypeOptionPB {
let number_cell_data = self.format_cell_data(&data)?;
match self.format {
NumberFormat::Num => Ok((number_cell_data.to_string(), number_cell_data.to_string().into())),
NumberFormat::Num => Ok((
number_cell_data.to_string(),
number_cell_data.to_string().into(),
)),
_ => Ok((data, number_cell_data.to_string().into())),
}
}

View File

@ -32,7 +32,7 @@ impl NumberCellData {
decimal.set_sign_positive(sign_positive);
let money = Money::from_decimal(decimal, currency);
Ok(Self::from_money(money))
}
},
Err(_) => match Money::from_str(&num_str, currency) {
Ok(money) => Ok(NumberCellData::from_money(money)),
Err(_) => {
@ -43,7 +43,7 @@ impl NumberCellData {
// returns empty string if it can be formatted
Ok(Self::default())
}
}
},
},
}
}

View File

@ -2,7 +2,11 @@ use crate::entities::{ChecklistFilterConditionPB, ChecklistFilterPB};
use crate::services::field::{SelectOptionPB, SelectedSelectOptions};
impl ChecklistFilterPB {
pub fn is_visible(&self, all_options: &[SelectOptionPB], selected_options: &SelectedSelectOptions) -> bool {
pub fn is_visible(
&self,
all_options: &[SelectOptionPB],
selected_options: &SelectedSelectOptions,
) -> bool {
let selected_option_ids = selected_options
.options
.iter()
@ -22,7 +26,7 @@ impl ChecklistFilterPB {
all_option_ids.retain(|option_id| !selected_option_ids.contains(option_id));
all_option_ids.is_empty()
}
},
ChecklistFilterConditionPB::IsIncomplete => {
if selected_option_ids.is_empty() {
return true;
@ -30,7 +34,7 @@ impl ChecklistFilterPB {
all_option_ids.retain(|option_id| !selected_option_ids.contains(option_id));
!all_option_ids.is_empty()
}
},
}
}
}

View File

@ -2,9 +2,9 @@ use crate::entities::{ChecklistFilterPB, FieldType};
use crate::impl_type_option;
use crate::services::cell::{CellDataChangeset, FromCellString, TypeCellData};
use crate::services::field::{
BoxTypeOptionBuilder, SelectOptionCellChangeset, SelectOptionCellDataPB, SelectOptionIds, SelectOptionPB,
SelectTypeOptionSharedAction, SelectedSelectOptions, TypeOption, TypeOptionBuilder, TypeOptionCellData,
TypeOptionCellDataCompare, TypeOptionCellDataFilter,
BoxTypeOptionBuilder, SelectOptionCellChangeset, SelectOptionCellDataPB, SelectOptionIds,
SelectOptionPB, SelectTypeOptionSharedAction, SelectedSelectOptions, TypeOption,
TypeOptionBuilder, TypeOptionCellData, TypeOptionCellDataCompare, TypeOptionCellDataFilter,
};
use bytes::Bytes;
use flowy_derive::ProtoBuf;
@ -32,11 +32,17 @@ impl TypeOption for ChecklistTypeOptionPB {
}
impl TypeOptionCellData for ChecklistTypeOptionPB {
fn convert_to_protobuf(&self, cell_data: <Self as TypeOption>::CellData) -> <Self as TypeOption>::CellProtobufType {
fn convert_to_protobuf(
&self,
cell_data: <Self as TypeOption>::CellData,
) -> <Self as TypeOption>::CellProtobufType {
self.get_selected_options(cell_data)
}
fn decode_type_option_cell_str(&self, cell_str: String) -> FlowyResult<<Self as TypeOption>::CellData> {
fn decode_type_option_cell_str(
&self,
cell_str: String,
) -> FlowyResult<<Self as TypeOption>::CellData> {
SelectOptionIds::from_cell_str(&cell_str)
}
}
@ -64,7 +70,12 @@ impl CellDataChangeset for ChecklistTypeOptionPB {
let insert_option_ids = changeset
.insert_option_ids
.into_iter()
.filter(|insert_option_id| self.options.iter().any(|option| &option.id == insert_option_id))
.filter(|insert_option_id| {
self
.options
.iter()
.any(|option| &option.id == insert_option_id)
})
.collect::<Vec<String>>();
let select_option_ids = match type_cell_data {
@ -82,7 +93,7 @@ impl CellDataChangeset for ChecklistTypeOptionPB {
}
select_ids
}
},
};
Ok((select_option_ids.to_string(), select_option_ids))
}
@ -97,7 +108,8 @@ impl TypeOptionCellDataFilter for ChecklistTypeOptionPB {
if !field_type.is_check_list() {
return true;
}
let selected_options = SelectedSelectOptions::from(self.get_selected_options(cell_data.clone()));
let selected_options =
SelectedSelectOptions::from(self.get_selected_options(cell_data.clone()));
filter.is_visible(&self.options, &selected_options)
}
}

View File

@ -4,9 +4,9 @@ use crate::services::cell::{CellDataChangeset, FromCellString, TypeCellData};
use std::cmp::{min, Ordering};
use crate::services::field::{
default_order, BoxTypeOptionBuilder, SelectOptionCellChangeset, SelectOptionCellDataPB, SelectOptionIds,
SelectOptionPB, SelectTypeOptionSharedAction, SelectedSelectOptions, TypeOption, TypeOptionBuilder,
TypeOptionCellData, TypeOptionCellDataCompare, TypeOptionCellDataFilter,
default_order, BoxTypeOptionBuilder, SelectOptionCellChangeset, SelectOptionCellDataPB,
SelectOptionIds, SelectOptionPB, SelectTypeOptionSharedAction, SelectedSelectOptions, TypeOption,
TypeOptionBuilder, TypeOptionCellData, TypeOptionCellDataCompare, TypeOptionCellDataFilter,
};
use bytes::Bytes;
use flowy_derive::ProtoBuf;
@ -33,11 +33,17 @@ impl TypeOption for MultiSelectTypeOptionPB {
}
impl TypeOptionCellData for MultiSelectTypeOptionPB {
fn convert_to_protobuf(&self, cell_data: <Self as TypeOption>::CellData) -> <Self as TypeOption>::CellProtobufType {
fn convert_to_protobuf(
&self,
cell_data: <Self as TypeOption>::CellData,
) -> <Self as TypeOption>::CellProtobufType {
self.get_selected_options(cell_data)
}
fn decode_type_option_cell_str(&self, cell_str: String) -> FlowyResult<<Self as TypeOption>::CellData> {
fn decode_type_option_cell_str(
&self,
cell_str: String,
) -> FlowyResult<<Self as TypeOption>::CellData> {
SelectOptionIds::from_cell_str(&cell_str)
}
}
@ -65,7 +71,12 @@ impl CellDataChangeset for MultiSelectTypeOptionPB {
let insert_option_ids = changeset
.insert_option_ids
.into_iter()
.filter(|insert_option_id| self.options.iter().any(|option| &option.id == insert_option_id))
.filter(|insert_option_id| {
self
.options
.iter()
.any(|option| &option.id == insert_option_id)
})
.collect::<Vec<String>>();
let select_option_ids = match type_cell_data {
@ -84,7 +95,7 @@ impl CellDataChangeset for MultiSelectTypeOptionPB {
tracing::trace!("Multi-select cell data: {}", select_ids.to_string());
select_ids
}
},
};
Ok((select_option_ids.to_string(), select_option_ids))
}
@ -100,7 +111,8 @@ impl TypeOptionCellDataFilter for MultiSelectTypeOptionPB {
if !field_type.is_multi_select() {
return true;
}
let selected_options = SelectedSelectOptions::from(self.get_selected_options(cell_data.clone()));
let selected_options =
SelectedSelectOptions::from(self.get_selected_options(cell_data.clone()));
filter.is_visible(&selected_options, FieldType::MultiSelect)
}
}
@ -159,7 +171,9 @@ mod tests {
use crate::entities::FieldType;
use crate::services::cell::CellDataChangeset;
use crate::services::field::type_options::selection_type_option::*;
use crate::services::field::{CheckboxTypeOptionBuilder, FieldBuilder, TypeOptionBuilder, TypeOptionTransform};
use crate::services::field::{
CheckboxTypeOptionBuilder, FieldBuilder, TypeOptionBuilder, TypeOptionTransform,
};
use crate::services::field::{MultiSelectTypeOptionBuilder, MultiSelectTypeOptionPB};
#[test]
@ -189,7 +203,10 @@ mod tests {
let singleselect_type_option_data = singleselect_type_option_builder.serializer().json_str();
let mut multi_select = MultiSelectTypeOptionBuilder::default().0;
multi_select.transform_type_option(FieldType::MultiSelect, singleselect_type_option_data.clone());
multi_select.transform_type_option(
FieldType::MultiSelect,
singleselect_type_option_data.clone(),
);
debug_assert_eq!(multi_select.options.len(), 2);
// The yes/no options are already present, so no new options need to be inserted
@ -211,7 +228,8 @@ mod tests {
let type_option = MultiSelectTypeOptionPB::from(&field_rev);
let option_ids = vec![google.id, facebook.id];
let changeset = SelectOptionCellChangeset::from_insert_options(option_ids.clone());
let select_option_ids: SelectOptionIds = type_option.apply_changeset(changeset, None).unwrap().1;
let select_option_ids: SelectOptionIds =
type_option.apply_changeset(changeset, None).unwrap().1;
assert_eq!(&*select_option_ids, &option_ids);
}
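The insert_option_ids filter above (the same pattern appears in the checklist and single-select changesets) simply drops ids that do not refer to an option defined on the field. A condensed stand-in with plain strings instead of SelectOptionPB, purely for illustration:

fn retain_known_option_ids(
  insert_option_ids: Vec<String>,
  field_option_ids: &[String],
) -> Vec<String> {
  insert_option_ids
    .into_iter()
    .filter(|insert_id| field_option_ids.iter().any(|known| known == insert_id))
    .collect()
}

// retain_known_option_ids(vec!["a".into(), "x".into()], &["a".into(), "b".into()])
// keeps only "a": "x" does not match any known option id.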

View File

@ -4,8 +4,16 @@ use crate::entities::{FieldType, SelectOptionConditionPB, SelectOptionFilterPB};
use crate::services::field::SelectedSelectOptions;
impl SelectOptionFilterPB {
pub fn is_visible(&self, selected_options: &SelectedSelectOptions, field_type: FieldType) -> bool {
let selected_option_ids: Vec<&String> = selected_options.options.iter().map(|option| &option.id).collect();
pub fn is_visible(
&self,
selected_options: &SelectedSelectOptions,
field_type: FieldType,
) -> bool {
let selected_option_ids: Vec<&String> = selected_options
.options
.iter()
.map(|option| &option.id)
.collect();
match self.condition {
SelectOptionConditionPB::OptionIs => match field_type {
FieldType::SingleSelect => {
@ -24,7 +32,7 @@ impl SelectOptionFilterPB {
.collect::<Vec<_>>();
!required_options.is_empty()
}
},
FieldType::MultiSelect => {
if self.option_ids.is_empty() {
return true;
@ -37,7 +45,7 @@ impl SelectOptionFilterPB {
.collect::<Vec<_>>();
!required_options.is_empty()
}
},
_ => false,
},
SelectOptionConditionPB::OptionIsNot => match field_type {
@ -57,7 +65,7 @@ impl SelectOptionFilterPB {
.collect::<Vec<_>>();
required_options.is_empty()
}
},
FieldType::MultiSelect => {
let required_options = self
.option_ids
@ -66,7 +74,7 @@ impl SelectOptionFilterPB {
.collect::<Vec<_>>();
required_options.is_empty()
}
},
_ => false,
},
SelectOptionConditionPB::OptionIsEmpty => selected_option_ids.is_empty(),
@ -90,7 +98,10 @@ mod tests {
};
assert_eq!(
filter.is_visible(&SelectedSelectOptions { options: vec![] }, FieldType::SingleSelect),
filter.is_visible(
&SelectedSelectOptions { options: vec![] },
FieldType::SingleSelect
),
true
);
assert_eq!(
@ -104,11 +115,19 @@ mod tests {
);
assert_eq!(
filter.is_visible(&SelectedSelectOptions { options: vec![] }, FieldType::MultiSelect),
filter.is_visible(
&SelectedSelectOptions { options: vec![] },
FieldType::MultiSelect
),
true
);
assert_eq!(
filter.is_visible(&SelectedSelectOptions { options: vec![option] }, FieldType::MultiSelect),
filter.is_visible(
&SelectedSelectOptions {
options: vec![option]
},
FieldType::MultiSelect
),
false,
);
}
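The OptionIs branches above boil down to a membership test between the filter's option ids and the currently selected ids. A hedged, standalone sketch of that check with bare string ids (not the real SelectOptionFilterPB API); OptionIsNot is essentially the negation:

fn option_is_visible(filter_option_ids: &[String], selected_option_ids: &[String]) -> bool {
  // Mirrors the early return shown above: a filter with no option ids hides nothing.
  if filter_option_ids.is_empty() {
    return true;
  }
  // Visible when at least one required option is currently selected.
  filter_option_ids
    .iter()
    .any(|id| selected_option_ids.contains(id))
}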
@ -132,7 +151,10 @@ mod tests {
true
);
assert_eq!(
filter.is_visible(&SelectedSelectOptions { options: vec![] }, FieldType::SingleSelect),
filter.is_visible(
&SelectedSelectOptions { options: vec![] },
FieldType::SingleSelect
),
false,
);
@ -146,7 +168,10 @@ mod tests {
true
);
assert_eq!(
filter.is_visible(&SelectedSelectOptions { options: vec![] }, FieldType::MultiSelect),
filter.is_visible(
&SelectedSelectOptions { options: vec![] },
FieldType::MultiSelect
),
false,
);
}
@ -233,7 +258,10 @@ mod tests {
(vec![option_1.clone()], false),
(vec![option_2.clone()], false),
(vec![option_3.clone()], true),
(vec![option_1.clone(), option_2.clone(), option_3.clone()], false),
(
vec![option_1.clone(), option_2.clone(), option_3.clone()],
false,
),
(vec![], true),
] {
assert_eq!(
@ -253,7 +281,10 @@ mod tests {
option_ids: vec![option_1.id.clone(), option_2.id.clone()],
};
for (options, is_visible) in vec![
(vec![option_1.clone(), option_2.clone(), option_3.clone()], true),
(
vec![option_1.clone(), option_2.clone(), option_3.clone()],
true,
),
(vec![option_2.clone(), option_1.clone()], true),
(vec![option_2.clone()], true),
(vec![option_1.clone(), option_3.clone()], true),

View File

@ -1,14 +1,14 @@
use crate::entities::parser::NotEmptyStr;
use crate::entities::{CellIdPB, CellIdParams, FieldType};
use crate::services::cell::{
CellDataDecoder, CellProtobufBlobParser, DecodedCellData, FromCellChangesetString, FromCellString,
ToCellChangesetString,
CellDataDecoder, CellProtobufBlobParser, DecodedCellData, FromCellChangesetString,
FromCellString, ToCellChangesetString,
};
use crate::services::field::selection_type_option::type_option_transform::SelectOptionTypeOptionTransformHelper;
use crate::services::field::{
CheckboxCellData, ChecklistTypeOptionPB, MultiSelectTypeOptionPB, SingleSelectTypeOptionPB, TypeOption,
TypeOptionCellData, TypeOptionTransform,
CheckboxCellData, ChecklistTypeOptionPB, MultiSelectTypeOptionPB, SingleSelectTypeOptionPB,
TypeOption, TypeOptionCellData, TypeOptionTransform,
};
use bytes::Bytes;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
@ -74,9 +74,18 @@ impl std::default::Default for SelectOptionColorPB {
}
}
pub fn make_selected_options(ids: SelectOptionIds, options: &[SelectOptionPB]) -> Vec<SelectOptionPB> {
ids.iter()
.flat_map(|option_id| options.iter().find(|option| &option.id == option_id).cloned())
pub fn make_selected_options(
ids: SelectOptionIds,
options: &[SelectOptionPB],
) -> Vec<SelectOptionPB> {
ids
.iter()
.flat_map(|option_id| {
options
.iter()
.find(|option| &option.id == option_id)
.cloned()
})
.collect()
}
/// Defines the shared actions used by SingleSelect or Multi-Select.
@ -100,7 +109,10 @@ pub trait SelectTypeOptionSharedAction: TypeOptionDataSerializer + Send + Sync {
fn delete_option(&mut self, delete_option: SelectOptionPB) {
let options = self.mut_options();
if let Some(index) = options.iter().position(|option| option.id == delete_option.id) {
if let Some(index) = options
.iter()
.position(|option| option.id == delete_option.id)
{
options.remove(index);
}
}
@ -114,10 +126,10 @@ pub trait SelectTypeOptionSharedAction: TypeOptionDataSerializer + Send + Sync {
fn get_selected_options(&self, ids: SelectOptionIds) -> SelectOptionCellDataPB {
let mut select_options = make_selected_options(ids, self.options());
match self.number_of_max_options() {
None => {}
None => {},
Some(number_of_max_options) => {
select_options.truncate(number_of_max_options);
}
},
}
SelectOptionCellDataPB {
options: self.options().clone(),
@ -141,7 +153,11 @@ where
true
}
fn transform_type_option(&mut self, old_type_option_field_type: FieldType, old_type_option_data: String) {
fn transform_type_option(
&mut self,
old_type_option_field_type: FieldType,
old_type_option_data: String,
) {
SelectOptionTypeOptionTransformHelper::transform_type_option(
self,
&old_type_option_field_type,
@ -166,7 +182,7 @@ where
transformed_ids.push(option.id.clone());
}
Some(SelectOptionIds::from(transformed_ids))
}
},
Err(_) => None,
},
FieldType::RichText => SelectOptionIds::from_cell_str(cell_str).ok(),
@ -189,7 +205,8 @@ where
}
fn decode_cell_data_to_str(&self, cell_data: <Self as TypeOption>::CellData) -> String {
self.get_selected_options(cell_data)
self
.get_selected_options(cell_data)
.select_options
.into_iter()
.map(|option| option.name)
@ -206,19 +223,19 @@ pub fn select_type_option_from_field_rev(
FieldType::SingleSelect => {
let type_option = SingleSelectTypeOptionPB::from(field_rev);
Ok(Box::new(type_option))
}
},
FieldType::MultiSelect => {
let type_option = MultiSelectTypeOptionPB::from(field_rev);
Ok(Box::new(type_option))
}
},
FieldType::Checklist => {
let type_option = ChecklistTypeOptionPB::from(field_rev);
Ok(Box::new(type_option))
}
},
ty => {
tracing::error!("Unsupported field type: {:?} for this handler", ty);
Err(ErrorCode::FieldInvalidOperation.into())
}
},
}
}
@ -291,7 +308,10 @@ impl std::convert::From<String> for SelectOptionIds {
impl std::convert::From<Vec<String>> for SelectOptionIds {
fn from(ids: Vec<String>) -> Self {
let ids = ids.into_iter().filter(|id| !id.is_empty()).collect::<Vec<String>>();
let ids = ids
.into_iter()
.filter(|id| !id.is_empty())
.collect::<Vec<String>>();
Self(ids)
}
}
@ -394,7 +414,7 @@ impl TryInto<SelectOptionCellChangesetParams> for SelectOptionCellChangesetPB {
Err(_) => {
tracing::error!("The insert option id should not be empty");
None
}
},
})
.collect::<Vec<String>>();
@ -406,7 +426,7 @@ impl TryInto<SelectOptionCellChangesetParams> for SelectOptionCellChangesetPB {
Err(_) => {
tracing::error!("The deleted option id should not be empty");
None
}
},
})
.collect::<Vec<String>>();
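For reference, make_selected_options above is an id-to-option lookup that silently drops ids with no matching option. A simplified stand-in, with a hypothetical Opt struct in place of SelectOptionPB:

#[derive(Clone, Debug)]
struct Opt {
  id: String,
  name: String,
}

fn make_selected(ids: &[String], options: &[Opt]) -> Vec<Opt> {
  ids
    .iter()
    // flat_map over Option drops ids that match no option, just like the
    // find(..).cloned() chain in the real function.
    .flat_map(|id| options.iter().find(|option| &option.id == id).cloned())
    .collect()
}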

View File

@ -4,8 +4,8 @@ use crate::services::cell::{CellDataChangeset, FromCellString, TypeCellData};
use std::cmp::Ordering;
use crate::services::field::{
default_order, BoxTypeOptionBuilder, SelectOptionCellDataPB, SelectedSelectOptions, TypeOption, TypeOptionBuilder,
TypeOptionCellData, TypeOptionCellDataCompare, TypeOptionCellDataFilter,
default_order, BoxTypeOptionBuilder, SelectOptionCellDataPB, SelectedSelectOptions, TypeOption,
TypeOptionBuilder, TypeOptionCellData, TypeOptionCellDataCompare, TypeOptionCellDataFilter,
};
use crate::services::field::{
SelectOptionCellChangeset, SelectOptionIds, SelectOptionPB, SelectTypeOptionSharedAction,
@ -35,11 +35,17 @@ impl TypeOption for SingleSelectTypeOptionPB {
}
impl TypeOptionCellData for SingleSelectTypeOptionPB {
fn convert_to_protobuf(&self, cell_data: <Self as TypeOption>::CellData) -> <Self as TypeOption>::CellProtobufType {
fn convert_to_protobuf(
&self,
cell_data: <Self as TypeOption>::CellData,
) -> <Self as TypeOption>::CellProtobufType {
self.get_selected_options(cell_data)
}
fn decode_type_option_cell_str(&self, cell_str: String) -> FlowyResult<<Self as TypeOption>::CellData> {
fn decode_type_option_cell_str(
&self,
cell_str: String,
) -> FlowyResult<<Self as TypeOption>::CellData> {
SelectOptionIds::from_cell_str(&cell_str)
}
}
@ -67,7 +73,12 @@ impl CellDataChangeset for SingleSelectTypeOptionPB {
let mut insert_option_ids = changeset
.insert_option_ids
.into_iter()
.filter(|insert_option_id| self.options.iter().any(|option| &option.id == insert_option_id))
.filter(|insert_option_id| {
self
.options
.iter()
.any(|option| &option.id == insert_option_id)
})
.collect::<Vec<String>>();
// In single select, the insert_option_ids should only contain one select option id.
@ -94,7 +105,8 @@ impl TypeOptionCellDataFilter for SingleSelectTypeOptionPB {
if !field_type.is_single_select() {
return true;
}
let selected_options = SelectedSelectOptions::from(self.get_selected_options(cell_data.clone()));
let selected_options =
SelectedSelectOptions::from(self.get_selected_options(cell_data.clone()));
filter.is_visible(&selected_options, FieldType::SingleSelect)
}
}
@ -176,7 +188,8 @@ mod tests {
let multiselect_type_option_data = multiselect_type_option_builder.serializer().json_str();
let mut single_select = SingleSelectTypeOptionBuilder::default().0;
single_select.transform_type_option(FieldType::MultiSelect, multiselect_type_option_data.clone());
single_select
.transform_type_option(FieldType::MultiSelect, multiselect_type_option_data.clone());
debug_assert_eq!(single_select.options.len(), 2);
// The yes/no options are already present, so no new options need to be inserted

View File

@ -1,8 +1,8 @@
use crate::entities::FieldType;
use crate::services::field::{
MultiSelectTypeOptionPB, SelectOptionColorPB, SelectOptionIds, SelectOptionPB, SelectTypeOptionSharedAction,
SingleSelectTypeOptionPB, TypeOption, CHECK, UNCHECK,
MultiSelectTypeOptionPB, SelectOptionColorPB, SelectOptionIds, SelectOptionPB,
SelectTypeOptionSharedAction, SingleSelectTypeOptionPB, TypeOption, CHECK, UNCHECK,
};
use grid_model::TypeOptionDataDeserializer;
@ -16,8 +16,11 @@ impl SelectOptionTypeOptionTransformHelper {
///
/// * `old_field_type`: the FieldType of the passed-in TypeOptionData
///
pub fn transform_type_option<T>(shared: &mut T, old_field_type: &FieldType, old_type_option_data: String)
where
pub fn transform_type_option<T>(
shared: &mut T,
old_field_type: &FieldType,
old_type_option_data: String,
) where
T: SelectTypeOptionSharedAction + TypeOption<CellData = SelectOptionIds>,
{
match old_field_type {
@ -32,24 +35,32 @@ impl SelectOptionTypeOptionTransformHelper {
let uncheck_option = SelectOptionPB::with_color(UNCHECK, SelectOptionColorPB::Yellow);
shared.mut_options().push(uncheck_option);
}
}
},
FieldType::MultiSelect => {
let options = MultiSelectTypeOptionPB::from_json_str(&old_type_option_data).options;
options.iter().for_each(|new_option| {
if !shared.options().iter().any(|option| option.name == new_option.name) {
if !shared
.options()
.iter()
.any(|option| option.name == new_option.name)
{
shared.mut_options().push(new_option.clone());
}
})
}
},
FieldType::SingleSelect => {
let options = SingleSelectTypeOptionPB::from_json_str(&old_type_option_data).options;
options.iter().for_each(|new_option| {
if !shared.options().iter().any(|option| option.name == new_option.name) {
if !shared
.options()
.iter()
.any(|option| option.name == new_option.name)
{
shared.mut_options().push(new_option.clone());
}
})
}
_ => {}
},
_ => {},
}
}

View File

@ -14,7 +14,12 @@ mod tests {
let field_rev = FieldBuilder::from_field_type(&field_type).build();
assert_eq!(
stringify_cell_data(1647251762.to_string(), &FieldType::RichText, &field_type, &field_rev),
stringify_cell_data(
1647251762.to_string(),
&FieldType::RichText,
&field_type,
&field_rev
),
"Mar 14,2022"
);
}
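The asserted string comes from the date type option's formatting path. As a rough standalone illustration of how that timestamp maps to the expected text, assuming the chrono crate (which may differ from the project's internal formatting code):

use chrono::NaiveDateTime;

fn main() {
  // 1647251762 is the UNIX timestamp used by the test above.
  let datetime = NaiveDateTime::from_timestamp_opt(1647251762, 0).expect("valid timestamp");
  // "%b %d,%Y" reproduces the "Mar 14,2022" shape the test asserts.
  assert_eq!(datetime.format("%b %d,%Y").to_string(), "Mar 14,2022");
}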

View File

@ -1,12 +1,12 @@
use crate::entities::{FieldType, TextFilterPB};
use crate::impl_type_option;
use crate::services::cell::{
stringify_cell_data, CellDataChangeset, CellDataDecoder, CellProtobufBlobParser, DecodedCellData, FromCellString,
TypeCellData,
stringify_cell_data, CellDataChangeset, CellDataDecoder, CellProtobufBlobParser, DecodedCellData,
FromCellString, TypeCellData,
};
use crate::services::field::{
BoxTypeOptionBuilder, TypeOption, TypeOptionBuilder, TypeOptionCellData, TypeOptionCellDataCompare,
TypeOptionCellDataFilter, TypeOptionTransform,
BoxTypeOptionBuilder, TypeOption, TypeOptionBuilder, TypeOptionCellData,
TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionTransform,
};
use bytes::Bytes;
use flowy_derive::ProtoBuf;
@ -53,7 +53,12 @@ impl TypeOptionTransform for RichTextTypeOptionPB {
true
}
fn transform_type_option(&mut self, _old_type_option_field_type: FieldType, _old_type_option_data: String) {}
fn transform_type_option(
&mut self,
_old_type_option_field_type: FieldType,
_old_type_option_data: String,
) {
}
fn transform_type_option_cell_str(
&self,
@ -67,7 +72,15 @@ impl TypeOptionTransform for RichTextTypeOptionPB {
|| decoded_field_type.is_number()
|| decoded_field_type.is_url()
{
Some(stringify_cell_data(cell_str.to_owned(), decoded_field_type, decoded_field_type, field_rev).into())
Some(
stringify_cell_data(
cell_str.to_owned(),
decoded_field_type,
decoded_field_type,
field_rev,
)
.into(),
)
} else {
StrCellData::from_cell_str(cell_str).ok()
}
@ -75,11 +88,17 @@ impl TypeOptionTransform for RichTextTypeOptionPB {
}
impl TypeOptionCellData for RichTextTypeOptionPB {
fn convert_to_protobuf(&self, cell_data: <Self as TypeOption>::CellData) -> <Self as TypeOption>::CellProtobufType {
fn convert_to_protobuf(
&self,
cell_data: <Self as TypeOption>::CellData,
) -> <Self as TypeOption>::CellProtobufType {
cell_data
}
fn decode_type_option_cell_str(&self, cell_str: String) -> FlowyResult<<Self as TypeOption>::CellData> {
fn decode_type_option_cell_str(
&self,
cell_str: String,
) -> FlowyResult<<Self as TypeOption>::CellData> {
StrCellData::from_cell_str(&cell_str)
}
}

View File

@ -1,5 +1,7 @@
use crate::entities::FieldType;
use crate::services::cell::{CellDataDecoder, FromCellChangesetString, FromCellString, ToCellChangesetString};
use crate::services::cell::{
CellDataDecoder, FromCellChangesetString, FromCellString, ToCellChangesetString,
};
use crate::services::filter::FromFilterString;
use bytes::Bytes;
@ -46,13 +48,19 @@ pub trait TypeOptionCellData: TypeOption {
/// For example:
/// FieldType::URL => URLCellDataPB
/// FieldType::Date=> DateCellDataPB
fn convert_to_protobuf(&self, cell_data: <Self as TypeOption>::CellData) -> <Self as TypeOption>::CellProtobufType;
fn convert_to_protobuf(
&self,
cell_data: <Self as TypeOption>::CellData,
) -> <Self as TypeOption>::CellProtobufType;
/// Decodes the opaque cell string to the corresponding data struct.
// For example, the cell data is a timestamp if its field type is `FieldType::Date`. This cell
// data cannot be shown to the user directly, so it needs to be encoded as a date string with the
// custom format setting, e.g. encoding `1647251762` as `"Mar 14,2022"`.
fn decode_type_option_cell_str(&self, cell_str: String) -> FlowyResult<<Self as TypeOption>::CellData>;
fn decode_type_option_cell_str(
&self,
cell_str: String,
) -> FlowyResult<<Self as TypeOption>::CellData>;
}
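A trimmed-down, hypothetical version of the two methods documented above, shown only to make their relationship concrete; the real trait also carries protobuf types and changeset machinery:

trait TypeOption {
  type CellData;
  type CellProtobufType;
}

trait TypeOptionCellData: TypeOption {
  /// Wraps decoded cell data into the protobuf type sent to the frontend.
  fn convert_to_protobuf(&self, cell_data: Self::CellData) -> Self::CellProtobufType;
  /// Parses the opaque string stored in the cell into typed cell data.
  fn decode_type_option_cell_str(&self, cell_str: String) -> Result<Self::CellData, String>;
}

struct TextOption;

impl TypeOption for TextOption {
  type CellData = String;
  type CellProtobufType = Vec<u8>;
}

impl TypeOptionCellData for TextOption {
  fn convert_to_protobuf(&self, cell_data: String) -> Vec<u8> {
    cell_data.into_bytes()
  }

  fn decode_type_option_cell_str(&self, cell_str: String) -> Result<String, String> {
    Ok(cell_str)
  }
}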
pub trait TypeOptionTransform: TypeOption {
@ -73,7 +81,12 @@ pub trait TypeOptionTransform: TypeOption {
/// * `old_type_option_data`: the data that can be parsed into corresponding `TypeOption`.
///
///
fn transform_type_option(&mut self, _old_type_option_field_type: FieldType, _old_type_option_data: String) {}
fn transform_type_option(
&mut self,
_old_type_option_field_type: FieldType,
_old_type_option_data: String,
) {
}
/// Transform the cell data from one field type to another
///

View File

@ -4,9 +4,10 @@ use crate::services::cell::{
FromCellChangesetString, FromCellString, TypeCellData,
};
use crate::services::field::{
CheckboxTypeOptionPB, ChecklistTypeOptionPB, DateTypeOptionPB, MultiSelectTypeOptionPB, NumberTypeOptionPB,
RichTextTypeOptionPB, SingleSelectTypeOptionPB, TypeOption, TypeOptionCellData, TypeOptionCellDataCompare,
TypeOptionCellDataFilter, TypeOptionTransform, URLTypeOptionPB,
CheckboxTypeOptionPB, ChecklistTypeOptionPB, DateTypeOptionPB, MultiSelectTypeOptionPB,
NumberTypeOptionPB, RichTextTypeOptionPB, SingleSelectTypeOptionPB, TypeOption,
TypeOptionCellData, TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionTransform,
URLTypeOptionPB,
};
use crate::services::filter::FilterType;
use flowy_error::FlowyResult;
@ -37,7 +38,12 @@ pub trait TypeOptionCellDataHandler {
field_rev: &FieldRevision,
) -> FlowyResult<String>;
fn handle_cell_compare(&self, left_cell_data: &str, right_cell_data: &str, field_rev: &FieldRevision) -> Ordering;
fn handle_cell_compare(
&self,
left_cell_data: &str,
right_cell_data: &str,
field_rev: &FieldRevision,
) -> Ordering;
fn handle_cell_filter(
&self,
@ -48,8 +54,12 @@ pub trait TypeOptionCellDataHandler {
/// Decode the cell_str to corresponding cell data, and then return the display string of the
/// cell data.
fn stringify_cell_str(&self, cell_str: String, decoded_field_type: &FieldType, field_rev: &FieldRevision)
-> String;
fn stringify_cell_str(
&self,
cell_str: String,
decoded_field_type: &FieldType,
field_rev: &FieldRevision,
) -> String;
fn get_cell_data(
&self,
@ -141,7 +151,9 @@ where
cell_str,
cell_data
);
cell_data_cache.write().insert(key.as_ref(), cell_data.clone());
cell_data_cache
.write()
.insert(key.as_ref(), cell_data.clone());
}
Ok(cell_data)
}
@ -219,7 +231,12 @@ where
Ok(cell_str)
}
fn handle_cell_compare(&self, left_cell_data: &str, right_cell_data: &str, field_rev: &FieldRevision) -> Ordering {
fn handle_cell_compare(
&self,
left_cell_data: &str,
right_cell_data: &str,
field_rev: &FieldRevision,
) -> Ordering {
let field_type: FieldType = field_rev.ty.into();
let left = self
.get_decoded_cell_data(left_cell_data.to_owned(), &field_type, field_rev)
@ -318,7 +335,7 @@ impl<'a> TypeOptionCellExt<'a> {
None => vec![],
Some(_handler) => {
todo!()
}
},
}
}
@ -450,30 +467,22 @@ fn get_type_option_transform_handler(
field_type: &FieldType,
) -> Box<dyn TypeOptionTransformHandler> {
match field_type {
FieldType::RichText => {
Box::new(RichTextTypeOptionPB::from_json_str(type_option_data)) as Box<dyn TypeOptionTransformHandler>
}
FieldType::Number => {
Box::new(NumberTypeOptionPB::from_json_str(type_option_data)) as Box<dyn TypeOptionTransformHandler>
}
FieldType::DateTime => {
Box::new(DateTypeOptionPB::from_json_str(type_option_data)) as Box<dyn TypeOptionTransformHandler>
}
FieldType::SingleSelect => {
Box::new(SingleSelectTypeOptionPB::from_json_str(type_option_data)) as Box<dyn TypeOptionTransformHandler>
}
FieldType::MultiSelect => {
Box::new(MultiSelectTypeOptionPB::from_json_str(type_option_data)) as Box<dyn TypeOptionTransformHandler>
}
FieldType::Checkbox => {
Box::new(CheckboxTypeOptionPB::from_json_str(type_option_data)) as Box<dyn TypeOptionTransformHandler>
}
FieldType::URL => {
Box::new(URLTypeOptionPB::from_json_str(type_option_data)) as Box<dyn TypeOptionTransformHandler>
}
FieldType::Checklist => {
Box::new(ChecklistTypeOptionPB::from_json_str(type_option_data)) as Box<dyn TypeOptionTransformHandler>
}
FieldType::RichText => Box::new(RichTextTypeOptionPB::from_json_str(type_option_data))
as Box<dyn TypeOptionTransformHandler>,
FieldType::Number => Box::new(NumberTypeOptionPB::from_json_str(type_option_data))
as Box<dyn TypeOptionTransformHandler>,
FieldType::DateTime => Box::new(DateTypeOptionPB::from_json_str(type_option_data))
as Box<dyn TypeOptionTransformHandler>,
FieldType::SingleSelect => Box::new(SingleSelectTypeOptionPB::from_json_str(type_option_data))
as Box<dyn TypeOptionTransformHandler>,
FieldType::MultiSelect => Box::new(MultiSelectTypeOptionPB::from_json_str(type_option_data))
as Box<dyn TypeOptionTransformHandler>,
FieldType::Checkbox => Box::new(CheckboxTypeOptionPB::from_json_str(type_option_data))
as Box<dyn TypeOptionTransformHandler>,
FieldType::URL => Box::new(URLTypeOptionPB::from_json_str(type_option_data))
as Box<dyn TypeOptionTransformHandler>,
FieldType::Checklist => Box::new(ChecklistTypeOptionPB::from_json_str(type_option_data))
as Box<dyn TypeOptionTransformHandler>,
}
}
@ -538,7 +547,10 @@ impl RowSingleCellData {
into_number_field_cell_data,
<NumberTypeOptionPB as TypeOption>::CellData
);
into_cell_data!(into_url_field_cell_data, <URLTypeOptionPB as TypeOption>::CellData);
into_cell_data!(
into_url_field_cell_data,
<URLTypeOptionPB as TypeOption>::CellData
);
into_cell_data!(
into_single_select_field_cell_data,
<SingleSelectTypeOptionPB as TypeOption>::CellData
@ -547,7 +559,10 @@ impl RowSingleCellData {
into_multi_select_field_cell_data,
<MultiSelectTypeOptionPB as TypeOption>::CellData
);
into_cell_data!(into_date_field_cell_data, <DateTypeOptionPB as TypeOption>::CellData);
into_cell_data!(
into_date_field_cell_data,
<DateTypeOptionPB as TypeOption>::CellData
);
into_cell_data!(
into_check_list_field_cell_data,
<CheckboxTypeOptionPB as TypeOption>::CellData

View File

@ -157,7 +157,10 @@ mod tests {
expected_url: &str,
_field_rev: &FieldRevision,
) {
let decode_cell_data = type_option.apply_changeset(input_str.to_owned(), None).unwrap().1;
let decode_cell_data = type_option
.apply_changeset(input_str.to_owned(), None)
.unwrap()
.1;
assert_eq!(expected_str.to_owned(), decode_cell_data.content);
assert_eq!(expected_url.to_owned(), decode_cell_data.url);
}

View File

@ -2,8 +2,9 @@ use crate::entities::{FieldType, TextFilterPB};
use crate::impl_type_option;
use crate::services::cell::{CellDataChangeset, CellDataDecoder, FromCellString, TypeCellData};
use crate::services::field::{
BoxTypeOptionBuilder, TypeOption, TypeOptionBuilder, TypeOptionCellData, TypeOptionCellDataCompare,
TypeOptionCellDataFilter, TypeOptionTransform, URLCellData, URLCellDataPB,
BoxTypeOptionBuilder, TypeOption, TypeOptionBuilder, TypeOptionCellData,
TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionTransform, URLCellData,
URLCellDataPB,
};
use bytes::Bytes;
use fancy_regex::Regex;
@ -49,11 +50,17 @@ impl TypeOption for URLTypeOptionPB {
impl TypeOptionTransform for URLTypeOptionPB {}
impl TypeOptionCellData for URLTypeOptionPB {
fn convert_to_protobuf(&self, cell_data: <Self as TypeOption>::CellData) -> <Self as TypeOption>::CellProtobufType {
fn convert_to_protobuf(
&self,
cell_data: <Self as TypeOption>::CellData,
) -> <Self as TypeOption>::CellProtobufType {
cell_data.into()
}
fn decode_type_option_cell_str(&self, cell_str: String) -> FlowyResult<<Self as TypeOption>::CellData> {
fn decode_type_option_cell_str(
&self,
cell_str: String,
) -> FlowyResult<<Self as TypeOption>::CellData> {
URLCellData::from_cell_str(&cell_str)
}
}
@ -130,10 +137,10 @@ fn auto_append_scheme(s: &str) -> String {
} else {
format!("https://{}", s)
}
}
},
Err(_) => {
format!("https://{}", s)
}
},
}
}
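auto_append_scheme above defaults bare inputs to https. A hedged sketch of the observable behaviour: the real function runs the input through a URL parser and keeps recognised schemes, which a plain prefix check only approximates:

fn auto_append_scheme(s: &str) -> String {
  // Assumption: the branch not shown in the hunk keeps http/https inputs untouched.
  if s.starts_with("http://") || s.starts_with("https://") {
    s.to_string()
  } else {
    format!("https://{}", s)
  }
}

fn main() {
  // The fallback branch visible in the hunk: bare domains get an https prefix.
  assert_eq!(auto_append_scheme("appflowy.io"), "https://appflowy.io");
}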

View File

@ -1,8 +1,12 @@
use crate::entities::filter_entities::*;
use crate::entities::{FieldType, InsertedRowPB, RowPB};
use crate::services::cell::{AnyTypeCache, AtomicCellDataCache, AtomicCellFilterCache, TypeCellData};
use crate::services::cell::{
AnyTypeCache, AtomicCellDataCache, AtomicCellFilterCache, TypeCellData,
};
use crate::services::field::*;
use crate::services::filter::{FilterChangeset, FilterResult, FilterResultNotification, FilterType};
use crate::services::filter::{
FilterChangeset, FilterResult, FilterResultNotification, FilterType,
};
use crate::services::row::DatabaseBlockRowRevision;
use crate::services::view_editor::{DatabaseViewChanged, GridViewChangedNotifier};
use dashmap::DashMap;
@ -70,7 +74,8 @@ impl FilterController {
}
pub async fn close(&self) {
self.task_scheduler
self
.task_scheduler
.write()
.await
.unregister_handler(&self.handler_id)
@ -80,7 +85,12 @@ impl FilterController {
#[tracing::instrument(name = "schedule_filter_task", level = "trace", skip(self))]
async fn gen_task(&self, task_type: FilterEvent, qos: QualityOfService) {
let task_id = self.task_scheduler.read().await.next_task_id();
let task = Task::new(&self.handler_id, task_id, TaskContent::Text(task_type.to_string()), qos);
let task = Task::new(
&self.handler_id,
task_id,
TaskContent::Text(task_type.to_string()),
qos,
);
self.task_scheduler.write().await.add_task(task);
}
@ -100,7 +110,8 @@ impl FilterController {
});
row_revs.retain(|row_rev| {
self.result_by_row_id
self
.result_by_row_id
.get(&row_rev.id)
.map(|result| result.is_visible())
.unwrap_or(false)
@ -108,7 +119,8 @@ impl FilterController {
}
async fn get_filter_revs_map(&self) -> HashMap<String, Arc<FieldRevision>> {
self.delegate
self
.delegate
.get_field_revs(None)
.await
.into_iter()
@ -116,7 +128,13 @@ impl FilterController {
.collect::<HashMap<String, Arc<FieldRevision>>>()
}
#[tracing::instrument(name = "process_filter_task", level = "trace", skip_all, fields(filter_result), err)]
#[tracing::instrument(
name = "process_filter_task",
level = "trace",
skip_all,
fields(filter_result),
err
)]
pub async fn process(&self, predicate: &str) -> FlowyResult<()> {
let event_type = FilterEvent::from_str(predicate).unwrap();
match event_type {
@ -129,7 +147,8 @@ impl FilterController {
async fn filter_row(&self, row_id: String) -> FlowyResult<()> {
if let Some((_, row_rev)) = self.delegate.get_row_rev(&row_id).await {
let field_rev_by_field_id = self.get_filter_revs_map().await;
let mut notification = FilterResultNotification::new(self.view_id.clone(), row_rev.block_id.clone());
let mut notification =
FilterResultNotification::new(self.view_id.clone(), row_rev.block_id.clone());
if let Some((row_id, is_visible)) = filter_row(
&row_rev,
&self.result_by_row_id,
@ -195,7 +214,8 @@ impl FilterController {
}
pub async fn did_receive_row_changed(&self, row_id: &str) {
self.gen_task(
self
.gen_task(
FilterEvent::RowDidChanged(row_id.to_string()),
QualityOfService::UserInteractive,
)
@ -203,11 +223,17 @@ impl FilterController {
}
#[tracing::instrument(level = "trace", skip(self))]
pub async fn did_receive_changes(&self, changeset: FilterChangeset) -> Option<FilterChangesetNotificationPB> {
pub async fn did_receive_changes(
&self,
changeset: FilterChangeset,
) -> Option<FilterChangesetNotificationPB> {
let mut notification: Option<FilterChangesetNotificationPB> = None;
if let Some(filter_type) = &changeset.insert_filter {
if let Some(filter) = self.filter_from_filter_type(filter_type).await {
notification = Some(FilterChangesetNotificationPB::from_insert(&self.view_id, vec![filter]));
notification = Some(FilterChangesetNotificationPB::from_insert(
&self.view_id,
vec![filter],
));
}
if let Some(filter_rev) = self.delegate.get_filter_rev(filter_type.clone()).await {
self.refresh_filters(vec![filter_rev]).await;
@ -226,7 +252,11 @@ impl FilterController {
}
// Update the corresponding filter in the cache
if let Some(filter_rev) = self.delegate.get_filter_rev(updated_filter_type.new.clone()).await {
if let Some(filter_rev) = self
.delegate
.get_filter_rev(updated_filter_type.new.clone())
.await
{
self.refresh_filters(vec![filter_rev]).await;
}
@ -244,19 +274,24 @@ impl FilterController {
if let Some(filter_type) = &changeset.delete_filter {
if let Some(filter) = self.filter_from_filter_type(filter_type).await {
notification = Some(FilterChangesetNotificationPB::from_delete(&self.view_id, vec![filter]));
notification = Some(FilterChangesetNotificationPB::from_delete(
&self.view_id,
vec![filter],
));
}
self.cell_filter_cache.write().remove(filter_type);
}
self.gen_task(FilterEvent::FilterDidChanged, QualityOfService::Background)
self
.gen_task(FilterEvent::FilterDidChanged, QualityOfService::Background)
.await;
tracing::trace!("{:?}", notification);
notification
}
async fn filter_from_filter_type(&self, filter_type: &FilterType) -> Option<FilterPB> {
self.delegate
self
.delegate
.get_filter_rev(filter_type.clone())
.await
.map(|filter| FilterPB::from(filter.as_ref()))
@ -270,40 +305,47 @@ impl FilterController {
tracing::trace!("Create filter with type: {:?}", filter_type);
match &filter_type.field_type {
FieldType::RichText => {
self.cell_filter_cache
.write()
.insert(&filter_type, TextFilterPB::from_filter_rev(filter_rev.as_ref()));
}
self.cell_filter_cache.write().insert(
&filter_type,
TextFilterPB::from_filter_rev(filter_rev.as_ref()),
);
},
FieldType::Number => {
self.cell_filter_cache
.write()
.insert(&filter_type, NumberFilterPB::from_filter_rev(filter_rev.as_ref()));
}
self.cell_filter_cache.write().insert(
&filter_type,
NumberFilterPB::from_filter_rev(filter_rev.as_ref()),
);
},
FieldType::DateTime => {
self.cell_filter_cache
.write()
.insert(&filter_type, DateFilterPB::from_filter_rev(filter_rev.as_ref()));
}
self.cell_filter_cache.write().insert(
&filter_type,
DateFilterPB::from_filter_rev(filter_rev.as_ref()),
);
},
FieldType::SingleSelect | FieldType::MultiSelect => {
self.cell_filter_cache
.write()
.insert(&filter_type, SelectOptionFilterPB::from_filter_rev(filter_rev.as_ref()));
}
self.cell_filter_cache.write().insert(
&filter_type,
SelectOptionFilterPB::from_filter_rev(filter_rev.as_ref()),
);
},
FieldType::Checkbox => {
self.cell_filter_cache
.write()
.insert(&filter_type, CheckboxFilterPB::from_filter_rev(filter_rev.as_ref()));
}
self.cell_filter_cache.write().insert(
&filter_type,
CheckboxFilterPB::from_filter_rev(filter_rev.as_ref()),
);
},
FieldType::URL => {
self.cell_filter_cache
.write()
.insert(&filter_type, TextFilterPB::from_filter_rev(filter_rev.as_ref()));
}
self.cell_filter_cache.write().insert(
&filter_type,
TextFilterPB::from_filter_rev(filter_rev.as_ref()),
);
},
FieldType::Checklist => {
self.cell_filter_cache
.write()
.insert(&filter_type, ChecklistFilterPB::from_filter_rev(filter_rev.as_ref()));
}
self.cell_filter_cache.write().insert(
&filter_type,
ChecklistFilterPB::from_filter_rev(filter_rev.as_ref()),
);
},
}
}
}
@ -336,8 +378,16 @@ fn filter_row(
let cell_rev = row_rev.cells.get(field_id);
// if the visibility of the cell_rev changes, the visibility of the row changes too.
if let Some(is_visible) = filter_cell(&filter_type, field_rev, cell_rev, cell_data_cache, cell_filter_cache) {
filter_result.visible_by_filter_id.insert(filter_type, is_visible);
if let Some(is_visible) = filter_cell(
&filter_type,
field_rev,
cell_rev,
cell_data_cache,
cell_filter_cache,
) {
filter_result
.visible_by_filter_id
.insert(filter_type, is_visible);
}
}
@ -367,7 +417,7 @@ fn filter_cell(
Err(err) => {
tracing::error!("Deserialize TypeCellData failed: {}", err);
TypeCellData::from_field_type(&filter_type.field_type)
}
},
},
};
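filter_row above records one verdict per filter in visible_by_filter_id. A hypothetical reduction of that bookkeeping, with String keys standing in for FilterType and under the assumption (not visible in the hunk) that a row stays visible only if no filter hides it:

use std::collections::HashMap;

struct FilterResult {
  visible_by_filter_id: HashMap<String, bool>,
}

impl FilterResult {
  fn is_visible(&self) -> bool {
    // Assumed semantics: every registered filter must agree the row is visible.
    self.visible_by_filter_id.values().all(|visible| *visible)
  }
}

fn main() {
  let mut result = FilterResult {
    visible_by_filter_id: HashMap::new(),
  };
  result.visible_by_filter_id.insert("text-filter".to_string(), true);
  result.visible_by_filter_id.insert("checkbox-filter".to_string(), false);
  assert!(!result.is_visible());
}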

View File

@ -50,7 +50,10 @@ impl FilterChangeset {
impl std::convert::From<&DatabaseSettingChangesetParams> for FilterChangeset {
fn from(params: &DatabaseSettingChangesetParams) -> Self {
let insert_filter = params.insert_filter.as_ref().map(|insert_filter_params| FilterType {
let insert_filter = params
.insert_filter
.as_ref()
.map(|insert_filter_params| FilterType {
field_id: insert_filter_params.field_id.clone(),
field_type: insert_filter_params.field_type.into(),
});

View File

@ -4,12 +4,12 @@ use crate::manager::DatabaseUser;
use crate::notification::{send_notification, DatabaseNotification};
use crate::services::block_manager::DatabaseBlockManager;
use crate::services::cell::{
apply_cell_data_changeset, get_type_cell_protobuf, stringify_cell_data, AnyTypeCache, AtomicCellDataCache,
CellProtobufBlob, ToCellChangesetString, TypeCellData,
apply_cell_data_changeset, get_type_cell_protobuf, stringify_cell_data, AnyTypeCache,
AtomicCellDataCache, CellProtobufBlob, ToCellChangesetString, TypeCellData,
};
use crate::services::field::{
default_type_option_builder_from_type, transform_type_option, type_option_builder_from_bytes, FieldBuilder,
RowSingleCellData,
default_type_option_builder_from_type, transform_type_option, type_option_builder_from_bytes,
FieldBuilder, RowSingleCellData,
};
use crate::services::filter::FilterType;
@ -18,11 +18,14 @@ use crate::services::persistence::block_index::BlockIndexCache;
use crate::services::row::{DatabaseBlockRow, DatabaseBlockRowRevision, RowRevisionBuilder};
use crate::services::view_editor::{DatabaseViewChanged, DatabaseViewManager};
use bytes::Bytes;
use flowy_client_sync::client_database::{DatabaseRevisionChangeset, DatabaseRevisionPad, JsonDeserializer};
use flowy_client_sync::client_database::{
DatabaseRevisionChangeset, DatabaseRevisionPad, JsonDeserializer,
};
use flowy_client_sync::errors::{SyncError, SyncResult};
use flowy_error::{FlowyError, FlowyResult};
use flowy_revision::{
RevisionCloudService, RevisionManager, RevisionMergeable, RevisionObjectDeserializer, RevisionObjectSerializer,
RevisionCloudService, RevisionManager, RevisionMergeable, RevisionObjectDeserializer,
RevisionObjectSerializer,
};
use flowy_sqlite::ConnectionPool;
use flowy_task::TaskDispatcher;
@ -68,8 +71,9 @@ impl DatabaseRevisionEditor {
// Block manager
let (block_event_tx, block_event_rx) = broadcast::channel(100);
let block_meta_revs = database_pad.read().await.get_block_meta_revs();
let block_manager =
Arc::new(DatabaseBlockManager::new(&user, block_meta_revs, persistence, block_event_tx).await?);
let block_manager = Arc::new(
DatabaseBlockManager::new(&user, block_meta_revs, persistence, block_event_tx).await?,
);
let delegate = Arc::new(GridViewEditorDelegateImpl {
pad: database_pad.clone(),
block_manager: block_manager.clone(),
@ -134,17 +138,18 @@ impl DatabaseRevisionEditor {
return Ok(());
}
let field_rev = result.unwrap();
self.modify(|grid| {
self
.modify(|grid| {
let changeset = grid.modify_field(field_id, |field| {
let deserializer = TypeOptionJsonDeserializer(field_rev.ty.into());
match deserializer.deserialize(type_option_data) {
Ok(json_str) => {
let field_type = field.ty;
field.insert_type_option_str(&field_type, json_str);
}
},
Err(err) => {
tracing::error!("Deserialize data to type option json failed: {}", err);
}
},
}
Ok(Some(()))
})?;
@ -152,7 +157,8 @@ impl DatabaseRevisionEditor {
})
.await?;
self.view_manager
self
.view_manager
.did_update_view_field_type_option(field_id, old_field_rev)
.await?;
self.notify_did_update_grid_field(field_id).await?;
@ -160,14 +166,21 @@ impl DatabaseRevisionEditor {
}
pub async fn next_field_rev(&self, field_type: &FieldType) -> FlowyResult<FieldRevision> {
let name = format!("Property {}", self.database_pad.read().await.get_fields().len() + 1);
let field_rev = FieldBuilder::from_field_type(field_type).name(&name).build();
let name = format!(
"Property {}",
self.database_pad.read().await.get_fields().len() + 1
);
let field_rev = FieldBuilder::from_field_type(field_type)
.name(&name)
.build();
Ok(field_rev)
}
pub async fn create_new_field_rev(&self, field_rev: FieldRevision) -> FlowyResult<()> {
let field_id = field_rev.id.clone();
self.modify(|grid| Ok(grid.create_field_rev(field_rev, None)?)).await?;
self
.modify(|grid| Ok(grid.create_field_rev(field_rev, None)?))
.await?;
self.notify_did_insert_grid_field(&field_id).await?;
Ok(())
@ -183,7 +196,8 @@ impl DatabaseRevisionEditor {
let type_option_builder = type_option_builder_from_bytes(type_option_data, field_type);
field_rev.insert_type_option(type_option_builder.serializer());
}
self.modify(|grid| Ok(grid.create_field_rev(field_rev.clone(), None)?))
self
.modify(|grid| Ok(grid.create_field_rev(field_rev.clone(), None)?))
.await?;
self.notify_did_insert_grid_field(&field_rev.id).await?;
@ -196,7 +210,8 @@ impl DatabaseRevisionEditor {
pub async fn update_field(&self, params: FieldChangesetParams) -> FlowyResult<()> {
let field_id = params.field_id.clone();
self.modify(|grid| {
self
.modify(|grid| {
let changeset = grid.modify_field(&params.field_id, |field| {
if let Some(name) = params.name {
field.name = name;
@ -231,7 +246,8 @@ impl DatabaseRevisionEditor {
{
let mut is_changed = false;
let old_field_rev = self.get_field_rev(field_id).await;
self.modify(|grid| {
self
.modify(|grid| {
let changeset = grid.modify_field(field_id, |field_rev| {
f(field_rev).map_err(|e| SyncError::internal().context(e))
})?;
@ -246,7 +262,7 @@ impl DatabaseRevisionEditor {
.did_update_view_field_type_option(field_id, old_field_rev)
.await
{
Ok(_) => {}
Ok(_) => {},
Err(e) => tracing::error!("View manager update field failed: {:?}", e),
}
self.notify_did_update_grid_field(field_id).await?;
@ -255,7 +271,9 @@ impl DatabaseRevisionEditor {
}
pub async fn delete_field(&self, field_id: &str) -> FlowyResult<()> {
self.modify(|grid_pad| Ok(grid_pad.delete_field_rev(field_id)?)).await?;
self
.modify(|grid_pad| Ok(grid_pad.delete_field_rev(field_id)?))
.await?;
let field_order = FieldIdPB::from(field_id);
let notified_changeset = DatabaseFieldChangesetPB::delete(&self.database_id, vec![field_order]);
self.notify_did_update_grid(notified_changeset).await?;
@ -278,7 +296,11 @@ impl DatabaseRevisionEditor {
/// * `field_id`: the id of the field
/// * `new_field_type`: the new field type of the field
///
pub async fn switch_to_field_type(&self, field_id: &str, new_field_type: &FieldType) -> FlowyResult<()> {
pub async fn switch_to_field_type(
&self,
field_id: &str,
new_field_type: &FieldType,
) -> FlowyResult<()> {
//
let make_default_type_option = || -> String {
return default_type_option_builder_from_type(new_field_type)
@ -286,13 +308,20 @@ impl DatabaseRevisionEditor {
.json_str();
};
let type_option_transform =
|old_field_type: FieldTypeRevision, old_type_option: Option<String>, new_type_option: String| {
let type_option_transform = |old_field_type: FieldTypeRevision,
old_type_option: Option<String>,
new_type_option: String| {
let old_field_type: FieldType = old_field_type.into();
transform_type_option(&new_type_option, new_field_type, old_type_option, old_field_type)
transform_type_option(
&new_type_option,
new_field_type,
old_type_option,
old_field_type,
)
};
self.modify(|grid| {
self
.modify(|grid| {
Ok(grid.switch_to_field(
field_id,
new_field_type.clone(),
@ -309,19 +338,31 @@ impl DatabaseRevisionEditor {
pub async fn duplicate_field(&self, field_id: &str) -> FlowyResult<()> {
let duplicated_field_id = gen_field_id();
self.modify(|grid| Ok(grid.duplicate_field_rev(field_id, &duplicated_field_id)?))
self
.modify(|grid| Ok(grid.duplicate_field_rev(field_id, &duplicated_field_id)?))
.await?;
self.notify_did_insert_grid_field(&duplicated_field_id).await?;
self
.notify_did_insert_grid_field(&duplicated_field_id)
.await?;
Ok(())
}
pub async fn get_field_rev(&self, field_id: &str) -> Option<Arc<FieldRevision>> {
let field_rev = self.database_pad.read().await.get_field_rev(field_id)?.1.clone();
let field_rev = self
.database_pad
.read()
.await
.get_field_rev(field_id)?
.1
.clone();
Some(field_rev)
}
pub async fn get_field_revs(&self, field_ids: Option<Vec<String>>) -> FlowyResult<Vec<Arc<FieldRevision>>> {
pub async fn get_field_revs(
&self,
field_ids: Option<Vec<String>>,
) -> FlowyResult<Vec<Arc<FieldRevision>>> {
if field_ids.is_none() {
let field_revs = self.database_pad.read().await.get_field_revs(None)?;
return Ok(field_revs);
@ -329,7 +370,11 @@ impl DatabaseRevisionEditor {
let field_ids = field_ids.unwrap_or_default();
let expected_len = field_ids.len();
let field_revs = self.database_pad.read().await.get_field_revs(Some(field_ids))?;
let field_revs = self
.database_pad
.read()
.await
.get_field_revs(Some(field_ids))?;
if expected_len != 0 && field_revs.len() != expected_len {
tracing::error!(
"This is a bug. The len of the field_revs should equal to {}",
@ -341,13 +386,15 @@ impl DatabaseRevisionEditor {
}
pub async fn create_block(&self, block_meta_rev: GridBlockMetaRevision) -> FlowyResult<()> {
self.modify(|grid_pad| Ok(grid_pad.create_block_meta_rev(block_meta_rev)?))
self
.modify(|grid_pad| Ok(grid_pad.create_block_meta_rev(block_meta_rev)?))
.await?;
Ok(())
}
pub async fn update_block(&self, changeset: GridBlockMetaRevisionChangeset) -> FlowyResult<()> {
self.modify(|grid_pad| Ok(grid_pad.update_block_rev(changeset)?))
self
.modify(|grid_pad| Ok(grid_pad.update_block_rev(changeset)?))
.await?;
Ok(())
}
@ -355,9 +402,14 @@ impl DatabaseRevisionEditor {
pub async fn create_row(&self, params: CreateRowParams) -> FlowyResult<RowPB> {
let mut row_rev = self.create_row_rev().await?;
self.view_manager.will_create_row(&mut row_rev, &params).await;
self
.view_manager
.will_create_row(&mut row_rev, &params)
.await;
let row_pb = self.create_row_pb(row_rev, params.start_row_id.clone()).await?;
let row_pb = self
.create_row_pb(row_rev, params.start_row_id.clone())
.await?;
self.view_manager.did_create_row(&row_pb, &params).await;
Ok(row_pb)
@ -398,7 +450,10 @@ impl DatabaseRevisionEditor {
/// Returns all the rows in this block.
pub async fn get_row_pbs(&self, view_id: &str, block_id: &str) -> FlowyResult<Vec<RowPB>> {
let rows = self.view_manager.get_row_revs(view_id, block_id).await?;
let rows = rows.into_iter().map(|row_rev| RowPB::from(&row_rev)).collect();
let rows = rows
.into_iter()
.map(|row_rev| RowPB::from(&row_rev))
.collect();
Ok(rows)
}
@ -406,7 +461,10 @@ impl DatabaseRevisionEditor {
let mut all_rows = vec![];
let blocks = self.block_manager.get_blocks(None).await?;
for block in blocks {
let rows = self.view_manager.get_row_revs(view_id, &block.block_id).await?;
let rows = self
.view_manager
.get_row_revs(view_id, &block.block_id)
.await?;
all_rows.extend(rows);
}
Ok(all_rows)
@ -428,7 +486,10 @@ impl DatabaseRevisionEditor {
Ok(())
}
pub async fn subscribe_view_changed(&self, view_id: &str) -> FlowyResult<broadcast::Receiver<DatabaseViewChanged>> {
pub async fn subscribe_view_changed(
&self,
view_id: &str,
) -> FlowyResult<broadcast::Receiver<DatabaseViewChanged>> {
self.view_manager.subscribe_view_changed(view_id).await
}
@ -457,7 +518,10 @@ impl DatabaseRevisionEditor {
let display_str = || async {
let field_rev = self.get_field_rev(&params.field_id).await?;
let field_type: FieldType = field_rev.ty.into();
let cell_rev = self.get_cell_rev(&params.row_id, &params.field_id).await.ok()??;
let cell_rev = self
.get_cell_rev(&params.row_id, &params.field_id)
.await
.ok()??;
let type_cell_data: TypeCellData = cell_rev.try_into().ok()?;
Some(stringify_cell_data(
type_cell_data.cell_str,
@ -475,9 +539,16 @@ impl DatabaseRevisionEditor {
Some(cell_data)
}
async fn get_type_cell_protobuf(&self, params: &CellIdParams) -> Option<(FieldType, CellProtobufBlob)> {
async fn get_type_cell_protobuf(
&self,
params: &CellIdParams,
) -> Option<(FieldType, CellProtobufBlob)> {
let field_rev = self.get_field_rev(&params.field_id).await?;
let (_, row_rev) = self.block_manager.get_row_rev(&params.row_id).await.ok()??;
let (_, row_rev) = self
.block_manager
.get_row_rev(&params.row_id)
.await
.ok()??;
let cell_rev = row_rev.cells.get(&params.field_id)?.clone();
Some(get_type_cell_protobuf(
cell_rev.type_cell_data,
@ -486,18 +557,26 @@ impl DatabaseRevisionEditor {
))
}
pub async fn get_cell_rev(&self, row_id: &str, field_id: &str) -> FlowyResult<Option<CellRevision>> {
pub async fn get_cell_rev(
&self,
row_id: &str,
field_id: &str,
) -> FlowyResult<Option<CellRevision>> {
match self.block_manager.get_row_rev(row_id).await? {
None => Ok(None),
Some((_, row_rev)) => {
let cell_rev = row_rev.cells.get(field_id).cloned();
Ok(cell_rev)
}
},
}
}
/// Returns the list of cells corresponding to the given field.
pub async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> FlowyResult<Vec<RowSingleCellData>> {
pub async fn get_cells_for_field(
&self,
view_id: &str,
field_id: &str,
) -> FlowyResult<Vec<RowSingleCellData>> {
let view_editor = self.view_manager.get_view_editor(view_id).await?;
view_editor.get_cells_for_field(field_id).await
}
@ -513,14 +592,22 @@ impl DatabaseRevisionEditor {
None => {
let msg = format!("Field with id:{} not found", &field_id);
Err(FlowyError::internal().context(msg))
}
},
Some((_, field_rev)) => {
tracing::trace!("Cell changeset: id:{} / value:{:?}", &field_id, cell_changeset);
tracing::trace!(
"Cell changeset: id:{} / value:{:?}",
&field_id,
cell_changeset
);
let old_row_rev = self.get_row_rev(row_id).await?.clone();
let cell_rev = self.get_cell_rev(row_id, field_id).await?;
// Update the changeset.data property with the return value.
let type_cell_data =
apply_cell_data_changeset(cell_changeset, cell_rev, field_rev, Some(self.cell_data_cache.clone()))?;
let type_cell_data = apply_cell_data_changeset(
cell_changeset,
cell_rev,
field_rev,
Some(self.cell_data_cache.clone()),
)?;
let cell_changeset = CellChangesetPB {
database_id: self.database_id.clone(),
row_id: row_id.to_owned(),
@ -530,7 +617,7 @@ impl DatabaseRevisionEditor {
self.block_manager.update_cell(cell_changeset).await?;
self.view_manager.did_update_row(old_row_rev, row_id).await;
Ok(())
}
},
}
}
@ -541,7 +628,8 @@ impl DatabaseRevisionEditor {
field_id: String,
cell_changeset: T,
) -> FlowyResult<()> {
self.update_cell_with_changeset(&row_id, &field_id, cell_changeset)
self
.update_cell_with_changeset(&row_id, &field_id, cell_changeset)
.await
}
@ -550,7 +638,10 @@ impl DatabaseRevisionEditor {
Ok(block_meta_revs)
}
pub async fn get_blocks(&self, block_ids: Option<Vec<String>>) -> FlowyResult<Vec<DatabaseBlockRowRevision>> {
pub async fn get_blocks(
&self,
block_ids: Option<Vec<String>>,
) -> FlowyResult<Vec<DatabaseBlockRowRevision>> {
let block_ids = match block_ids {
None => self
.database_pad
@ -576,7 +667,11 @@ impl DatabaseRevisionEditor {
pub async fn get_database(&self, view_id: &str) -> FlowyResult<DatabasePB> {
let pad = self.database_pad.read().await;
let fields = pad.get_field_revs(None)?.iter().map(FieldIdPB::from).collect();
let fields = pad
.get_field_revs(None)?
.iter()
.map(FieldIdPB::from)
.collect();
let mut all_rows = vec![];
for block_rev in pad.get_block_meta_revs() {
if let Ok(rows) = self.get_row_pbs(view_id, &block_rev.block_id).await {
@ -596,13 +691,15 @@ impl DatabaseRevisionEditor {
}
pub async fn get_all_filters(&self) -> FlowyResult<Vec<FilterPB>> {
Ok(self
Ok(
self
.view_manager
.get_all_filters()
.await?
.into_iter()
.map(|filter| FilterPB::from(filter.as_ref()))
.collect())
.collect(),
)
}
pub async fn get_filters(&self, filter_id: FilterType) -> FlowyResult<Vec<Arc<FilterRevision>>> {
@ -620,13 +717,15 @@ impl DatabaseRevisionEditor {
}
pub async fn get_all_sorts(&self, view_id: &str) -> FlowyResult<Vec<SortPB>> {
Ok(self
Ok(
self
.view_manager
.get_all_sorts(view_id)
.await?
.into_iter()
.map(|sort| SortPB::from(sort.as_ref()))
.collect())
.collect(),
)
}
pub async fn delete_all_sorts(&self, view_id: &str) -> FlowyResult<()> {
@ -667,14 +766,15 @@ impl DatabaseRevisionEditor {
) {
(Some(from_index), Some(to_index)) => {
tracing::trace!("Move row from {} to {}", from_index, to_index);
self.block_manager
self
.block_manager
.move_row(row_rev.clone(), from_index, to_index)
.await?;
}
},
(_, None) => tracing::warn!("Can not find the from row id: {}", from_row_id),
(None, _) => tracing::warn!("Can not find the to row id: {}", to_row_id),
}
}
},
}
Ok(())
}
@ -691,7 +791,8 @@ impl DatabaseRevisionEditor {
None => tracing::warn!("Move row failed, can not find the row:{}", from_row_id),
Some((_, row_rev)) => {
let block_manager = self.block_manager.clone();
self.view_manager
self
.view_manager
.move_group_row(row_rev, to_group_id, to_row_id.clone(), |row_changeset| {
to_fut(async move {
tracing::trace!("Row data changed: {:?}", row_changeset);
@ -708,14 +809,14 @@ impl DatabaseRevisionEditor {
for cell_changeset in cell_changesets {
match block_manager.update_cell(cell_changeset).await {
Ok(_) => {}
Ok(_) => {},
Err(e) => tracing::error!("Apply cell changeset error:{:?}", e),
}
}
})
})
.await?;
}
},
}
Ok(())
}
@ -728,7 +829,10 @@ impl DatabaseRevisionEditor {
to_index,
} = params;
self.modify(|grid_pad| Ok(grid_pad.move_field(&field_id, from_index as usize, to_index as usize)?))
self
.modify(|grid_pad| {
Ok(grid_pad.move_field(&field_id, from_index as usize, to_index as usize)?)
})
.await?;
if let Some((index, field_rev)) = self.database_pad.read().await.get_field_rev(&field_id) {
let delete_field_order = FieldIdPB::from(field_id);
@ -761,7 +865,9 @@ impl DatabaseRevisionEditor {
let duplicated_block_id = &duplicated_blocks[index].block_id;
tracing::trace!("Duplicate block:{} meta data", duplicated_block_id);
let duplicated_block_meta_data = grid_block_meta_editor.duplicate_block(duplicated_block_id).await;
let duplicated_block_meta_data = grid_block_meta_editor
.duplicate_block(duplicated_block_id)
.await;
blocks_meta_data.push(duplicated_block_meta_data);
}
} else {
@ -791,7 +897,11 @@ impl DatabaseRevisionEditor {
Ok(row_rev)
}
async fn create_row_pb(&self, row_rev: RowRevision, start_row_id: Option<String>) -> FlowyResult<RowPB> {
async fn create_row_pb(
&self,
row_rev: RowRevision,
start_row_id: Option<String>,
) -> FlowyResult<RowPB> {
let row_pb = RowPB::from(&row_rev);
let block_id = row_rev.block_id.clone();
@ -806,7 +916,8 @@ impl DatabaseRevisionEditor {
async fn modify<F>(&self, f: F) -> FlowyResult<()>
where
F: for<'a> FnOnce(&'a mut DatabaseRevisionPad) -> FlowyResult<Option<DatabaseRevisionChangeset>>,
F:
for<'a> FnOnce(&'a mut DatabaseRevisionPad) -> FlowyResult<Option<DatabaseRevisionChangeset>>,
{
let mut write_guard = self.database_pad.write().await;
if let Some(changeset) = f(&mut write_guard)? {
@ -816,7 +927,10 @@ impl DatabaseRevisionEditor {
}
async fn apply_change(&self, change: DatabaseRevisionChangeset) -> FlowyResult<()> {
let DatabaseRevisionChangeset { operations: delta, md5 } = change;
let DatabaseRevisionChangeset {
operations: delta,
md5,
} = change;
let data = delta.json_bytes();
let _ = self.rev_manager.add_local_revision(data, md5).await?;
Ok(())
@ -833,7 +947,8 @@ impl DatabaseRevisionEditor {
async fn notify_did_insert_grid_field(&self, field_id: &str) -> FlowyResult<()> {
if let Some((index, field_rev)) = self.database_pad.read().await.get_field_rev(field_id) {
let index_field = IndexFieldPB::from_field_rev(field_rev, index);
let notified_changeset = DatabaseFieldChangesetPB::insert(&self.database_id, vec![index_field]);
let notified_changeset =
DatabaseFieldChangesetPB::insert(&self.database_id, vec![index_field]);
self.notify_did_update_grid(notified_changeset).await?;
}
Ok(())
@ -849,7 +964,8 @@ impl DatabaseRevisionEditor {
.map(|(index, field)| (index, field.clone()))
{
let updated_field = FieldPB::from(field_rev);
let notified_changeset = DatabaseFieldChangesetPB::update(&self.database_id, vec![updated_field.clone()]);
let notified_changeset =
DatabaseFieldChangesetPB::update(&self.database_id, vec![updated_field.clone()]);
self.notify_did_update_grid(notified_changeset).await?;
send_notification(field_id, DatabaseNotification::DidUpdateField)
@ -883,7 +999,10 @@ pub struct GridRevisionSerde();
impl RevisionObjectDeserializer for GridRevisionSerde {
type Output = DatabaseRevisionPad;
fn deserialize_revisions(_object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
fn deserialize_revisions(
_object_id: &str,
revisions: Vec<Revision>,
) -> FlowyResult<Self::Output> {
let pad = DatabaseRevisionPad::from_revisions(revisions)?;
Ok(pad)
}
@ -912,7 +1031,11 @@ impl GridRevisionCloudService {
impl RevisionCloudService for GridRevisionCloudService {
#[tracing::instrument(level = "trace", skip(self))]
fn fetch_object(&self, _user_id: &str, _object_id: &str) -> FutureResult<Vec<Revision>, FlowyError> {
fn fetch_object(
&self,
_user_id: &str,
_object_id: &str,
) -> FutureResult<Vec<Revision>, FlowyError> {
FutureResult::new(async move { Ok(vec![]) })
}
}
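Most of the churn in this file is the same mechanical rewrapping, but the modify/apply_change pair in the hunks above is worth sketching: take the write lock on the pad, let the closure produce an optional changeset, and persist it when one comes back. The types below are simplified stand-ins, assuming a tokio RwLock (consistent with the .read().await/.write().await calls above):

use tokio::sync::RwLock;

struct Pad(u32);
struct Changeset(u32);

async fn modify<F>(pad: &RwLock<Pad>, f: F) -> Result<(), String>
where
  F: FnOnce(&mut Pad) -> Result<Option<Changeset>, String>,
{
  let mut guard = pad.write().await;
  if let Some(Changeset(revision)) = f(&mut *guard)? {
    // Stand-in for apply_change(), which serializes the delta and stores it
    // as a local revision in the real editor.
    println!("persist revision {}", revision);
  }
  Ok(())
}

#[tokio::main]
async fn main() -> Result<(), String> {
  let pad = RwLock::new(Pad(0));
  modify(&pad, |pad| {
    pad.0 += 1;
    Ok(Some(Changeset(pad.0)))
  })
  .await
}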

View File

@ -26,9 +26,12 @@ impl DatabaseViewEditorDelegate for GridViewEditorDelegateImpl {
match pad.read().await.get_field_revs(field_ids) {
Ok(field_revs) => field_revs,
Err(e) => {
tracing::error!("[GridViewRevisionDelegate] get field revisions failed: {}", e);
tracing::error!(
"[GridViewRevisionDelegate] get field revisions failed: {}",
e
);
vec![]
}
},
}
})
}

Some files were not shown because too many files have changed in this diff.