fix: import document that contains refs (#6551)

* fix: import document that contains refs

* chore: clippy
This commit is contained in:
Nathan.fooo 2024-10-15 23:55:27 +08:00 committed by GitHub
parent 2378c0c441
commit c6f042830f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 219 additions and 32 deletions

View File

@ -1535,10 +1535,10 @@ packages:
dependency: transitive
description:
name: platform
sha256: "9b71283fc13df574056616011fb138fd3b793ea47cc509c189a6c3fa5f8a1a65"
sha256: "12220bb4b65720483f8fa9450b4332347737cf8213dd2840d8b2c823e47243ec"
url: "https://pub.dev"
source: hosted
version: "3.1.5"
version: "3.1.4"
plugin_platform_interface:
dependency: "direct dev"
description:
@ -1933,10 +1933,10 @@ packages:
dependency: transitive
description:
name: string_scanner
sha256: "688af5ed3402a4bde5b3a6c15fd768dbf2621a614950b17f04626c431ab3c4c3"
sha256: "556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde"
url: "https://pub.dev"
source: hosted
version: "1.3.0"
version: "1.2.0"
string_validator:
dependency: "direct main"
description:
@ -2238,10 +2238,10 @@ packages:
dependency: transitive
description:
name: vm_service
sha256: "5c5f338a667b4c644744b661f309fb8080bb94b18a7e91ef1dbd343bed00ed6d"
sha256: "3923c89304b715fb1eb6423f017651664a03bf5f4b29983627c4da791f74a4ec"
url: "https://pub.dev"
source: hosted
version: "14.2.5"
version: "14.2.1"
watcher:
dependency: transitive
description:

View File

@ -1,13 +1,100 @@
use crate::util::unzip;
use assert_json_diff::assert_json_include;
use collab::core::collab::DataSource;
use collab::core::origin::CollabOrigin;
use collab::preclude::{Any, Collab};
use collab_database::rows::database_row_document_id_from_row_id;
use collab_document::blocks::TextDelta;
use collab_document::document::Document;
use event_integration_test::user_event::use_localhost_af_cloud;
use event_integration_test::EventIntegrationTest;
use flowy_core::DEFAULT_NAME;
use flowy_folder::entities::ViewLayoutPB;
use flowy_user::errors::ErrorCode;
use serde_json::{json, Value};
use std::env::temp_dir;
// Integration test: importing an AppFlowy data folder whose documents contain
// references (mention deltas pointing at other pages, and linked-database
// blocks) must remap those references to the freshly generated view ids.
#[tokio::test]
async fn import_appflowy_data_with_ref_views_test() {
// Fixture zip "data_ref_doc" lives under ./tests/asset; unzip returns a
// cleaner guard that removes the extracted folder on drop.
let import_container_name = "data_ref_doc".to_string();
let (_cleaner, user_db_path) = unzip("./tests/asset", &import_container_name).unwrap();
// Run against a local AppFlowy Cloud instance with a fresh signed-up user.
use_localhost_af_cloud().await;
let test = EventIntegrationTest::new_with_name(DEFAULT_NAME).await;
let _ = test.af_cloud_sign_up().await;
let views = test.get_all_workspace_views().await;
// views[1] is assumed to be the shared space the import lands in — TODO confirm
// this index against the default workspace layout.
let shared_space_id = views[1].id.clone();
test
.import_appflowy_data(
user_db_path.to_str().unwrap().to_string(),
Some(import_container_name.clone()),
)
.await
.unwrap();
// The import should show up as exactly one child view named after the
// imported container.
let general_space = test.get_view(&shared_space_id).await;
let shared_sub_views = &general_space.child_views;
assert_eq!(shared_sub_views.len(), 1);
assert_eq!(shared_sub_views[0].name, import_container_name);
let imported_view_id = shared_sub_views[0].id.clone();
let imported_sub_views = test.get_view(&imported_view_id).await.child_views;
assert_eq!(imported_sub_views.len(), 1);
// Rebuild the imported document from its doc state to inspect its blocks.
let imported_get_started_view_id = imported_sub_views[0].id.clone();
let doc_state = test
.get_document_doc_state(&imported_get_started_view_id)
.await;
let collab = Collab::new_with_source(
CollabOrigin::Empty,
&imported_get_started_view_id,
DataSource::DocStateV1(doc_state),
vec![],
false,
)
.unwrap();
let document = Document::open(collab).unwrap();
let page_id = document.get_page_id().unwrap();
let block_ids = document.get_block_children_ids(&page_id);
// Collect every referenced id: `page_ids` from mention deltas,
// `link_ids` from block data `view_id` fields (linked databases).
let mut page_ids = vec![];
let mut link_ids = vec![];
for block_id in block_ids.iter() {
// Process block deltas
if let Some(mut block_deltas) = document.get_block_delta(block_id).map(|t| t.1) {
for d in block_deltas.iter_mut() {
if let TextDelta::Inserted(_, Some(attrs)) = d {
if let Some(Any::Map(mention)) = attrs.get_mut("mention") {
if let Some(page_id) = mention.get("page_id").map(|v| v.to_string()) {
page_ids.push(page_id);
}
}
}
}
}
if let Some((_, data)) = document.get_block_data(block_id) {
if let Some(link_view_id) = data.get("view_id").and_then(|v| v.as_str()) {
link_ids.push(link_view_id.to_string());
}
}
}
// Each collected id must resolve to a real view in the NEW workspace —
// i.e. the import rewrote the old ids instead of keeping stale ones.
assert_eq!(page_ids.len(), 1);
for page_id in page_ids {
let view = test.get_view(&page_id).await;
assert_eq!(view.name, "1");
let data = serde_json::to_string(&test.get_document_data(&view.id).await).unwrap();
assert!(data.contains("hello world"));
}
assert_eq!(link_ids.len(), 1);
for link_id in link_ids {
let database_view = test.get_view(&link_id).await;
assert_eq!(database_view.layout, ViewLayoutPB::Grid);
assert_eq!(database_view.name, "Untitled");
}
}
#[tokio::test]
async fn import_appflowy_data_folder_into_new_view_test() {
let import_container_name = "040_local".to_string();

View File

@ -44,7 +44,7 @@ use std::collections::HashMap;
use std::fmt::{Display, Formatter};
use std::sync::{Arc, Weak};
use tokio::sync::RwLockWriteGuard;
use tracing::{error, info, instrument, warn};
use tracing::{error, info, instrument};
pub trait FolderUser: Send + Sync {
fn user_id(&self) -> Result<i64, FlowyError>;
@ -399,7 +399,7 @@ impl FolderManager {
// Set the parent view ID for the child views.
if let Some(parent_view_id) = parent_view_id {
// If a valid parent_view_id is provided, set it for each child view.
if folder.get_view(&parent_view_id).is_some() {
if folder.get_view(parent_view_id).is_some() {
info!(
"[AppFlowyData]: Attach parent-child views with the latest view: {:?}",
parent_view_id
@ -427,7 +427,7 @@ impl FolderManager {
#[instrument(level = "info", skip_all, err)]
fn insert_into_latest_view(
views: &mut Vec<ParentChildViews>,
views: &mut [ParentChildViews],
folder: &mut RwLockWriteGuard<Folder>,
) -> Result<(), FlowyError> {
let workspace_id = folder

View File

@ -11,7 +11,7 @@ use collab::core::origin::CollabOrigin;
use collab::preclude::updates::decoder::Decode;
use collab::preclude::updates::encoder::Encode;
use collab::preclude::{Collab, Doc, ReadTxn, StateVector, Transact, Update};
use collab::preclude::{Any, Collab, Doc, ReadTxn, StateVector, Transact, Update};
use collab_database::database::{
is_database_collab, mut_database_views_with_collab, reset_inline_view_id,
};
@ -35,6 +35,9 @@ use flowy_user_pub::session::Session;
use rayon::prelude::*;
use std::collections::{HashMap, HashSet};
use collab_document::blocks::TextDelta;
use collab_document::document::Document;
use serde_json::json;
use std::ops::{Deref, DerefMut};
use std::path::Path;
use std::sync::{Arc, Weak};
@ -274,7 +277,7 @@ pub(crate) fn generate_import_data(
}
}
let gen_collabs = all_imported_object_ids
let gen_document_collabs = all_imported_object_ids
.par_iter()
.filter_map(|object_id| {
let f = || {
@ -285,6 +288,7 @@ pub(crate) fn generate_import_data(
new_object_id,
imported_collab,
CollabType::Document,
&old_to_new_id_map,
)
};
match f() {
@ -301,9 +305,9 @@ pub(crate) fn generate_import_data(
})
.collect::<Vec<_>>();
for gen_collab in gen_collabs {
document_object_ids.insert(gen_collab.object_id.clone());
write_gen_collab(gen_collab, current_collab_db_write_txn);
for document_collab in gen_document_collabs {
document_object_ids.insert(document_collab.object_id.clone());
write_gen_collab(document_collab, current_collab_db_write_txn);
}
let (mut views, orphan_views) = match imported_folder.source {
@ -639,6 +643,7 @@ where
new_object_id,
imported_collab,
CollabType::Document,
old_to_new_id_map,
)
})
.collect::<Vec<_>>();
@ -706,7 +711,7 @@ where
}
});
let gen_collabs = imported_row_ids
let gen_database_row_collabs = imported_row_ids
.par_iter()
.filter_map(|imported_row_id| {
let imported_collab = imported_collab_by_oid.get(imported_row_id)?;
@ -723,12 +728,13 @@ where
new_row_id,
imported_collab,
CollabType::DatabaseRow,
old_to_new_id_map,
),
}
})
.collect::<Vec<_>>();
for gen_collab in gen_collabs {
for gen_collab in gen_database_row_collabs {
write_gen_collab(gen_collab, collab_write_txn);
}
}
@ -806,29 +812,55 @@ fn gen_sv_and_doc_state(
object_id: &str,
collab: &Collab,
collab_type: CollabType,
ids_map: &OldToNewIdMap,
) -> Option<GenCollab> {
let encoded_collab = collab
.encode_collab_v1(|collab| collab_type.validate_require_data(collab))
.ok()?;
let update = Update::decode_v1(&encoded_collab.doc_state).ok()?;
let doc = Doc::new();
let mut txn = doc.transact_mut();
if let Err(e) = txn.apply_update(update) {
error!(
"Collab {} failed to apply update: {}",
collab.object_id(),
e
);
return None;
}
drop(txn);
let txn = doc.transact();
let state_vector = txn.state_vector();
let doc_state = txn.encode_state_as_update_v1(&StateVector::default());
let (state_vector, doc_state) = match collab_type {
CollabType::Document => {
let collab = Collab::new_with_source(
CollabOrigin::Empty,
object_id,
encoded_collab.into(),
vec![],
false,
)
.ok()?;
let mut document = Document::open(collab).ok()?;
if let Err(err) = replace_document_ref_ids(&mut document, ids_map) {
error!("[AppFlowyData]: replace document ref ids failed: {}", err);
}
let encode_collab = document.encode_collab().ok()?;
(
encode_collab.state_vector.to_vec(),
encode_collab.doc_state.to_vec(),
)
},
_ => {
let update = Update::decode_v1(&encoded_collab.doc_state).ok()?;
let doc = Doc::new();
let mut txn = doc.transact_mut();
if let Err(e) = txn.apply_update(update) {
error!(
"Collab {} failed to apply update: {}",
collab.object_id(),
e
);
return None;
}
drop(txn);
let txn = doc.transact();
let state_vector = txn.state_vector();
let doc_state = txn.encode_state_as_update_v1(&StateVector::default());
(state_vector.encode_v1(), doc_state)
},
};
Some(GenCollab {
uid,
sv: state_vector.encode_v1(),
sv: state_vector,
doc_state,
object_id: object_id.to_string(),
})
@ -1069,6 +1101,7 @@ impl OldToNewIdMap {
Self::default()
}
fn exchange_new_id(&mut self, old_id: &str) -> String {
println!("old_id: {}", old_id);
let view_id = self
.0
.entry(old_id.to_string())
@ -1238,3 +1271,70 @@ where
})
.collect()
}
/// Rewrites view references embedded in an imported document so they point at
/// the newly generated view ids instead of the ids from the source workspace.
///
/// Two kinds of references are patched on every direct child block of the page:
/// * the `page_id` field of `mention` attributes inside text deltas, and
/// * the `view_id` / `parent_id` fields of the block's data map
///   (e.g. linked-database blocks).
///
/// Ids with no entry in `ids_map` are left untouched. Returns an error only
/// when writing patched block data back into the document fails; delta
/// write-backs are best-effort.
fn replace_document_ref_ids(
  document: &mut Document,
  ids_map: &OldToNewIdMap,
) -> Result<(), anyhow::Error> {
  if let Some(page_id) = document.get_page_id() {
    // Get all block children and process them
    let block_ids = document.get_block_children_ids(&page_id);
    for block_id in block_ids.iter() {
      // Patch `mention.page_id` inside the block's text deltas.
      let block_delta = document.get_block_delta(block_id).map(|t| t.1);
      if let Some(mut block_deltas) = block_delta {
        let mut is_change = false;
        for d in block_deltas.iter_mut() {
          if let TextDelta::Inserted(_, Some(attrs)) = d {
            if let Some(Any::Map(mention)) = attrs.get_mut("mention") {
              if let Some(page_id) = mention.get("page_id").map(|v| v.to_string()) {
                if let Some(new_page_id) = ids_map.get_exchanged_id(&page_id) {
                  // The attribute map is shared (Arc); clone-on-write before mutating.
                  let mention = Arc::make_mut(mention);
                  mention.insert("page_id".to_string(), Any::from(new_page_id.clone()));
                  is_change = true;
                }
              }
            }
          }
        }
        // Only write the deltas back when something actually changed.
        if is_change {
          let _ = document.set_block_delta(block_id, block_deltas);
        }
      }
      // Patch `view_id` / `parent_id` stored in the block's data map.
      if let Some((_block_type, mut data)) = document.get_block_data(block_id) {
        let mut updated = false;
        if let Some(view_id) = data.get("view_id").and_then(|v| v.as_str()) {
          if let Some(new_view_id) = ids_map.get_exchanged_id(view_id) {
            data.insert("view_id".to_string(), json!(new_view_id));
            updated = true;
          }
        }
        if let Some(parent_id) = data.get("parent_id").and_then(|v| v.as_str()) {
          if let Some(new_parent_id) = ids_map.get_exchanged_id(parent_id) {
            data.insert("parent_id".to_string(), json!(new_parent_id));
            updated = true;
          }
        }
        // Apply updates only if any changes were made
        if updated {
          document.update_block(block_id, data).map_err(|err| {
            anyhow::Error::msg(format!(
              "[AppFlowyData]: update document block data: {}",
              err
            ))
          })?;
        }
      }
    }
  }
  Ok(())
}