chore: support importing AppFlowy data into the current workspace (#4254)

* chore: support importing AppFlowy data into the current workspace

* refactor: code

* chore: remove unused reference

* chore: update url
This commit is contained in:
Nathan.fooo 2023-12-30 13:44:09 +08:00 committed by GitHub
parent 8ccd1ec72c
commit 36cf653d64
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 527 additions and 286 deletions

View File

@ -3,6 +3,7 @@ import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart'; import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-folder2/import.pb.dart'; import 'package:appflowy_backend/protobuf/flowy-folder2/import.pb.dart';
import 'package:dartz/dartz.dart'; import 'package:dartz/dartz.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter_bloc/flutter_bloc.dart'; import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart'; import 'package:freezed_annotation/freezed_annotation.dart';
@ -14,7 +15,11 @@ class SettingFileImporterBloc
on<SettingFileImportEvent>((event, emit) async { on<SettingFileImportEvent>((event, emit) async {
await event.when( await event.when(
importAppFlowyDataFolder: (String path) async { importAppFlowyDataFolder: (String path) async {
final payload = ImportAppFlowyDataPB.create()..path = path; final formattedDate =
DateFormat('yyyy-MM-dd HH:mm:ss').format(DateTime.now());
final payload = ImportAppFlowyDataPB.create()
..path = path
..importContainerName = "appflowy_import_$formattedDate";
final result = final result =
await FolderEventImportAppFlowyDataFolder(payload).send(); await FolderEventImportAppFlowyDataFolder(payload).send();
result.fold( result.fold(

View File

@ -2,12 +2,15 @@ import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/startup/startup.dart'; import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/workspace/application/settings/setting_file_importer_bloc.dart'; import 'package:appflowy/workspace/application/settings/setting_file_importer_bloc.dart';
import 'package:appflowy/workspace/presentation/home/toast.dart'; import 'package:appflowy/workspace/presentation/home/toast.dart';
import 'package:appflowy_backend/log.dart';
import 'package:easy_localization/easy_localization.dart'; import 'package:easy_localization/easy_localization.dart';
import 'package:flowy_infra/file_picker/file_picker_service.dart'; import 'package:flowy_infra/file_picker/file_picker_service.dart';
import 'package:flowy_infra_ui/flowy_infra_ui.dart'; import 'package:flowy_infra_ui/flowy_infra_ui.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart'; import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:fluttertoast/fluttertoast.dart'; import 'package:fluttertoast/fluttertoast.dart';
import 'package:url_launcher/url_launcher.dart';
class ImportAppFlowyData extends StatefulWidget { class ImportAppFlowyData extends StatefulWidget {
const ImportAppFlowyData({super.key}); const ImportAppFlowyData({super.key});
@ -46,20 +49,11 @@ class _ImportAppFlowyDataState extends State<ImportAppFlowyData> {
}, },
child: BlocBuilder<SettingFileImporterBloc, SettingFileImportState>( child: BlocBuilder<SettingFileImporterBloc, SettingFileImportState>(
builder: (context, state) { builder: (context, state) {
return Column( return const Column(
children: [ children: [
const ImportAppFlowyDataButton(), ImportAppFlowyDataButton(),
const VSpace(6), VSpace(6),
IntrinsicHeight( AppFlowyDataImportTip(),
child: Opacity(
opacity: 0.6,
child: FlowyText.medium(
LocaleKeys.settings_menu_importAppFlowyDataDescription
.tr(),
maxLines: 13,
),
),
),
], ],
); );
}, },
@ -76,6 +70,45 @@ class _ImportAppFlowyDataState extends State<ImportAppFlowyData> {
} }
} }
class AppFlowyDataImportTip extends StatelessWidget {
final url = "https://docs.appflowy.io/docs/appflowy/product/data-storage";
const AppFlowyDataImportTip({super.key});
@override
Widget build(BuildContext context) {
return Opacity(
opacity: 0.6,
child: RichText(
text: TextSpan(
children: <TextSpan>[
TextSpan(
text: LocaleKeys.settings_menu_importAppFlowyDataDescription.tr(),
style: Theme.of(context).textTheme.bodySmall!,
),
TextSpan(
text: " ${LocaleKeys.settings_menu_importGuide.tr()} ",
style: Theme.of(context).textTheme.bodyMedium!.copyWith(
color: Theme.of(context).colorScheme.primary,
decoration: TextDecoration.underline,
),
recognizer: TapGestureRecognizer()..onTap = () => _launchURL(),
),
],
),
),
);
}
Future<void> _launchURL() async {
final uri = Uri.parse(url);
if (await canLaunchUrl(uri)) {
await launchUrl(uri);
} else {
Log.error("Could not launch $url");
}
}
}
class ImportAppFlowyDataButton extends StatefulWidget { class ImportAppFlowyDataButton extends StatefulWidget {
const ImportAppFlowyDataButton({super.key}); const ImportAppFlowyDataButton({super.key});

View File

@ -303,7 +303,8 @@
"importAppFlowyData": "Import Data from External AppFlowy Folder", "importAppFlowyData": "Import Data from External AppFlowy Folder",
"importAppFlowyDataDescription": "Copy data from an external AppFlowy data folder and import it into the current AppFlowy data folder", "importAppFlowyDataDescription": "Copy data from an external AppFlowy data folder and import it into the current AppFlowy data folder",
"importSuccess": "Successfully imported the AppFlowy data folder", "importSuccess": "Successfully imported the AppFlowy data folder",
"importFailed": "Importing the AppFlowy data folder failed" "importFailed": "Importing the AppFlowy data folder failed",
"importGuide": "For further details, please check the referenced document"
}, },
"notifications": { "notifications": {
"enableNotifications": { "enableNotifications": {

View File

@ -190,10 +190,14 @@ impl EventIntegrationTest {
Ok(user_profile) Ok(user_profile)
} }
pub async fn import_appflowy_data(&self, path: String, name: &str) -> Result<(), FlowyError> { pub async fn import_appflowy_data(
&self,
path: String,
name: Option<String>,
) -> Result<(), FlowyError> {
let payload = ImportAppFlowyDataPB { let payload = ImportAppFlowyDataPB {
path, path,
import_container_name: name.to_string(), import_container_name: name,
}; };
match EventBuilder::new(self.clone()) match EventBuilder::new(self.clone())
.event(FolderEvent::ImportAppFlowyDataFolder) .event(FolderEvent::ImportAppFlowyDataFolder)

View File

@ -78,6 +78,8 @@ async fn migrate_anon_user_data_to_af_cloud_test() {
assert_eq!(user.authenticator, AuthenticatorPB::AppFlowyCloud); assert_eq!(user.authenticator, AuthenticatorPB::AppFlowyCloud);
let user_first_level_views = test.get_all_workspace_views().await; let user_first_level_views = test.get_all_workspace_views().await;
// assert_eq!(user_first_level_views.len(), 2);
println!("user first level views: {:?}", user_first_level_views); println!("user first level views: {:?}", user_first_level_views);
let user_second_level_views = test let user_second_level_views = test
.get_views(&user_first_level_views[0].id) .get_views(&user_first_level_views[0].id)

View File

@ -9,7 +9,7 @@ use serde_json::{json, Value};
use std::env::temp_dir; use std::env::temp_dir;
#[tokio::test] #[tokio::test]
async fn import_appflowy_data_folder_test() { async fn import_appflowy_data_folder_into_new_view_test() {
let import_container_name = "040_local".to_string(); let import_container_name = "040_local".to_string();
let (cleaner, user_db_path) = let (cleaner, user_db_path) =
unzip_history_user_db("./tests/asset", &import_container_name).unwrap(); unzip_history_user_db("./tests/asset", &import_container_name).unwrap();
@ -29,7 +29,7 @@ async fn import_appflowy_data_folder_test() {
test test
.import_appflowy_data( .import_appflowy_data(
user_db_path.to_str().unwrap().to_string(), user_db_path.to_str().unwrap().to_string(),
&import_container_name, Some(import_container_name.clone()),
) )
.await .await
.unwrap(); .unwrap();
@ -65,7 +65,55 @@ async fn import_appflowy_data_folder_test() {
} }
#[tokio::test] #[tokio::test]
async fn import_appflowy_data_folder_test2() { async fn import_appflowy_data_folder_into_current_workspace_test() {
let import_container_name = "040_local".to_string();
let (cleaner, user_db_path) =
unzip_history_user_db("./tests/asset", &import_container_name).unwrap();
// In the 040_local, the structure is:
// workspace:
// view: Document1
// view: Document2
// view: Grid1
// view: Grid2
user_localhost_af_cloud().await;
let test = EventIntegrationTest::new_with_name(DEFAULT_NAME).await;
let _ = test.af_cloud_sign_up().await;
// after sign up, the initial workspace is created, so the structure is:
// workspace:
// view: Getting Started
test
.import_appflowy_data(user_db_path.to_str().unwrap().to_string(), None)
.await
.unwrap();
// after import, the structure is:
// workspace:
// view: Getting Started
// view: Document1
// view: Document2
// view: Grid1
// view: Grid2
let views = test.get_all_workspace_views().await;
assert_eq!(views.len(), 2);
assert_eq!(views[1].name, "Document1");
let document_1_child_views = test.get_views(&views[1].id).await.child_views;
assert_eq!(document_1_child_views.len(), 1);
assert_eq!(document_1_child_views[0].name, "Document2");
let document2_child_views = test
.get_views(&document_1_child_views[0].id)
.await
.child_views;
assert_eq!(document2_child_views.len(), 2);
assert_eq!(document2_child_views[0].name, "Grid1");
assert_eq!(document2_child_views[1].name, "Grid2");
drop(cleaner);
}
#[tokio::test]
async fn import_appflowy_data_folder_into_new_view_test2() {
let import_container_name = "040_local_2".to_string(); let import_container_name = "040_local_2".to_string();
let (cleaner, user_db_path) = let (cleaner, user_db_path) =
unzip_history_user_db("./tests/asset", &import_container_name).unwrap(); unzip_history_user_db("./tests/asset", &import_container_name).unwrap();
@ -75,7 +123,7 @@ async fn import_appflowy_data_folder_test2() {
test test
.import_appflowy_data( .import_appflowy_data(
user_db_path.to_str().unwrap().to_string(), user_db_path.to_str().unwrap().to_string(),
&import_container_name, Some(import_container_name.clone()),
) )
.await .await
.unwrap(); .unwrap();
@ -95,7 +143,10 @@ async fn import_empty_appflowy_data_folder_test() {
let test = EventIntegrationTest::new_with_name(DEFAULT_NAME).await; let test = EventIntegrationTest::new_with_name(DEFAULT_NAME).await;
let _ = test.af_cloud_sign_up().await; let _ = test.af_cloud_sign_up().await;
let error = test let error = test
.import_appflowy_data(path.to_str().unwrap().to_string(), "empty_folder") .import_appflowy_data(
path.to_str().unwrap().to_string(),
Some("empty_folder".to_string()),
)
.await .await
.unwrap_err(); .unwrap_err();
assert_eq!(error.code, ErrorCode::AppFlowyDataFolderImportError); assert_eq!(error.code, ErrorCode::AppFlowyDataFolderImportError);
@ -121,7 +172,7 @@ async fn import_appflowy_data_folder_multiple_times_test() {
test test
.import_appflowy_data( .import_appflowy_data(
user_db_path.to_str().unwrap().to_string(), user_db_path.to_str().unwrap().to_string(),
&import_container_name, Some(import_container_name.clone()),
) )
.await .await
.unwrap(); .unwrap();
@ -137,7 +188,7 @@ async fn import_appflowy_data_folder_multiple_times_test() {
test test
.import_appflowy_data( .import_appflowy_data(
user_db_path.to_str().unwrap().to_string(), user_db_path.to_str().unwrap().to_string(),
&import_container_name, Some(import_container_name.clone()),
) )
.await .await
.unwrap(); .unwrap();

View File

@ -3,12 +3,11 @@ use std::convert::TryFrom;
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use bytes::Bytes; use bytes::Bytes;
use collab_entity::CollabType;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use tracing::info;
use collab_integrate::collab_builder::AppFlowyCollabBuilder; use collab_integrate::collab_builder::AppFlowyCollabBuilder;
use collab_integrate::{PersistenceError, RocksCollabDB, YrsDocAction}; use collab_integrate::RocksCollabDB;
use flowy_database2::entities::DatabaseLayoutPB; use flowy_database2::entities::DatabaseLayoutPB;
use flowy_database2::services::share::csv::CSVFormat; use flowy_database2::services::share::csv::CSVFormat;
use flowy_database2::template::{make_default_board, make_default_calendar, make_default_grid}; use flowy_database2::template::{make_default_board, make_default_calendar, make_default_grid};
@ -16,17 +15,17 @@ use flowy_database2::DatabaseManager;
use flowy_document2::entities::DocumentDataPB; use flowy_document2::entities::DocumentDataPB;
use flowy_document2::manager::DocumentManager; use flowy_document2::manager::DocumentManager;
use flowy_document2::parser::json::parser::JsonToDocumentParser; use flowy_document2::parser::json::parser::JsonToDocumentParser;
use flowy_error::{internal_error, ErrorCode, FlowyError}; use flowy_error::FlowyError;
use flowy_folder2::entities::ViewLayoutPB; use flowy_folder2::entities::ViewLayoutPB;
use flowy_folder2::manager::{FolderManager, FolderUser}; use flowy_folder2::manager::{FolderManager, FolderUser};
use flowy_folder2::share::ImportType; use flowy_folder2::share::ImportType;
use flowy_folder2::view_operation::{FolderOperationHandler, FolderOperationHandlers, View}; use flowy_folder2::view_operation::{FolderOperationHandler, FolderOperationHandlers, View};
use flowy_folder2::ViewLayout; use flowy_folder2::ViewLayout;
use flowy_folder_deps::cloud::{FolderCloudService, FolderCollabParams};
use flowy_folder_deps::entities::ImportData; use flowy_folder_deps::entities::ImportData;
use flowy_folder_deps::folder_builder::{ParentChildViews, WorkspaceViewBuilder}; use flowy_folder_deps::folder_builder::{ParentChildViews, WorkspaceViewBuilder};
use flowy_user::manager::UserManager; use flowy_user::manager::UserManager;
use flowy_user::services::data_import::{load_collab_by_oid, ImportDataSource}; use flowy_user::services::data_import::ImportDataSource;
use crate::integrate::server::ServerProvider; use crate::integrate::server::ServerProvider;
use lib_dispatch::prelude::ToBytes; use lib_dispatch::prelude::ToBytes;
@ -45,7 +44,6 @@ impl FolderDepsResolver {
let user: Arc<dyn FolderUser> = Arc::new(FolderUserImpl { let user: Arc<dyn FolderUser> = Arc::new(FolderUserImpl {
user_manager: user_manager.clone(), user_manager: user_manager.clone(),
database_manager: Arc::downgrade(database_manager), database_manager: Arc::downgrade(database_manager),
server_provider: server_provider.clone(),
}); });
let handlers = folder_operation_handlers(document_manager.clone(), database_manager.clone()); let handlers = folder_operation_handlers(document_manager.clone(), database_manager.clone());
@ -81,7 +79,6 @@ fn folder_operation_handlers(
struct FolderUserImpl { struct FolderUserImpl {
user_manager: Weak<UserManager>, user_manager: Weak<UserManager>,
database_manager: Weak<DatabaseManager>, database_manager: Weak<DatabaseManager>,
server_provider: Arc<ServerProvider>,
} }
#[async_trait] #[async_trait]
@ -112,46 +109,29 @@ impl FolderUser for FolderUserImpl {
async fn import_appflowy_data_folder( async fn import_appflowy_data_folder(
&self, &self,
workspace_id: &str,
path: &str, path: &str,
container_name: &str, container_name: Option<String>,
) -> Result<ParentChildViews, FlowyError> { ) -> Result<Vec<ParentChildViews>, FlowyError> {
match (self.user_manager.upgrade(), self.database_manager.upgrade()) { match (self.user_manager.upgrade(), self.database_manager.upgrade()) {
(Some(user_manager), Some(data_manager)) => { (Some(user_manager), Some(data_manager)) => {
let source = ImportDataSource::AppFlowyDataFolder { let source = ImportDataSource::AppFlowyDataFolder {
path: path.to_string(), path: path.to_string(),
container_name: container_name.to_string(), container_name,
}; };
let cloned_user_manager = user_manager.clone(); let import_data = user_manager.import_data_from_source(source).await?;
let import_data =
tokio::task::spawn_blocking(move || cloned_user_manager.import_data(source))
.await
.map_err(internal_error)??;
match import_data { match import_data {
ImportData::AppFlowyDataFolder { ImportData::AppFlowyDataFolder {
view, views,
database_view_ids_by_database_id, database_view_ids_by_database_id,
row_object_ids, row_object_ids: _,
database_object_ids, database_object_ids: _,
document_object_ids, document_object_ids: _,
} => { } => {
let uid = self.user_id()?; let _uid = self.user_id()?;
self
.upload_collab_data(
workspace_id,
row_object_ids,
database_object_ids,
document_object_ids,
uid,
)
.await?;
data_manager data_manager
.track_database(database_view_ids_by_database_id) .track_database(database_view_ids_by_database_id)
.await?; .await?;
Ok(views)
Ok(view)
}, },
} }
}, },
@ -160,106 +140,6 @@ impl FolderUser for FolderUserImpl {
} }
} }
impl FolderUserImpl {
async fn upload_collab_data(
&self,
workspace_id: &str,
row_object_ids: Vec<String>,
database_object_ids: Vec<String>,
document_object_ids: Vec<String>,
uid: i64,
) -> Result<(), FlowyError> {
// Only support uploading the collab data when the current server is AppFlowy Cloud server
if self.server_provider.get_appflowy_cloud_server().is_err() {
return Ok(());
}
let collab_db = self
.collab_db(uid)
.unwrap()
.upgrade()
.ok_or(FlowyError::new(
ErrorCode::Internal,
"Can't get the collab db",
))?;
let object_by_collab_type = tokio::task::spawn_blocking(move || {
let collab_read = collab_db.read_txn();
let mut object_by_collab_type = HashMap::new();
object_by_collab_type.insert(
CollabType::Database,
load_and_process_collab_data(uid, &collab_read, &database_object_ids),
);
object_by_collab_type.insert(
CollabType::Document,
load_and_process_collab_data(uid, &collab_read, &document_object_ids),
);
object_by_collab_type.insert(
CollabType::DatabaseRow,
load_and_process_collab_data(uid, &collab_read, &row_object_ids),
);
object_by_collab_type
})
.await
.map_err(internal_error)?;
// Upload
let mut size_counter = 0;
let mut objects: Vec<FolderCollabParams> = vec![];
let upload_size_limit = 2 * 1024 * 1024;
for (collab_type, encoded_v1_by_oid) in object_by_collab_type {
info!(
"Batch import collab:{} ids: {:?}",
collab_type,
encoded_v1_by_oid.keys(),
);
for (oid, encoded_v1) in encoded_v1_by_oid {
let obj_size = encoded_v1.len();
if size_counter + obj_size > upload_size_limit && !objects.is_empty() {
// When the limit is exceeded, batch create with the current list of objects
// and reset for the next batch.
self
.server_provider
.batch_create_collab_object(workspace_id, objects)
.await?;
objects = Vec::new();
size_counter = 0;
}
// Add the current object to the batch.
objects.push(FolderCollabParams {
object_id: oid,
encoded_collab_v1: encoded_v1,
collab_type: collab_type.clone(),
override_if_exist: false,
});
size_counter += obj_size;
}
}
// After the loop, upload any remaining objects.
if !objects.is_empty() {
info!(
"Batch create collab objects: {}, payload size: {}",
objects
.iter()
.map(|o| o.object_id.clone())
.collect::<Vec<_>>()
.join(", "),
size_counter
);
self
.server_provider
.batch_create_collab_object(workspace_id, objects)
.await?;
}
Ok(())
}
}
struct DocumentFolderOperation(Arc<DocumentManager>); struct DocumentFolderOperation(Arc<DocumentManager>);
impl FolderOperationHandler for DocumentFolderOperation { impl FolderOperationHandler for DocumentFolderOperation {
fn create_workspace_view( fn create_workspace_view(
@ -581,24 +461,3 @@ pub fn layout_type_from_view_layout(layout: ViewLayoutPB) -> DatabaseLayoutPB {
ViewLayoutPB::Document => DatabaseLayoutPB::Grid, ViewLayoutPB::Document => DatabaseLayoutPB::Grid,
} }
} }
fn load_and_process_collab_data<'a, R>(
uid: i64,
collab_read: &R,
object_ids: &[String],
) -> HashMap<String, Vec<u8>>
where
R: YrsDocAction<'a>,
PersistenceError: From<R::Error>,
{
load_collab_by_oid(uid, collab_read, object_ids)
.into_iter()
.filter_map(|(oid, collab)| {
collab
.encode_collab_v1()
.encode_to_bytes()
.ok()
.map(|encoded_v1| (oid, encoded_v1))
})
.collect()
}

View File

@ -93,6 +93,10 @@ impl ServerProvider {
*self.server.write() = server_type; *self.server.write() = server_type;
} }
pub fn get_user_authenticator(&self) -> Authenticator {
self.user_authenticator.read().clone()
}
pub fn get_appflowy_cloud_server(&self) -> FlowyResult<Arc<dyn AppFlowyServer>> { pub fn get_appflowy_cloud_server(&self) -> FlowyResult<Arc<dyn AppFlowyServer>> {
let server = self.get_server(&Server::AppFlowyCloud)?; let server = self.get_server(&Server::AppFlowyCloud)?;
Ok(server) Ok(server)

View File

@ -196,7 +196,7 @@ impl FolderCloudService for ServerProvider {
}) })
} }
fn batch_create_collab_object( fn batch_create_collab_object_f(
&self, &self,
workspace_id: &str, workspace_id: &str,
objects: Vec<FolderCollabParams>, objects: Vec<FolderCollabParams>,
@ -206,7 +206,7 @@ impl FolderCloudService for ServerProvider {
FutureResult::new(async move { FutureResult::new(async move {
server? server?
.folder_service() .folder_service()
.batch_create_collab_object(&workspace_id, objects) .batch_create_collab_object_f(&workspace_id, objects)
.await .await
}) })
} }

View File

@ -39,7 +39,8 @@ pub trait FolderCloudService: Send + Sync + 'static {
object_id: &str, object_id: &str,
) -> FutureResult<CollabDocState, Error>; ) -> FutureResult<CollabDocState, Error>;
fn batch_create_collab_object( /// The suffix 'f' in the method name serves as a workaround to avoid naming conflicts with the existing method `get_collab_doc_state`.
fn batch_create_collab_object_f(
&self, &self,
workspace_id: &str, workspace_id: &str,
objects: Vec<FolderCollabParams>, objects: Vec<FolderCollabParams>,

View File

@ -3,7 +3,7 @@ use std::collections::HashMap;
pub enum ImportData { pub enum ImportData {
AppFlowyDataFolder { AppFlowyDataFolder {
view: ParentChildViews, views: Vec<ParentChildViews>,
/// Used to update the [DatabaseViewTrackerList] when importing the database. /// Used to update the [DatabaseViewTrackerList] when importing the database.
database_view_ids_by_database_id: HashMap<String, Vec<String>>, database_view_ids_by_database_id: HashMap<String, Vec<String>>,
row_object_ids: Vec<String>, row_object_ids: Vec<String>,
@ -11,3 +11,9 @@ pub enum ImportData {
database_object_ids: Vec<String>, database_object_ids: Vec<String>,
}, },
} }
pub struct ImportViews {
pub views: Vec<ParentChildViews>,
/// Used to update the [DatabaseViewTrackerList] when importing the database.
pub database_view_ids_by_database_id: HashMap<String, Vec<String>>,
}

View File

@ -91,6 +91,6 @@ pub struct ImportAppFlowyDataPB {
#[validate(custom = "lib_infra::validator_fn::required_not_empty_str")] #[validate(custom = "lib_infra::validator_fn::required_not_empty_str")]
pub path: String, pub path: String,
#[pb(index = 2)] #[pb(index = 2, one_of)]
pub import_container_name: String, pub import_container_name: Option<String>,
} }

View File

@ -42,12 +42,14 @@ pub trait FolderUser: Send + Sync {
fn token(&self) -> Result<Option<String>, FlowyError>; fn token(&self) -> Result<Option<String>, FlowyError>;
fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError>; fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError>;
/// Import appflowy data from the given path.
/// If the container name is not empty, then the data will be imported to the given container.
/// Otherwise, the data will be imported to the current workspace.
async fn import_appflowy_data_folder( async fn import_appflowy_data_folder(
&self, &self,
workspace_id: &str,
path: &str, path: &str,
container_name: &str, container_name: Option<String>,
) -> Result<ParentChildViews, FlowyError>; ) -> Result<Vec<ParentChildViews>, FlowyError>;
} }
pub struct FolderManager { pub struct FolderManager {
@ -832,21 +834,23 @@ impl FolderManager {
Ok(()) Ok(())
} }
pub async fn import_appflowy_data(&self, path: String, name: String) -> Result<(), FlowyError> { pub async fn import_appflowy_data(
&self,
path: String,
name: Option<String>,
) -> Result<(), FlowyError> {
let (tx, rx) = tokio::sync::oneshot::channel(); let (tx, rx) = tokio::sync::oneshot::channel();
let workspace_id = self.get_current_workspace_id().await?;
let folder = self.mutex_folder.clone(); let folder = self.mutex_folder.clone();
let user = self.user.clone(); let user = self.user.clone();
tokio::spawn(async move { tokio::spawn(async move {
match user match user.import_appflowy_data_folder(&path, name).await {
.import_appflowy_data_folder(&workspace_id, &path, &name) Ok(views) => {
.await
{
Ok(view) => {
if let Some(folder) = &*folder.lock() { if let Some(folder) = &*folder.lock() {
for view in views {
insert_parent_child_views(folder, view); insert_parent_child_views(folder, view);
} }
}
let _ = tx.send(Ok(())); let _ = tx.send(Ok(()));
}, },
Err(err) => { Err(err) => {
@ -856,7 +860,6 @@ impl FolderManager {
}); });
rx.await.map_err(internal_error)??; rx.await.map_err(internal_error)??;
Ok(()) Ok(())
} }

View File

@ -116,7 +116,7 @@ where
}) })
} }
fn batch_create_collab_object( fn batch_create_collab_object_f(
&self, &self,
workspace_id: &str, workspace_id: &str,
objects: Vec<FolderCollabParams>, objects: Vec<FolderCollabParams>,

View File

@ -9,7 +9,7 @@ use collab_entity::CollabObject;
use parking_lot::RwLock; use parking_lot::RwLock;
use flowy_error::{ErrorCode, FlowyError}; use flowy_error::{ErrorCode, FlowyError};
use flowy_user_deps::cloud::{UserCloudService, UserUpdate, UserUpdateReceiver}; use flowy_user_deps::cloud::{UserCloudService, UserCollabParams, UserUpdate, UserUpdateReceiver};
use flowy_user_deps::entities::*; use flowy_user_deps::entities::*;
use lib_infra::box_any::BoxAny; use lib_infra::box_any::BoxAny;
use lib_infra::future::FutureResult; use lib_infra::future::FutureResult;
@ -251,6 +251,31 @@ where
Ok(()) Ok(())
}) })
} }
fn batch_create_collab_object(
&self,
workspace_id: &str,
objects: Vec<UserCollabParams>,
) -> FutureResult<(), Error> {
let workspace_id = workspace_id.to_string();
let try_get_client = self.server.try_get_client();
FutureResult::new(async move {
let params = objects
.into_iter()
.map(|object| CollabParams {
object_id: object.object_id,
encoded_collab_v1: object.encoded_collab_v1,
collab_type: object.collab_type,
override_if_exist: false,
})
.collect::<Vec<_>>();
try_get_client?
.batch_create_collab(&workspace_id, params)
.await
.map_err(FlowyError::from)?;
Ok(())
})
}
} }
pub async fn user_sign_up_request( pub async fn user_sign_up_request(

View File

@ -67,7 +67,7 @@ impl FolderCloudService for LocalServerFolderCloudServiceImpl {
}) })
} }
fn batch_create_collab_object( fn batch_create_collab_object_f(
&self, &self,
_workspace_id: &str, _workspace_id: &str,
_objects: Vec<FolderCollabParams>, _objects: Vec<FolderCollabParams>,

View File

@ -1,6 +1,6 @@
use std::sync::Arc; use std::sync::Arc;
use anyhow::Error; use anyhow::{anyhow, Error};
use collab::core::collab::CollabDocState; use collab::core::collab::CollabDocState;
use collab_entity::CollabObject; use collab_entity::CollabObject;
use lazy_static::lazy_static; use lazy_static::lazy_static;
@ -8,7 +8,7 @@ use parking_lot::Mutex;
use uuid::Uuid; use uuid::Uuid;
use flowy_error::FlowyError; use flowy_error::FlowyError;
use flowy_user_deps::cloud::UserCloudService; use flowy_user_deps::cloud::{UserCloudService, UserCollabParams};
use flowy_user_deps::entities::*; use flowy_user_deps::entities::*;
use flowy_user_deps::DEFAULT_USER_NAME; use flowy_user_deps::DEFAULT_USER_NAME;
use lib_infra::box_any::BoxAny; use lib_infra::box_any::BoxAny;
@ -149,6 +149,14 @@ impl UserCloudService for LocalServerUserAuthServiceImpl {
) -> FutureResult<(), FlowyError> { ) -> FutureResult<(), FlowyError> {
FutureResult::new(async { Ok(()) }) FutureResult::new(async { Ok(()) })
} }
fn batch_create_collab_object(
&self,
_workspace_id: &str,
_objects: Vec<UserCollabParams>,
) -> FutureResult<(), Error> {
FutureResult::new(async { Err(anyhow!("local server doesn't support create collab object")) })
}
} }
fn make_user_workspace() -> UserWorkspace { fn make_user_workspace() -> UserWorkspace {

View File

@ -154,7 +154,7 @@ where
FutureResult::new(async { rx.await? }) FutureResult::new(async { rx.await? })
} }
fn batch_create_collab_object( fn batch_create_collab_object_f(
&self, &self,
_workspace_id: &str, _workspace_id: &str,
_objects: Vec<FolderCollabParams>, _objects: Vec<FolderCollabParams>,

View File

@ -5,7 +5,7 @@ use std::pin::Pin;
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use std::time::Duration; use std::time::Duration;
use anyhow::Error; use anyhow::{anyhow, Error};
use collab::core::collab::{CollabDocState, MutexCollab}; use collab::core::collab::{CollabDocState, MutexCollab};
use collab::core::origin::CollabOrigin; use collab::core::origin::CollabOrigin;
use collab_entity::{CollabObject, CollabType}; use collab_entity::{CollabObject, CollabType};
@ -326,6 +326,18 @@ where
}); });
FutureResult::new(async { rx.await? }) FutureResult::new(async { rx.await? })
} }
fn batch_create_collab_object(
&self,
_workspace_id: &str,
_objects: Vec<UserCollabParams>,
) -> FutureResult<(), Error> {
FutureResult::new(async {
Err(anyhow!(
"supabase server doesn't support batch create collab"
))
})
}
} }
pub struct CreateCollabAction { pub struct CreateCollabAction {

View File

@ -5,13 +5,14 @@ use std::sync::Arc;
use anyhow::Error; use anyhow::Error;
use collab::core::collab::CollabDocState; use collab::core::collab::CollabDocState;
use collab_entity::CollabObject; use collab_entity::{CollabObject, CollabType};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use tokio_stream::wrappers::WatchStream; use tokio_stream::wrappers::WatchStream;
use uuid::Uuid; use uuid::Uuid;
use flowy_error::{ErrorCode, FlowyError}; use flowy_error::{ErrorCode, FlowyError};
use lib_infra::box_any::BoxAny; use lib_infra::box_any::BoxAny;
use lib_infra::future::FutureResult; use lib_infra::future::FutureResult;
@ -216,6 +217,12 @@ pub trait UserCloudService: Send + Sync + 'static {
data: Vec<u8>, data: Vec<u8>,
override_if_exist: bool, override_if_exist: bool,
) -> FutureResult<(), FlowyError>; ) -> FutureResult<(), FlowyError>;
fn batch_create_collab_object(
&self,
workspace_id: &str,
objects: Vec<UserCollabParams>,
) -> FutureResult<(), Error>;
} }
pub type UserUpdateReceiver = tokio::sync::mpsc::Receiver<UserUpdate>; pub type UserUpdateReceiver = tokio::sync::mpsc::Receiver<UserUpdate>;
@ -236,3 +243,10 @@ pub fn uuid_from_map(map: &HashMap<String, String>) -> Result<Uuid, Error> {
let uuid = Uuid::from_str(uuid)?; let uuid = Uuid::from_str(uuid)?;
Ok(uuid) Ok(uuid)
} }
#[derive(Debug)]
pub struct UserCollabParams {
pub object_id: String,
pub encoded_collab_v1: Vec<u8>,
pub collab_type: CollabType,
}

View File

@ -348,6 +348,10 @@ impl Authenticator {
pub fn is_local(&self) -> bool { pub fn is_local(&self) -> bool {
matches!(self, Authenticator::Local) matches!(self, Authenticator::Local)
} }
pub fn is_appflowy_cloud(&self) -> bool {
matches!(self, Authenticator::AppFlowyCloud)
}
} }
impl From<i32> for Authenticator { impl From<i32> for Authenticator {

View File

@ -37,6 +37,9 @@ use crate::migrations::MigrationUser;
use crate::services::cloud_config::get_cloud_config; use crate::services::cloud_config::get_cloud_config;
use crate::services::collab_interact::{CollabInteract, DefaultCollabInteract}; use crate::services::collab_interact::{CollabInteract, DefaultCollabInteract};
use crate::services::data_import::importer::{import_data, ImportDataSource}; use crate::services::data_import::importer::{import_data, ImportDataSource};
use crate::services::data_import::{
get_appflowy_data_folder_import_context, upload_imported_data, ImportContext,
};
use crate::services::db::UserDB; use crate::services::db::UserDB;
use crate::services::entities::{ResumableSignUp, Session, UserConfig, UserPaths}; use crate::services::entities::{ResumableSignUp, Session, UserConfig, UserPaths};
use crate::services::user_awareness::UserAwarenessDataSource; use crate::services::user_awareness::UserAwarenessDataSource;
@ -399,31 +402,43 @@ impl UserManager {
} else { } else {
UserAwarenessDataSource::Remote UserAwarenessDataSource::Remote
}; };
self
.save_auth_data(&response, authenticator, &new_session)
.await?;
if response.is_new_user { if response.is_new_user {
if let Some(old_user) = migration_user { if let Some(old_user) = migration_user {
let new_user = MigrationUser {
user_profile: new_user_profile.clone(),
session: new_session.clone(),
};
event!( event!(
tracing::Level::INFO, tracing::Level::INFO,
"Migrate anon user data from {:?} to {:?}", "Migrate anon user data from {:?} to {:?}",
old_user.user_profile.uid, old_user.user_profile.uid,
new_user.user_profile.uid new_user_profile.uid
); );
self self
.migrate_anon_user_data_to_cloud(&old_user, &new_user, authenticator) .migrate_anon_user_data_to_cloud(
&old_user,
&MigrationUser {
user_profile: new_user_profile.clone(),
session: new_session.clone(),
},
authenticator,
)
.await?; .await?;
// let old_collab_db = self.database.get_collab_db(old_user.session.user_id)?;
// self
// .import_appflowy_data_with_context(ImportContext {
// imported_session: old_user.session.clone(),
// imported_collab_db: old_collab_db,
// container_name: None,
// })
// .await?;
self.remove_anon_user(); self.remove_anon_user();
let _ = self.database.close(old_user.session.user_id); let _ = self.database.close(old_user.session.user_id);
} }
} }
self
.save_auth_data(&response, authenticator, &new_session)
.await?;
self self
.user_status_callback .user_status_callback
.read() .read()
@ -663,12 +678,23 @@ impl UserManager {
} }
} }
pub fn import_data(&self, source: ImportDataSource) -> Result<ImportData, FlowyError> { pub async fn import_data_from_source(
let session = self.get_session()?; &self,
let collab_db = self.database.get_collab_db(session.user_id)?; source: ImportDataSource,
let import_result = import_data(&session, source, collab_db) ) -> Result<ImportData, FlowyError> {
.map_err(|err| FlowyError::new(ErrorCode::AppFlowyDataFolderImportError, err.to_string()))?; match source {
Ok(import_result) ImportDataSource::AppFlowyDataFolder {
path,
container_name,
} => {
let context = get_appflowy_data_folder_import_context(&path)
.map_err(|err| {
FlowyError::new(ErrorCode::AppFlowyDataFolderImportError, err.to_string())
})?
.with_container_name(container_name);
self.import_appflowy_data(context).await
},
}
} }
pub(crate) fn set_session(&self, session: Option<Session>) -> Result<(), FlowyError> { pub(crate) fn set_session(&self, session: Option<Session>) -> Result<(), FlowyError> {
@ -821,6 +847,32 @@ impl UserManager {
)?; )?;
Ok(()) Ok(())
} }
async fn import_appflowy_data(&self, context: ImportContext) -> Result<ImportData, FlowyError> {
let session = self.get_session()?;
let uid = session.user_id;
let user_collab_db = self.database.get_collab_db(session.user_id)?;
let cloned_collab_db = user_collab_db.clone();
let import_data = tokio::task::spawn_blocking(move || {
import_data(&session, context, cloned_collab_db)
.map_err(|err| FlowyError::new(ErrorCode::AppFlowyDataFolderImportError, err.to_string()))
})
.await
.map_err(internal_error)??;
let user = self.get_user_profile_from_disk(uid).await?;
upload_imported_data(
uid,
user_collab_db,
&user.workspace_id,
&user.authenticator,
&import_data,
self.cloud_services.get_user_service()?,
)
.await?;
Ok(import_data)
}
} }
fn current_authenticator() -> Authenticator { fn current_authenticator() -> Authenticator {

View File

@ -14,16 +14,54 @@ use collab_database::database::{
use collab_database::rows::{database_row_document_id_from_row_id, mut_row_with_collab, RowId}; use collab_database::rows::{database_row_document_id_from_row_id, mut_row_with_collab, RowId};
use collab_database::user::DatabaseViewTrackerList; use collab_database::user::DatabaseViewTrackerList;
use collab_document::document_data::default_document_collab_data; use collab_document::document_data::default_document_collab_data;
use collab_entity::CollabType;
use collab_folder::{Folder, UserId, View, ViewIdentifier, ViewLayout}; use collab_folder::{Folder, UserId, View, ViewIdentifier, ViewLayout};
use collab_integrate::{PersistenceError, RocksCollabDB, YrsDocAction}; use collab_integrate::{PersistenceError, RocksCollabDB, YrsDocAction};
use flowy_error::{internal_error, FlowyError};
use flowy_folder_deps::cloud::gen_view_id; use flowy_folder_deps::cloud::gen_view_id;
use flowy_folder_deps::entities::ImportData; use flowy_folder_deps::entities::ImportData;
use flowy_folder_deps::folder_builder::{ParentChildViews, ViewBuilder}; use flowy_folder_deps::folder_builder::{ParentChildViews, ViewBuilder};
use flowy_sqlite::kv::StorePreferences; use flowy_sqlite::kv::StorePreferences;
use flowy_user_deps::cloud::{UserCloudService, UserCollabParams};
use flowy_user_deps::entities::Authenticator;
use parking_lot::{Mutex, RwLock}; use parking_lot::{Mutex, RwLock};
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::ops::{Deref, DerefMut}; use std::ops::{Deref, DerefMut};
use std::sync::Arc; use std::sync::Arc;
use tracing::info;
pub(crate) struct ImportContext {
pub imported_session: Session,
pub imported_collab_db: Arc<RocksCollabDB>,
pub container_name: Option<String>,
}
impl ImportContext {
pub fn with_container_name(mut self, container_name: Option<String>) -> Self {
self.container_name = container_name;
self
}
}
pub(crate) fn get_appflowy_data_folder_import_context(path: &str) -> anyhow::Result<ImportContext> {
let user_paths = UserPaths::new(path.to_string());
let other_store_preferences = Arc::new(StorePreferences::new(path)?);
migrate_session_with_user_uuid("appflowy_session_cache", &other_store_preferences);
let session = other_store_preferences
.get_object::<Session>("appflowy_session_cache")
.ok_or(anyhow!(
"Can't find the session cache in the appflowy data folder at path: {}",
path
))?;
let collab_db_path = user_paths.collab_db_path(session.user_id);
let collab_db = Arc::new(RocksCollabDB::open(collab_db_path)?);
Ok(ImportContext {
imported_session: session,
imported_collab_db: collab_db,
container_name: None,
})
}
/// This path refers to the directory where AppFlowy stores its data. The directory structure is as follows: /// This path refers to the directory where AppFlowy stores its data. The directory structure is as follows:
/// root folder: /// root folder:
@ -33,49 +71,43 @@ use std::sync::Arc;
pub(crate) fn import_appflowy_data_folder( pub(crate) fn import_appflowy_data_folder(
session: &Session, session: &Session,
path: String, workspace_id: &str,
container_name: String,
collab_db: &Arc<RocksCollabDB>, collab_db: &Arc<RocksCollabDB>,
import_context: ImportContext,
) -> anyhow::Result<ImportData> { ) -> anyhow::Result<ImportData> {
let user_paths = UserPaths::new(path.clone()); let imported_session = import_context.imported_session;
let other_store_preferences = Arc::new(StorePreferences::new(&path)?); let imported_collab_db = import_context.imported_collab_db;
migrate_session_with_user_uuid("appflowy_session_cache", &other_store_preferences); let container_name = import_context.container_name;
let other_session = other_store_preferences let imported_collab_read_txn = imported_collab_db.read_txn();
.get_object::<Session>("appflowy_session_cache")
.ok_or(anyhow!(
"Can't find the session cache in the appflowy data folder at path: {}",
path
))?;
let other_collab_db = Arc::new(RocksCollabDB::open(
user_paths.collab_db_path(other_session.user_id),
)?);
let other_collab_read_txn = other_collab_db.read_txn();
let mut database_view_ids_by_database_id: HashMap<String, Vec<String>> = HashMap::new(); let mut database_view_ids_by_database_id: HashMap<String, Vec<String>> = HashMap::new();
let row_object_ids = Mutex::new(HashSet::new()); let row_object_ids = Mutex::new(HashSet::new());
let document_object_ids = Mutex::new(HashSet::new()); let document_object_ids = Mutex::new(HashSet::new());
let database_object_ids = Mutex::new(HashSet::new()); let database_object_ids = Mutex::new(HashSet::new());
let import_container_view_id = gen_view_id().to_string(); let import_container_view_id = match &container_name {
None => workspace_id.to_string(),
Some(_) => gen_view_id().to_string(),
};
let view = collab_db.with_write_txn(|collab_write_txn| { let views = collab_db.with_write_txn(|collab_write_txn| {
// use the old_to_new_id_map to keep track of the other collab object id and the new collab object id // use the old_to_new_id_map to keep track of the other collab object id and the new collab object id
let old_to_new_id_map = Arc::new(Mutex::new(OldToNewIdMap::new())); let old_to_new_id_map = Arc::new(Mutex::new(OldToNewIdMap::new()));
let mut all_object_ids = other_collab_read_txn let mut all_object_ids = imported_collab_read_txn
.get_all_docs() .get_all_docs()
.map(|iter| iter.collect::<Vec<String>>()) .map(|iter| iter.collect::<Vec<String>>())
.unwrap_or_default(); .unwrap_or_default();
// when doing import, we don't want to import the user workspace, database view tracker and the user awareness // when doing import, we don't want to import the user workspace, database view tracker and the user awareness
all_object_ids.retain(|id| id != &other_session.user_workspace.id); all_object_ids.retain(|id| id != &imported_session.user_workspace.id);
all_object_ids.retain(|id| id != &other_session.user_workspace.database_view_tracker_id); all_object_ids.retain(|id| id != &imported_session.user_workspace.database_view_tracker_id);
all_object_ids all_object_ids
.retain(|id| id != &awareness_oid_from_user_uuid(&other_session.user_uuid).to_string()); .retain(|id| id != &awareness_oid_from_user_uuid(&imported_session.user_uuid).to_string());
// import database view tracker // import database view tracker
migrate_database_view_tracker( migrate_database_view_tracker(
&mut old_to_new_id_map.lock(), &mut old_to_new_id_map.lock(),
&other_session, &imported_session,
&other_collab_read_txn, &imported_collab_read_txn,
&mut database_view_ids_by_database_id, &mut database_view_ids_by_database_id,
&database_object_ids, &database_object_ids,
)?; )?;
@ -90,8 +122,8 @@ pub(crate) fn import_appflowy_data_folder(
// load other collab objects // load other collab objects
let collab_by_oid = load_collab_by_oid( let collab_by_oid = load_collab_by_oid(
other_session.user_id, imported_session.user_id,
&other_collab_read_txn, &imported_collab_read_txn,
&all_object_ids, &all_object_ids,
); );
// import the database // import the database
@ -119,10 +151,13 @@ pub(crate) fn import_appflowy_data_folder(
let child_views = import_workspace_views( let child_views = import_workspace_views(
&import_container_view_id, &import_container_view_id,
&mut old_to_new_id_map.lock(), &mut old_to_new_id_map.lock(),
&other_session, &imported_session,
&other_collab_read_txn, &imported_collab_read_txn,
)?; )?;
match container_name {
None => Ok(child_views),
Some(container_name) => {
let name = if container_name.is_empty() { let name = if container_name.is_empty() {
format!( format!(
"import_{}", "import_{}",
@ -150,11 +185,12 @@ pub(crate) fn import_appflowy_data_folder(
.with_name(name) .with_name(name)
.with_child_views(child_views) .with_child_views(child_views)
.build(); .build();
Ok(vec![import_container_view])
Ok(import_container_view) },
}
})?; })?;
Ok(ImportData::AppFlowyDataFolder { Ok(ImportData::AppFlowyDataFolder {
view, views,
database_view_ids_by_database_id, database_view_ids_by_database_id,
row_object_ids: row_object_ids.into_inner().into_iter().collect(), row_object_ids: row_object_ids.into_inner().into_iter().collect(),
database_object_ids: database_object_ids.into_inner().into_iter().collect(), database_object_ids: database_object_ids.into_inner().into_iter().collect(),
@ -482,3 +518,124 @@ impl DerefMut for OldToNewIdMap {
&mut self.0 &mut self.0
} }
} }
pub async fn upload_imported_data(
uid: i64,
user_collab_db: Arc<RocksCollabDB>,
workspace_id: &str,
user_authenticator: &Authenticator,
import_data: &ImportData,
user_cloud_service: Arc<dyn UserCloudService>,
) -> Result<(), FlowyError> {
// Only support uploading the collab data when the current server is AppFlowy Cloud server
if !user_authenticator.is_appflowy_cloud() {
return Ok(());
}
let (row_object_ids, document_object_ids, database_object_ids) = match import_data {
ImportData::AppFlowyDataFolder {
views: _,
database_view_ids_by_database_id: _,
row_object_ids,
document_object_ids,
database_object_ids,
} => (
row_object_ids.clone(),
document_object_ids.clone(),
database_object_ids.clone(),
),
};
let object_by_collab_type = tokio::task::spawn_blocking(move || {
let collab_read = user_collab_db.read_txn();
let mut object_by_collab_type = HashMap::new();
object_by_collab_type.insert(
CollabType::Database,
load_and_process_collab_data(uid, &collab_read, &database_object_ids),
);
object_by_collab_type.insert(
CollabType::Document,
load_and_process_collab_data(uid, &collab_read, &document_object_ids),
);
object_by_collab_type.insert(
CollabType::DatabaseRow,
load_and_process_collab_data(uid, &collab_read, &row_object_ids),
);
object_by_collab_type
})
.await
.map_err(internal_error)?;
// Upload
let mut size_counter = 0;
let mut objects: Vec<UserCollabParams> = vec![];
let upload_size_limit = 2 * 1024 * 1024;
for (collab_type, encoded_v1_by_oid) in object_by_collab_type {
info!(
"Batch import collab:{} ids: {:?}",
collab_type,
encoded_v1_by_oid.keys(),
);
for (oid, encoded_v1) in encoded_v1_by_oid {
let obj_size = encoded_v1.len();
if size_counter + obj_size > upload_size_limit && !objects.is_empty() {
// When the limit is exceeded, batch create with the current list of objects
// and reset for the next batch.
user_cloud_service
.batch_create_collab_object(workspace_id, objects)
.await?;
objects = Vec::new();
size_counter = 0;
}
// Add the current object to the batch.
objects.push(UserCollabParams {
object_id: oid,
encoded_collab_v1: encoded_v1,
collab_type: collab_type.clone(),
});
size_counter += obj_size;
}
}
// After the loop, upload any remaining objects.
if !objects.is_empty() {
info!(
"Batch create collab objects: {}, payload size: {}",
objects
.iter()
.map(|o| o.object_id.clone())
.collect::<Vec<_>>()
.join(", "),
size_counter
);
user_cloud_service
.batch_create_collab_object(workspace_id, objects)
.await?;
}
Ok(())
}
fn load_and_process_collab_data<'a, R>(
uid: i64,
collab_read: &R,
object_ids: &[String],
) -> HashMap<String, Vec<u8>>
where
R: YrsDocAction<'a>,
PersistenceError: From<R::Error>,
{
load_collab_by_oid(uid, collab_read, object_ids)
.into_iter()
.filter_map(|(oid, collab)| {
collab
.encode_collab_v1()
.encode_to_bytes()
.ok()
.map(|encoded_v1| (oid, encoded_v1))
})
.collect()
}

View File

@ -3,6 +3,7 @@ use crate::services::entities::Session;
use collab_integrate::{PersistenceError, RocksCollabDB, YrsDocAction}; use collab_integrate::{PersistenceError, RocksCollabDB, YrsDocAction};
use std::collections::HashMap; use std::collections::HashMap;
use crate::services::data_import::ImportContext;
use collab::preclude::Collab; use collab::preclude::Collab;
use flowy_folder_deps::entities::ImportData; use flowy_folder_deps::entities::ImportData;
use std::sync::Arc; use std::sync::Arc;
@ -10,21 +11,19 @@ use std::sync::Arc;
pub enum ImportDataSource { pub enum ImportDataSource {
AppFlowyDataFolder { AppFlowyDataFolder {
path: String, path: String,
container_name: String, container_name: Option<String>,
}, },
} }
/// Import appflowy data from the given path.
/// If the container name is not empty, then the data will be imported to the given container.
/// Otherwise, the data will be imported to the current workspace.
pub(crate) fn import_data( pub(crate) fn import_data(
session: &Session, session: &Session,
source: ImportDataSource, context: ImportContext,
collab_db: Arc<RocksCollabDB>, collab_db: Arc<RocksCollabDB>,
) -> anyhow::Result<ImportData> { ) -> anyhow::Result<ImportData> {
match source { import_appflowy_data_folder(session, &session.user_workspace.id, &collab_db, context)
ImportDataSource::AppFlowyDataFolder {
path,
container_name,
} => import_appflowy_data_folder(session, path, container_name, &collab_db),
}
} }
pub fn load_collab_by_oid<'a, R>( pub fn load_collab_by_oid<'a, R>(

View File

@ -1,5 +1,6 @@
mod appflowy_data_import; mod appflowy_data_import;
pub use appflowy_data_import::*;
pub(crate) mod importer; pub(crate) mod importer;
pub use importer::load_collab_by_oid; pub use importer::load_collab_by_oid;
pub use importer::ImportDataSource; pub use importer::ImportDataSource;