mirror of
https://github.com/AppFlowy-IO/AppFlowy.git
synced 2024-08-30 18:12:39 +00:00
chore: support import appflowy data into current workspace (#4254)
* chore: support import appflowy data into current workspace * refactor: code * chore: unused ref * chore: update url
This commit is contained in:
parent
8ccd1ec72c
commit
36cf653d64
@ -3,6 +3,7 @@ import 'package:appflowy_backend/log.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-folder2/import.pb.dart';
|
||||
import 'package:dartz/dartz.dart';
|
||||
import 'package:easy_localization/easy_localization.dart';
|
||||
import 'package:flutter_bloc/flutter_bloc.dart';
|
||||
import 'package:freezed_annotation/freezed_annotation.dart';
|
||||
|
||||
@ -14,7 +15,11 @@ class SettingFileImporterBloc
|
||||
on<SettingFileImportEvent>((event, emit) async {
|
||||
await event.when(
|
||||
importAppFlowyDataFolder: (String path) async {
|
||||
final payload = ImportAppFlowyDataPB.create()..path = path;
|
||||
final formattedDate =
|
||||
DateFormat('yyyy-MM-dd HH:mm:ss').format(DateTime.now());
|
||||
final payload = ImportAppFlowyDataPB.create()
|
||||
..path = path
|
||||
..importContainerName = "appflowy_import_$formattedDate";
|
||||
final result =
|
||||
await FolderEventImportAppFlowyDataFolder(payload).send();
|
||||
result.fold(
|
||||
|
@ -2,12 +2,15 @@ import 'package:appflowy/generated/locale_keys.g.dart';
|
||||
import 'package:appflowy/startup/startup.dart';
|
||||
import 'package:appflowy/workspace/application/settings/setting_file_importer_bloc.dart';
|
||||
import 'package:appflowy/workspace/presentation/home/toast.dart';
|
||||
import 'package:appflowy_backend/log.dart';
|
||||
import 'package:easy_localization/easy_localization.dart';
|
||||
import 'package:flowy_infra/file_picker/file_picker_service.dart';
|
||||
import 'package:flowy_infra_ui/flowy_infra_ui.dart';
|
||||
import 'package:flutter/gestures.dart';
|
||||
import 'package:flutter/material.dart';
|
||||
import 'package:flutter_bloc/flutter_bloc.dart';
|
||||
import 'package:fluttertoast/fluttertoast.dart';
|
||||
import 'package:url_launcher/url_launcher.dart';
|
||||
|
||||
class ImportAppFlowyData extends StatefulWidget {
|
||||
const ImportAppFlowyData({super.key});
|
||||
@ -46,20 +49,11 @@ class _ImportAppFlowyDataState extends State<ImportAppFlowyData> {
|
||||
},
|
||||
child: BlocBuilder<SettingFileImporterBloc, SettingFileImportState>(
|
||||
builder: (context, state) {
|
||||
return Column(
|
||||
return const Column(
|
||||
children: [
|
||||
const ImportAppFlowyDataButton(),
|
||||
const VSpace(6),
|
||||
IntrinsicHeight(
|
||||
child: Opacity(
|
||||
opacity: 0.6,
|
||||
child: FlowyText.medium(
|
||||
LocaleKeys.settings_menu_importAppFlowyDataDescription
|
||||
.tr(),
|
||||
maxLines: 13,
|
||||
),
|
||||
),
|
||||
),
|
||||
ImportAppFlowyDataButton(),
|
||||
VSpace(6),
|
||||
AppFlowyDataImportTip(),
|
||||
],
|
||||
);
|
||||
},
|
||||
@ -76,6 +70,45 @@ class _ImportAppFlowyDataState extends State<ImportAppFlowyData> {
|
||||
}
|
||||
}
|
||||
|
||||
class AppFlowyDataImportTip extends StatelessWidget {
|
||||
final url = "https://docs.appflowy.io/docs/appflowy/product/data-storage";
|
||||
const AppFlowyDataImportTip({super.key});
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
return Opacity(
|
||||
opacity: 0.6,
|
||||
child: RichText(
|
||||
text: TextSpan(
|
||||
children: <TextSpan>[
|
||||
TextSpan(
|
||||
text: LocaleKeys.settings_menu_importAppFlowyDataDescription.tr(),
|
||||
style: Theme.of(context).textTheme.bodySmall!,
|
||||
),
|
||||
TextSpan(
|
||||
text: " ${LocaleKeys.settings_menu_importGuide.tr()} ",
|
||||
style: Theme.of(context).textTheme.bodyMedium!.copyWith(
|
||||
color: Theme.of(context).colorScheme.primary,
|
||||
decoration: TextDecoration.underline,
|
||||
),
|
||||
recognizer: TapGestureRecognizer()..onTap = () => _launchURL(),
|
||||
),
|
||||
],
|
||||
),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
Future<void> _launchURL() async {
|
||||
final uri = Uri.parse(url);
|
||||
if (await canLaunchUrl(uri)) {
|
||||
await launchUrl(uri);
|
||||
} else {
|
||||
Log.error("Could not launch $url");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class ImportAppFlowyDataButton extends StatefulWidget {
|
||||
const ImportAppFlowyDataButton({super.key});
|
||||
|
||||
|
@ -303,7 +303,8 @@
|
||||
"importAppFlowyData": "Import Data from External AppFlowy Folder",
|
||||
"importAppFlowyDataDescription": "Copy data from an external AppFlowy data folder and import it into the current AppFlowy data folder",
|
||||
"importSuccess": "Successfully imported the AppFlowy data folder",
|
||||
"importFailed": "Importing the AppFlowy data folder failed"
|
||||
"importFailed": "Importing the AppFlowy data folder failed",
|
||||
"importGuide": "For further details, please check the referenced document"
|
||||
},
|
||||
"notifications": {
|
||||
"enableNotifications": {
|
||||
|
@ -190,10 +190,14 @@ impl EventIntegrationTest {
|
||||
Ok(user_profile)
|
||||
}
|
||||
|
||||
pub async fn import_appflowy_data(&self, path: String, name: &str) -> Result<(), FlowyError> {
|
||||
pub async fn import_appflowy_data(
|
||||
&self,
|
||||
path: String,
|
||||
name: Option<String>,
|
||||
) -> Result<(), FlowyError> {
|
||||
let payload = ImportAppFlowyDataPB {
|
||||
path,
|
||||
import_container_name: name.to_string(),
|
||||
import_container_name: name,
|
||||
};
|
||||
match EventBuilder::new(self.clone())
|
||||
.event(FolderEvent::ImportAppFlowyDataFolder)
|
||||
|
@ -78,6 +78,8 @@ async fn migrate_anon_user_data_to_af_cloud_test() {
|
||||
assert_eq!(user.authenticator, AuthenticatorPB::AppFlowyCloud);
|
||||
|
||||
let user_first_level_views = test.get_all_workspace_views().await;
|
||||
// assert_eq!(user_first_level_views.len(), 2);
|
||||
|
||||
println!("user first level views: {:?}", user_first_level_views);
|
||||
let user_second_level_views = test
|
||||
.get_views(&user_first_level_views[0].id)
|
||||
|
@ -9,7 +9,7 @@ use serde_json::{json, Value};
|
||||
use std::env::temp_dir;
|
||||
|
||||
#[tokio::test]
|
||||
async fn import_appflowy_data_folder_test() {
|
||||
async fn import_appflowy_data_folder_into_new_view_test() {
|
||||
let import_container_name = "040_local".to_string();
|
||||
let (cleaner, user_db_path) =
|
||||
unzip_history_user_db("./tests/asset", &import_container_name).unwrap();
|
||||
@ -29,7 +29,7 @@ async fn import_appflowy_data_folder_test() {
|
||||
test
|
||||
.import_appflowy_data(
|
||||
user_db_path.to_str().unwrap().to_string(),
|
||||
&import_container_name,
|
||||
Some(import_container_name.clone()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
@ -65,7 +65,55 @@ async fn import_appflowy_data_folder_test() {
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn import_appflowy_data_folder_test2() {
|
||||
async fn import_appflowy_data_folder_into_current_workspace_test() {
|
||||
let import_container_name = "040_local".to_string();
|
||||
let (cleaner, user_db_path) =
|
||||
unzip_history_user_db("./tests/asset", &import_container_name).unwrap();
|
||||
// In the 040_local, the structure is:
|
||||
// workspace:
|
||||
// view: Document1
|
||||
// view: Document2
|
||||
// view: Grid1
|
||||
// view: Grid2
|
||||
user_localhost_af_cloud().await;
|
||||
let test = EventIntegrationTest::new_with_name(DEFAULT_NAME).await;
|
||||
let _ = test.af_cloud_sign_up().await;
|
||||
// after sign up, the initial workspace is created, so the structure is:
|
||||
// workspace:
|
||||
// view: Getting Started
|
||||
|
||||
test
|
||||
.import_appflowy_data(user_db_path.to_str().unwrap().to_string(), None)
|
||||
.await
|
||||
.unwrap();
|
||||
// after import, the structure is:
|
||||
// workspace:
|
||||
// view: Getting Started
|
||||
// view: Document1
|
||||
// view: Document2
|
||||
// view: Grid1
|
||||
// view: Grid2
|
||||
let views = test.get_all_workspace_views().await;
|
||||
assert_eq!(views.len(), 2);
|
||||
assert_eq!(views[1].name, "Document1");
|
||||
|
||||
let document_1_child_views = test.get_views(&views[1].id).await.child_views;
|
||||
assert_eq!(document_1_child_views.len(), 1);
|
||||
assert_eq!(document_1_child_views[0].name, "Document2");
|
||||
|
||||
let document2_child_views = test
|
||||
.get_views(&document_1_child_views[0].id)
|
||||
.await
|
||||
.child_views;
|
||||
assert_eq!(document2_child_views.len(), 2);
|
||||
assert_eq!(document2_child_views[0].name, "Grid1");
|
||||
assert_eq!(document2_child_views[1].name, "Grid2");
|
||||
|
||||
drop(cleaner);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn import_appflowy_data_folder_into_new_view_test2() {
|
||||
let import_container_name = "040_local_2".to_string();
|
||||
let (cleaner, user_db_path) =
|
||||
unzip_history_user_db("./tests/asset", &import_container_name).unwrap();
|
||||
@ -75,7 +123,7 @@ async fn import_appflowy_data_folder_test2() {
|
||||
test
|
||||
.import_appflowy_data(
|
||||
user_db_path.to_str().unwrap().to_string(),
|
||||
&import_container_name,
|
||||
Some(import_container_name.clone()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
@ -95,7 +143,10 @@ async fn import_empty_appflowy_data_folder_test() {
|
||||
let test = EventIntegrationTest::new_with_name(DEFAULT_NAME).await;
|
||||
let _ = test.af_cloud_sign_up().await;
|
||||
let error = test
|
||||
.import_appflowy_data(path.to_str().unwrap().to_string(), "empty_folder")
|
||||
.import_appflowy_data(
|
||||
path.to_str().unwrap().to_string(),
|
||||
Some("empty_folder".to_string()),
|
||||
)
|
||||
.await
|
||||
.unwrap_err();
|
||||
assert_eq!(error.code, ErrorCode::AppFlowyDataFolderImportError);
|
||||
@ -121,7 +172,7 @@ async fn import_appflowy_data_folder_multiple_times_test() {
|
||||
test
|
||||
.import_appflowy_data(
|
||||
user_db_path.to_str().unwrap().to_string(),
|
||||
&import_container_name,
|
||||
Some(import_container_name.clone()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
@ -137,7 +188,7 @@ async fn import_appflowy_data_folder_multiple_times_test() {
|
||||
test
|
||||
.import_appflowy_data(
|
||||
user_db_path.to_str().unwrap().to_string(),
|
||||
&import_container_name,
|
||||
Some(import_container_name.clone()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
@ -3,12 +3,11 @@ use std::convert::TryFrom;
|
||||
use std::sync::{Arc, Weak};
|
||||
|
||||
use bytes::Bytes;
|
||||
use collab_entity::CollabType;
|
||||
|
||||
use tokio::sync::RwLock;
|
||||
use tracing::info;
|
||||
|
||||
use collab_integrate::collab_builder::AppFlowyCollabBuilder;
|
||||
use collab_integrate::{PersistenceError, RocksCollabDB, YrsDocAction};
|
||||
use collab_integrate::RocksCollabDB;
|
||||
use flowy_database2::entities::DatabaseLayoutPB;
|
||||
use flowy_database2::services::share::csv::CSVFormat;
|
||||
use flowy_database2::template::{make_default_board, make_default_calendar, make_default_grid};
|
||||
@ -16,17 +15,17 @@ use flowy_database2::DatabaseManager;
|
||||
use flowy_document2::entities::DocumentDataPB;
|
||||
use flowy_document2::manager::DocumentManager;
|
||||
use flowy_document2::parser::json::parser::JsonToDocumentParser;
|
||||
use flowy_error::{internal_error, ErrorCode, FlowyError};
|
||||
use flowy_error::FlowyError;
|
||||
use flowy_folder2::entities::ViewLayoutPB;
|
||||
use flowy_folder2::manager::{FolderManager, FolderUser};
|
||||
use flowy_folder2::share::ImportType;
|
||||
use flowy_folder2::view_operation::{FolderOperationHandler, FolderOperationHandlers, View};
|
||||
use flowy_folder2::ViewLayout;
|
||||
use flowy_folder_deps::cloud::{FolderCloudService, FolderCollabParams};
|
||||
|
||||
use flowy_folder_deps::entities::ImportData;
|
||||
use flowy_folder_deps::folder_builder::{ParentChildViews, WorkspaceViewBuilder};
|
||||
use flowy_user::manager::UserManager;
|
||||
use flowy_user::services::data_import::{load_collab_by_oid, ImportDataSource};
|
||||
use flowy_user::services::data_import::ImportDataSource;
|
||||
|
||||
use crate::integrate::server::ServerProvider;
|
||||
use lib_dispatch::prelude::ToBytes;
|
||||
@ -45,7 +44,6 @@ impl FolderDepsResolver {
|
||||
let user: Arc<dyn FolderUser> = Arc::new(FolderUserImpl {
|
||||
user_manager: user_manager.clone(),
|
||||
database_manager: Arc::downgrade(database_manager),
|
||||
server_provider: server_provider.clone(),
|
||||
});
|
||||
|
||||
let handlers = folder_operation_handlers(document_manager.clone(), database_manager.clone());
|
||||
@ -81,7 +79,6 @@ fn folder_operation_handlers(
|
||||
struct FolderUserImpl {
|
||||
user_manager: Weak<UserManager>,
|
||||
database_manager: Weak<DatabaseManager>,
|
||||
server_provider: Arc<ServerProvider>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
@ -112,46 +109,29 @@ impl FolderUser for FolderUserImpl {
|
||||
|
||||
async fn import_appflowy_data_folder(
|
||||
&self,
|
||||
workspace_id: &str,
|
||||
path: &str,
|
||||
container_name: &str,
|
||||
) -> Result<ParentChildViews, FlowyError> {
|
||||
container_name: Option<String>,
|
||||
) -> Result<Vec<ParentChildViews>, FlowyError> {
|
||||
match (self.user_manager.upgrade(), self.database_manager.upgrade()) {
|
||||
(Some(user_manager), Some(data_manager)) => {
|
||||
let source = ImportDataSource::AppFlowyDataFolder {
|
||||
path: path.to_string(),
|
||||
container_name: container_name.to_string(),
|
||||
container_name,
|
||||
};
|
||||
let cloned_user_manager = user_manager.clone();
|
||||
let import_data =
|
||||
tokio::task::spawn_blocking(move || cloned_user_manager.import_data(source))
|
||||
.await
|
||||
.map_err(internal_error)??;
|
||||
|
||||
let import_data = user_manager.import_data_from_source(source).await?;
|
||||
match import_data {
|
||||
ImportData::AppFlowyDataFolder {
|
||||
view,
|
||||
views,
|
||||
database_view_ids_by_database_id,
|
||||
row_object_ids,
|
||||
database_object_ids,
|
||||
document_object_ids,
|
||||
row_object_ids: _,
|
||||
database_object_ids: _,
|
||||
document_object_ids: _,
|
||||
} => {
|
||||
let uid = self.user_id()?;
|
||||
self
|
||||
.upload_collab_data(
|
||||
workspace_id,
|
||||
row_object_ids,
|
||||
database_object_ids,
|
||||
document_object_ids,
|
||||
uid,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let _uid = self.user_id()?;
|
||||
data_manager
|
||||
.track_database(database_view_ids_by_database_id)
|
||||
.await?;
|
||||
|
||||
Ok(view)
|
||||
Ok(views)
|
||||
},
|
||||
}
|
||||
},
|
||||
@ -160,106 +140,6 @@ impl FolderUser for FolderUserImpl {
|
||||
}
|
||||
}
|
||||
|
||||
impl FolderUserImpl {
|
||||
async fn upload_collab_data(
|
||||
&self,
|
||||
workspace_id: &str,
|
||||
row_object_ids: Vec<String>,
|
||||
database_object_ids: Vec<String>,
|
||||
document_object_ids: Vec<String>,
|
||||
uid: i64,
|
||||
) -> Result<(), FlowyError> {
|
||||
// Only support uploading the collab data when the current server is AppFlowy Cloud server
|
||||
if self.server_provider.get_appflowy_cloud_server().is_err() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let collab_db = self
|
||||
.collab_db(uid)
|
||||
.unwrap()
|
||||
.upgrade()
|
||||
.ok_or(FlowyError::new(
|
||||
ErrorCode::Internal,
|
||||
"Can't get the collab db",
|
||||
))?;
|
||||
|
||||
let object_by_collab_type = tokio::task::spawn_blocking(move || {
|
||||
let collab_read = collab_db.read_txn();
|
||||
let mut object_by_collab_type = HashMap::new();
|
||||
object_by_collab_type.insert(
|
||||
CollabType::Database,
|
||||
load_and_process_collab_data(uid, &collab_read, &database_object_ids),
|
||||
);
|
||||
|
||||
object_by_collab_type.insert(
|
||||
CollabType::Document,
|
||||
load_and_process_collab_data(uid, &collab_read, &document_object_ids),
|
||||
);
|
||||
|
||||
object_by_collab_type.insert(
|
||||
CollabType::DatabaseRow,
|
||||
load_and_process_collab_data(uid, &collab_read, &row_object_ids),
|
||||
);
|
||||
|
||||
object_by_collab_type
|
||||
})
|
||||
.await
|
||||
.map_err(internal_error)?;
|
||||
|
||||
// Upload
|
||||
let mut size_counter = 0;
|
||||
let mut objects: Vec<FolderCollabParams> = vec![];
|
||||
let upload_size_limit = 2 * 1024 * 1024;
|
||||
for (collab_type, encoded_v1_by_oid) in object_by_collab_type {
|
||||
info!(
|
||||
"Batch import collab:{} ids: {:?}",
|
||||
collab_type,
|
||||
encoded_v1_by_oid.keys(),
|
||||
);
|
||||
for (oid, encoded_v1) in encoded_v1_by_oid {
|
||||
let obj_size = encoded_v1.len();
|
||||
if size_counter + obj_size > upload_size_limit && !objects.is_empty() {
|
||||
// When the limit is exceeded, batch create with the current list of objects
|
||||
// and reset for the next batch.
|
||||
self
|
||||
.server_provider
|
||||
.batch_create_collab_object(workspace_id, objects)
|
||||
.await?;
|
||||
objects = Vec::new();
|
||||
size_counter = 0;
|
||||
}
|
||||
|
||||
// Add the current object to the batch.
|
||||
objects.push(FolderCollabParams {
|
||||
object_id: oid,
|
||||
encoded_collab_v1: encoded_v1,
|
||||
collab_type: collab_type.clone(),
|
||||
override_if_exist: false,
|
||||
});
|
||||
size_counter += obj_size;
|
||||
}
|
||||
}
|
||||
|
||||
// After the loop, upload any remaining objects.
|
||||
if !objects.is_empty() {
|
||||
info!(
|
||||
"Batch create collab objects: {}, payload size: {}",
|
||||
objects
|
||||
.iter()
|
||||
.map(|o| o.object_id.clone())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", "),
|
||||
size_counter
|
||||
);
|
||||
self
|
||||
.server_provider
|
||||
.batch_create_collab_object(workspace_id, objects)
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
struct DocumentFolderOperation(Arc<DocumentManager>);
|
||||
impl FolderOperationHandler for DocumentFolderOperation {
|
||||
fn create_workspace_view(
|
||||
@ -581,24 +461,3 @@ pub fn layout_type_from_view_layout(layout: ViewLayoutPB) -> DatabaseLayoutPB {
|
||||
ViewLayoutPB::Document => DatabaseLayoutPB::Grid,
|
||||
}
|
||||
}
|
||||
|
||||
fn load_and_process_collab_data<'a, R>(
|
||||
uid: i64,
|
||||
collab_read: &R,
|
||||
object_ids: &[String],
|
||||
) -> HashMap<String, Vec<u8>>
|
||||
where
|
||||
R: YrsDocAction<'a>,
|
||||
PersistenceError: From<R::Error>,
|
||||
{
|
||||
load_collab_by_oid(uid, collab_read, object_ids)
|
||||
.into_iter()
|
||||
.filter_map(|(oid, collab)| {
|
||||
collab
|
||||
.encode_collab_v1()
|
||||
.encode_to_bytes()
|
||||
.ok()
|
||||
.map(|encoded_v1| (oid, encoded_v1))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
@ -93,6 +93,10 @@ impl ServerProvider {
|
||||
*self.server.write() = server_type;
|
||||
}
|
||||
|
||||
pub fn get_user_authenticator(&self) -> Authenticator {
|
||||
self.user_authenticator.read().clone()
|
||||
}
|
||||
|
||||
pub fn get_appflowy_cloud_server(&self) -> FlowyResult<Arc<dyn AppFlowyServer>> {
|
||||
let server = self.get_server(&Server::AppFlowyCloud)?;
|
||||
Ok(server)
|
||||
|
@ -196,7 +196,7 @@ impl FolderCloudService for ServerProvider {
|
||||
})
|
||||
}
|
||||
|
||||
fn batch_create_collab_object(
|
||||
fn batch_create_collab_object_f(
|
||||
&self,
|
||||
workspace_id: &str,
|
||||
objects: Vec<FolderCollabParams>,
|
||||
@ -206,7 +206,7 @@ impl FolderCloudService for ServerProvider {
|
||||
FutureResult::new(async move {
|
||||
server?
|
||||
.folder_service()
|
||||
.batch_create_collab_object(&workspace_id, objects)
|
||||
.batch_create_collab_object_f(&workspace_id, objects)
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
@ -39,7 +39,8 @@ pub trait FolderCloudService: Send + Sync + 'static {
|
||||
object_id: &str,
|
||||
) -> FutureResult<CollabDocState, Error>;
|
||||
|
||||
fn batch_create_collab_object(
|
||||
/// The suffix 'f' in the method name serves as a workaround to avoid naming conflicts with the existing method `get_collab_doc_state`.
|
||||
fn batch_create_collab_object_f(
|
||||
&self,
|
||||
workspace_id: &str,
|
||||
objects: Vec<FolderCollabParams>,
|
||||
|
@ -3,7 +3,7 @@ use std::collections::HashMap;
|
||||
|
||||
pub enum ImportData {
|
||||
AppFlowyDataFolder {
|
||||
view: ParentChildViews,
|
||||
views: Vec<ParentChildViews>,
|
||||
/// Used to update the [DatabaseViewTrackerList] when importing the database.
|
||||
database_view_ids_by_database_id: HashMap<String, Vec<String>>,
|
||||
row_object_ids: Vec<String>,
|
||||
@ -11,3 +11,9 @@ pub enum ImportData {
|
||||
database_object_ids: Vec<String>,
|
||||
},
|
||||
}
|
||||
|
||||
pub struct ImportViews {
|
||||
pub views: Vec<ParentChildViews>,
|
||||
/// Used to update the [DatabaseViewTrackerList] when importing the database.
|
||||
pub database_view_ids_by_database_id: HashMap<String, Vec<String>>,
|
||||
}
|
||||
|
@ -91,6 +91,6 @@ pub struct ImportAppFlowyDataPB {
|
||||
#[validate(custom = "lib_infra::validator_fn::required_not_empty_str")]
|
||||
pub path: String,
|
||||
|
||||
#[pb(index = 2)]
|
||||
pub import_container_name: String,
|
||||
#[pb(index = 2, one_of)]
|
||||
pub import_container_name: Option<String>,
|
||||
}
|
||||
|
@ -42,12 +42,14 @@ pub trait FolderUser: Send + Sync {
|
||||
fn token(&self) -> Result<Option<String>, FlowyError>;
|
||||
fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError>;
|
||||
|
||||
/// Import appflowy data from the given path.
|
||||
/// If the container name is not empty, then the data will be imported to the given container.
|
||||
/// Otherwise, the data will be imported to the current workspace.
|
||||
async fn import_appflowy_data_folder(
|
||||
&self,
|
||||
workspace_id: &str,
|
||||
path: &str,
|
||||
container_name: &str,
|
||||
) -> Result<ParentChildViews, FlowyError>;
|
||||
container_name: Option<String>,
|
||||
) -> Result<Vec<ParentChildViews>, FlowyError>;
|
||||
}
|
||||
|
||||
pub struct FolderManager {
|
||||
@ -832,20 +834,22 @@ impl FolderManager {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn import_appflowy_data(&self, path: String, name: String) -> Result<(), FlowyError> {
|
||||
pub async fn import_appflowy_data(
|
||||
&self,
|
||||
path: String,
|
||||
name: Option<String>,
|
||||
) -> Result<(), FlowyError> {
|
||||
let (tx, rx) = tokio::sync::oneshot::channel();
|
||||
let workspace_id = self.get_current_workspace_id().await?;
|
||||
let folder = self.mutex_folder.clone();
|
||||
let user = self.user.clone();
|
||||
|
||||
tokio::spawn(async move {
|
||||
match user
|
||||
.import_appflowy_data_folder(&workspace_id, &path, &name)
|
||||
.await
|
||||
{
|
||||
Ok(view) => {
|
||||
match user.import_appflowy_data_folder(&path, name).await {
|
||||
Ok(views) => {
|
||||
if let Some(folder) = &*folder.lock() {
|
||||
insert_parent_child_views(folder, view);
|
||||
for view in views {
|
||||
insert_parent_child_views(folder, view);
|
||||
}
|
||||
}
|
||||
let _ = tx.send(Ok(()));
|
||||
},
|
||||
@ -856,7 +860,6 @@ impl FolderManager {
|
||||
});
|
||||
|
||||
rx.await.map_err(internal_error)??;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -116,7 +116,7 @@ where
|
||||
})
|
||||
}
|
||||
|
||||
fn batch_create_collab_object(
|
||||
fn batch_create_collab_object_f(
|
||||
&self,
|
||||
workspace_id: &str,
|
||||
objects: Vec<FolderCollabParams>,
|
||||
|
@ -9,7 +9,7 @@ use collab_entity::CollabObject;
|
||||
use parking_lot::RwLock;
|
||||
|
||||
use flowy_error::{ErrorCode, FlowyError};
|
||||
use flowy_user_deps::cloud::{UserCloudService, UserUpdate, UserUpdateReceiver};
|
||||
use flowy_user_deps::cloud::{UserCloudService, UserCollabParams, UserUpdate, UserUpdateReceiver};
|
||||
use flowy_user_deps::entities::*;
|
||||
use lib_infra::box_any::BoxAny;
|
||||
use lib_infra::future::FutureResult;
|
||||
@ -251,6 +251,31 @@ where
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
fn batch_create_collab_object(
|
||||
&self,
|
||||
workspace_id: &str,
|
||||
objects: Vec<UserCollabParams>,
|
||||
) -> FutureResult<(), Error> {
|
||||
let workspace_id = workspace_id.to_string();
|
||||
let try_get_client = self.server.try_get_client();
|
||||
FutureResult::new(async move {
|
||||
let params = objects
|
||||
.into_iter()
|
||||
.map(|object| CollabParams {
|
||||
object_id: object.object_id,
|
||||
encoded_collab_v1: object.encoded_collab_v1,
|
||||
collab_type: object.collab_type,
|
||||
override_if_exist: false,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
try_get_client?
|
||||
.batch_create_collab(&workspace_id, params)
|
||||
.await
|
||||
.map_err(FlowyError::from)?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn user_sign_up_request(
|
||||
|
@ -67,7 +67,7 @@ impl FolderCloudService for LocalServerFolderCloudServiceImpl {
|
||||
})
|
||||
}
|
||||
|
||||
fn batch_create_collab_object(
|
||||
fn batch_create_collab_object_f(
|
||||
&self,
|
||||
_workspace_id: &str,
|
||||
_objects: Vec<FolderCollabParams>,
|
||||
|
@ -1,6 +1,6 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Error;
|
||||
use anyhow::{anyhow, Error};
|
||||
use collab::core::collab::CollabDocState;
|
||||
use collab_entity::CollabObject;
|
||||
use lazy_static::lazy_static;
|
||||
@ -8,7 +8,7 @@ use parking_lot::Mutex;
|
||||
use uuid::Uuid;
|
||||
|
||||
use flowy_error::FlowyError;
|
||||
use flowy_user_deps::cloud::UserCloudService;
|
||||
use flowy_user_deps::cloud::{UserCloudService, UserCollabParams};
|
||||
use flowy_user_deps::entities::*;
|
||||
use flowy_user_deps::DEFAULT_USER_NAME;
|
||||
use lib_infra::box_any::BoxAny;
|
||||
@ -149,6 +149,14 @@ impl UserCloudService for LocalServerUserAuthServiceImpl {
|
||||
) -> FutureResult<(), FlowyError> {
|
||||
FutureResult::new(async { Ok(()) })
|
||||
}
|
||||
|
||||
fn batch_create_collab_object(
|
||||
&self,
|
||||
_workspace_id: &str,
|
||||
_objects: Vec<UserCollabParams>,
|
||||
) -> FutureResult<(), Error> {
|
||||
FutureResult::new(async { Err(anyhow!("local server doesn't support create collab object")) })
|
||||
}
|
||||
}
|
||||
|
||||
fn make_user_workspace() -> UserWorkspace {
|
||||
|
@ -154,7 +154,7 @@ where
|
||||
FutureResult::new(async { rx.await? })
|
||||
}
|
||||
|
||||
fn batch_create_collab_object(
|
||||
fn batch_create_collab_object_f(
|
||||
&self,
|
||||
_workspace_id: &str,
|
||||
_objects: Vec<FolderCollabParams>,
|
||||
|
@ -5,7 +5,7 @@ use std::pin::Pin;
|
||||
use std::sync::{Arc, Weak};
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Error;
|
||||
use anyhow::{anyhow, Error};
|
||||
use collab::core::collab::{CollabDocState, MutexCollab};
|
||||
use collab::core::origin::CollabOrigin;
|
||||
use collab_entity::{CollabObject, CollabType};
|
||||
@ -326,6 +326,18 @@ where
|
||||
});
|
||||
FutureResult::new(async { rx.await? })
|
||||
}
|
||||
|
||||
fn batch_create_collab_object(
|
||||
&self,
|
||||
_workspace_id: &str,
|
||||
_objects: Vec<UserCollabParams>,
|
||||
) -> FutureResult<(), Error> {
|
||||
FutureResult::new(async {
|
||||
Err(anyhow!(
|
||||
"supabase server doesn't support batch create collab"
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CreateCollabAction {
|
||||
|
@ -5,13 +5,14 @@ use std::sync::Arc;
|
||||
|
||||
use anyhow::Error;
|
||||
use collab::core::collab::CollabDocState;
|
||||
use collab_entity::CollabObject;
|
||||
use collab_entity::{CollabObject, CollabType};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use tokio_stream::wrappers::WatchStream;
|
||||
use uuid::Uuid;
|
||||
|
||||
use flowy_error::{ErrorCode, FlowyError};
|
||||
|
||||
use lib_infra::box_any::BoxAny;
|
||||
use lib_infra::future::FutureResult;
|
||||
|
||||
@ -216,6 +217,12 @@ pub trait UserCloudService: Send + Sync + 'static {
|
||||
data: Vec<u8>,
|
||||
override_if_exist: bool,
|
||||
) -> FutureResult<(), FlowyError>;
|
||||
|
||||
fn batch_create_collab_object(
|
||||
&self,
|
||||
workspace_id: &str,
|
||||
objects: Vec<UserCollabParams>,
|
||||
) -> FutureResult<(), Error>;
|
||||
}
|
||||
|
||||
pub type UserUpdateReceiver = tokio::sync::mpsc::Receiver<UserUpdate>;
|
||||
@ -236,3 +243,10 @@ pub fn uuid_from_map(map: &HashMap<String, String>) -> Result<Uuid, Error> {
|
||||
let uuid = Uuid::from_str(uuid)?;
|
||||
Ok(uuid)
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct UserCollabParams {
|
||||
pub object_id: String,
|
||||
pub encoded_collab_v1: Vec<u8>,
|
||||
pub collab_type: CollabType,
|
||||
}
|
||||
|
@ -348,6 +348,10 @@ impl Authenticator {
|
||||
pub fn is_local(&self) -> bool {
|
||||
matches!(self, Authenticator::Local)
|
||||
}
|
||||
|
||||
pub fn is_appflowy_cloud(&self) -> bool {
|
||||
matches!(self, Authenticator::AppFlowyCloud)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<i32> for Authenticator {
|
||||
|
@ -37,6 +37,9 @@ use crate::migrations::MigrationUser;
|
||||
use crate::services::cloud_config::get_cloud_config;
|
||||
use crate::services::collab_interact::{CollabInteract, DefaultCollabInteract};
|
||||
use crate::services::data_import::importer::{import_data, ImportDataSource};
|
||||
use crate::services::data_import::{
|
||||
get_appflowy_data_folder_import_context, upload_imported_data, ImportContext,
|
||||
};
|
||||
use crate::services::db::UserDB;
|
||||
use crate::services::entities::{ResumableSignUp, Session, UserConfig, UserPaths};
|
||||
use crate::services::user_awareness::UserAwarenessDataSource;
|
||||
@ -399,31 +402,43 @@ impl UserManager {
|
||||
} else {
|
||||
UserAwarenessDataSource::Remote
|
||||
};
|
||||
self
|
||||
.save_auth_data(&response, authenticator, &new_session)
|
||||
.await?;
|
||||
|
||||
if response.is_new_user {
|
||||
if let Some(old_user) = migration_user {
|
||||
let new_user = MigrationUser {
|
||||
user_profile: new_user_profile.clone(),
|
||||
session: new_session.clone(),
|
||||
};
|
||||
event!(
|
||||
tracing::Level::INFO,
|
||||
"Migrate anon user data from {:?} to {:?}",
|
||||
old_user.user_profile.uid,
|
||||
new_user.user_profile.uid
|
||||
new_user_profile.uid
|
||||
);
|
||||
self
|
||||
.migrate_anon_user_data_to_cloud(&old_user, &new_user, authenticator)
|
||||
.migrate_anon_user_data_to_cloud(
|
||||
&old_user,
|
||||
&MigrationUser {
|
||||
user_profile: new_user_profile.clone(),
|
||||
session: new_session.clone(),
|
||||
},
|
||||
authenticator,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// let old_collab_db = self.database.get_collab_db(old_user.session.user_id)?;
|
||||
// self
|
||||
// .import_appflowy_data_with_context(ImportContext {
|
||||
// imported_session: old_user.session.clone(),
|
||||
// imported_collab_db: old_collab_db,
|
||||
// container_name: None,
|
||||
// })
|
||||
// .await?;
|
||||
|
||||
self.remove_anon_user();
|
||||
let _ = self.database.close(old_user.session.user_id);
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
.save_auth_data(&response, authenticator, &new_session)
|
||||
.await?;
|
||||
|
||||
self
|
||||
.user_status_callback
|
||||
.read()
|
||||
@ -663,12 +678,23 @@ impl UserManager {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn import_data(&self, source: ImportDataSource) -> Result<ImportData, FlowyError> {
|
||||
let session = self.get_session()?;
|
||||
let collab_db = self.database.get_collab_db(session.user_id)?;
|
||||
let import_result = import_data(&session, source, collab_db)
|
||||
.map_err(|err| FlowyError::new(ErrorCode::AppFlowyDataFolderImportError, err.to_string()))?;
|
||||
Ok(import_result)
|
||||
pub async fn import_data_from_source(
|
||||
&self,
|
||||
source: ImportDataSource,
|
||||
) -> Result<ImportData, FlowyError> {
|
||||
match source {
|
||||
ImportDataSource::AppFlowyDataFolder {
|
||||
path,
|
||||
container_name,
|
||||
} => {
|
||||
let context = get_appflowy_data_folder_import_context(&path)
|
||||
.map_err(|err| {
|
||||
FlowyError::new(ErrorCode::AppFlowyDataFolderImportError, err.to_string())
|
||||
})?
|
||||
.with_container_name(container_name);
|
||||
self.import_appflowy_data(context).await
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn set_session(&self, session: Option<Session>) -> Result<(), FlowyError> {
|
||||
@ -821,6 +847,32 @@ impl UserManager {
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn import_appflowy_data(&self, context: ImportContext) -> Result<ImportData, FlowyError> {
|
||||
let session = self.get_session()?;
|
||||
let uid = session.user_id;
|
||||
let user_collab_db = self.database.get_collab_db(session.user_id)?;
|
||||
let cloned_collab_db = user_collab_db.clone();
|
||||
let import_data = tokio::task::spawn_blocking(move || {
|
||||
import_data(&session, context, cloned_collab_db)
|
||||
.map_err(|err| FlowyError::new(ErrorCode::AppFlowyDataFolderImportError, err.to_string()))
|
||||
})
|
||||
.await
|
||||
.map_err(internal_error)??;
|
||||
let user = self.get_user_profile_from_disk(uid).await?;
|
||||
|
||||
upload_imported_data(
|
||||
uid,
|
||||
user_collab_db,
|
||||
&user.workspace_id,
|
||||
&user.authenticator,
|
||||
&import_data,
|
||||
self.cloud_services.get_user_service()?,
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(import_data)
|
||||
}
|
||||
}
|
||||
|
||||
fn current_authenticator() -> Authenticator {
|
||||
|
@ -14,16 +14,54 @@ use collab_database::database::{
|
||||
use collab_database::rows::{database_row_document_id_from_row_id, mut_row_with_collab, RowId};
|
||||
use collab_database::user::DatabaseViewTrackerList;
|
||||
use collab_document::document_data::default_document_collab_data;
|
||||
use collab_entity::CollabType;
|
||||
use collab_folder::{Folder, UserId, View, ViewIdentifier, ViewLayout};
|
||||
use collab_integrate::{PersistenceError, RocksCollabDB, YrsDocAction};
|
||||
use flowy_error::{internal_error, FlowyError};
|
||||
use flowy_folder_deps::cloud::gen_view_id;
|
||||
use flowy_folder_deps::entities::ImportData;
|
||||
use flowy_folder_deps::folder_builder::{ParentChildViews, ViewBuilder};
|
||||
use flowy_sqlite::kv::StorePreferences;
|
||||
use flowy_user_deps::cloud::{UserCloudService, UserCollabParams};
|
||||
use flowy_user_deps::entities::Authenticator;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::Arc;
|
||||
use tracing::info;
|
||||
|
||||
pub(crate) struct ImportContext {
|
||||
pub imported_session: Session,
|
||||
pub imported_collab_db: Arc<RocksCollabDB>,
|
||||
pub container_name: Option<String>,
|
||||
}
|
||||
|
||||
impl ImportContext {
|
||||
pub fn with_container_name(mut self, container_name: Option<String>) -> Self {
|
||||
self.container_name = container_name;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_appflowy_data_folder_import_context(path: &str) -> anyhow::Result<ImportContext> {
|
||||
let user_paths = UserPaths::new(path.to_string());
|
||||
let other_store_preferences = Arc::new(StorePreferences::new(path)?);
|
||||
migrate_session_with_user_uuid("appflowy_session_cache", &other_store_preferences);
|
||||
let session = other_store_preferences
|
||||
.get_object::<Session>("appflowy_session_cache")
|
||||
.ok_or(anyhow!(
|
||||
"Can't find the session cache in the appflowy data folder at path: {}",
|
||||
path
|
||||
))?;
|
||||
|
||||
let collab_db_path = user_paths.collab_db_path(session.user_id);
|
||||
let collab_db = Arc::new(RocksCollabDB::open(collab_db_path)?);
|
||||
Ok(ImportContext {
|
||||
imported_session: session,
|
||||
imported_collab_db: collab_db,
|
||||
container_name: None,
|
||||
})
|
||||
}
|
||||
|
||||
/// This path refers to the directory where AppFlowy stores its data. The directory structure is as follows:
|
||||
/// root folder:
|
||||
@ -33,49 +71,43 @@ use std::sync::Arc;
|
||||
|
||||
pub(crate) fn import_appflowy_data_folder(
|
||||
session: &Session,
|
||||
path: String,
|
||||
container_name: String,
|
||||
workspace_id: &str,
|
||||
collab_db: &Arc<RocksCollabDB>,
|
||||
import_context: ImportContext,
|
||||
) -> anyhow::Result<ImportData> {
|
||||
let user_paths = UserPaths::new(path.clone());
|
||||
let other_store_preferences = Arc::new(StorePreferences::new(&path)?);
|
||||
migrate_session_with_user_uuid("appflowy_session_cache", &other_store_preferences);
|
||||
let other_session = other_store_preferences
|
||||
.get_object::<Session>("appflowy_session_cache")
|
||||
.ok_or(anyhow!(
|
||||
"Can't find the session cache in the appflowy data folder at path: {}",
|
||||
path
|
||||
))?;
|
||||
let imported_session = import_context.imported_session;
|
||||
let imported_collab_db = import_context.imported_collab_db;
|
||||
let container_name = import_context.container_name;
|
||||
let imported_collab_read_txn = imported_collab_db.read_txn();
|
||||
|
||||
let other_collab_db = Arc::new(RocksCollabDB::open(
|
||||
user_paths.collab_db_path(other_session.user_id),
|
||||
)?);
|
||||
let other_collab_read_txn = other_collab_db.read_txn();
|
||||
let mut database_view_ids_by_database_id: HashMap<String, Vec<String>> = HashMap::new();
|
||||
let row_object_ids = Mutex::new(HashSet::new());
|
||||
let document_object_ids = Mutex::new(HashSet::new());
|
||||
let database_object_ids = Mutex::new(HashSet::new());
|
||||
let import_container_view_id = gen_view_id().to_string();
|
||||
let import_container_view_id = match &container_name {
|
||||
None => workspace_id.to_string(),
|
||||
Some(_) => gen_view_id().to_string(),
|
||||
};
|
||||
|
||||
let view = collab_db.with_write_txn(|collab_write_txn| {
|
||||
let views = collab_db.with_write_txn(|collab_write_txn| {
|
||||
// use the old_to_new_id_map to keep track of the other collab object id and the new collab object id
|
||||
let old_to_new_id_map = Arc::new(Mutex::new(OldToNewIdMap::new()));
|
||||
let mut all_object_ids = other_collab_read_txn
|
||||
let mut all_object_ids = imported_collab_read_txn
|
||||
.get_all_docs()
|
||||
.map(|iter| iter.collect::<Vec<String>>())
|
||||
.unwrap_or_default();
|
||||
|
||||
// when doing import, we don't want to import the user workspace, database view tracker and the user awareness
|
||||
all_object_ids.retain(|id| id != &other_session.user_workspace.id);
|
||||
all_object_ids.retain(|id| id != &other_session.user_workspace.database_view_tracker_id);
|
||||
all_object_ids.retain(|id| id != &imported_session.user_workspace.id);
|
||||
all_object_ids.retain(|id| id != &imported_session.user_workspace.database_view_tracker_id);
|
||||
all_object_ids
|
||||
.retain(|id| id != &awareness_oid_from_user_uuid(&other_session.user_uuid).to_string());
|
||||
.retain(|id| id != &awareness_oid_from_user_uuid(&imported_session.user_uuid).to_string());
|
||||
|
||||
// import database view tracker
|
||||
migrate_database_view_tracker(
|
||||
&mut old_to_new_id_map.lock(),
|
||||
&other_session,
|
||||
&other_collab_read_txn,
|
||||
&imported_session,
|
||||
&imported_collab_read_txn,
|
||||
&mut database_view_ids_by_database_id,
|
||||
&database_object_ids,
|
||||
)?;
|
||||
@ -90,8 +122,8 @@ pub(crate) fn import_appflowy_data_folder(
|
||||
|
||||
// load other collab objects
|
||||
let collab_by_oid = load_collab_by_oid(
|
||||
other_session.user_id,
|
||||
&other_collab_read_txn,
|
||||
imported_session.user_id,
|
||||
&imported_collab_read_txn,
|
||||
&all_object_ids,
|
||||
);
|
||||
// import the database
|
||||
@ -119,42 +151,46 @@ pub(crate) fn import_appflowy_data_folder(
|
||||
let child_views = import_workspace_views(
|
||||
&import_container_view_id,
|
||||
&mut old_to_new_id_map.lock(),
|
||||
&other_session,
|
||||
&other_collab_read_txn,
|
||||
&imported_session,
|
||||
&imported_collab_read_txn,
|
||||
)?;
|
||||
|
||||
let name = if container_name.is_empty() {
|
||||
format!(
|
||||
"import_{}",
|
||||
chrono::Local::now().format("%Y-%m-%d %H:%M:%S")
|
||||
)
|
||||
} else {
|
||||
container_name
|
||||
};
|
||||
match container_name {
|
||||
None => Ok(child_views),
|
||||
Some(container_name) => {
|
||||
let name = if container_name.is_empty() {
|
||||
format!(
|
||||
"import_{}",
|
||||
chrono::Local::now().format("%Y-%m-%d %H:%M:%S")
|
||||
)
|
||||
} else {
|
||||
container_name
|
||||
};
|
||||
|
||||
// create the content for the container view
|
||||
let import_container_doc_state = default_document_collab_data(&import_container_view_id)
|
||||
.doc_state
|
||||
.to_vec();
|
||||
import_collab_object_with_doc_state(
|
||||
import_container_doc_state,
|
||||
session.user_id,
|
||||
&import_container_view_id,
|
||||
collab_write_txn,
|
||||
)?;
|
||||
// create the content for the container view
|
||||
let import_container_doc_state = default_document_collab_data(&import_container_view_id)
|
||||
.doc_state
|
||||
.to_vec();
|
||||
import_collab_object_with_doc_state(
|
||||
import_container_doc_state,
|
||||
session.user_id,
|
||||
&import_container_view_id,
|
||||
collab_write_txn,
|
||||
)?;
|
||||
|
||||
let import_container_view =
|
||||
ViewBuilder::new(session.user_id, session.user_workspace.id.clone())
|
||||
.with_view_id(import_container_view_id)
|
||||
.with_layout(ViewLayout::Document)
|
||||
.with_name(name)
|
||||
.with_child_views(child_views)
|
||||
.build();
|
||||
|
||||
Ok(import_container_view)
|
||||
let import_container_view =
|
||||
ViewBuilder::new(session.user_id, session.user_workspace.id.clone())
|
||||
.with_view_id(import_container_view_id)
|
||||
.with_layout(ViewLayout::Document)
|
||||
.with_name(name)
|
||||
.with_child_views(child_views)
|
||||
.build();
|
||||
Ok(vec![import_container_view])
|
||||
},
|
||||
}
|
||||
})?;
|
||||
Ok(ImportData::AppFlowyDataFolder {
|
||||
view,
|
||||
views,
|
||||
database_view_ids_by_database_id,
|
||||
row_object_ids: row_object_ids.into_inner().into_iter().collect(),
|
||||
database_object_ids: database_object_ids.into_inner().into_iter().collect(),
|
||||
@ -482,3 +518,124 @@ impl DerefMut for OldToNewIdMap {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn upload_imported_data(
|
||||
uid: i64,
|
||||
user_collab_db: Arc<RocksCollabDB>,
|
||||
workspace_id: &str,
|
||||
user_authenticator: &Authenticator,
|
||||
import_data: &ImportData,
|
||||
user_cloud_service: Arc<dyn UserCloudService>,
|
||||
) -> Result<(), FlowyError> {
|
||||
// Only support uploading the collab data when the current server is AppFlowy Cloud server
|
||||
if !user_authenticator.is_appflowy_cloud() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let (row_object_ids, document_object_ids, database_object_ids) = match import_data {
|
||||
ImportData::AppFlowyDataFolder {
|
||||
views: _,
|
||||
database_view_ids_by_database_id: _,
|
||||
row_object_ids,
|
||||
document_object_ids,
|
||||
database_object_ids,
|
||||
} => (
|
||||
row_object_ids.clone(),
|
||||
document_object_ids.clone(),
|
||||
database_object_ids.clone(),
|
||||
),
|
||||
};
|
||||
|
||||
let object_by_collab_type = tokio::task::spawn_blocking(move || {
|
||||
let collab_read = user_collab_db.read_txn();
|
||||
let mut object_by_collab_type = HashMap::new();
|
||||
object_by_collab_type.insert(
|
||||
CollabType::Database,
|
||||
load_and_process_collab_data(uid, &collab_read, &database_object_ids),
|
||||
);
|
||||
|
||||
object_by_collab_type.insert(
|
||||
CollabType::Document,
|
||||
load_and_process_collab_data(uid, &collab_read, &document_object_ids),
|
||||
);
|
||||
|
||||
object_by_collab_type.insert(
|
||||
CollabType::DatabaseRow,
|
||||
load_and_process_collab_data(uid, &collab_read, &row_object_ids),
|
||||
);
|
||||
|
||||
object_by_collab_type
|
||||
})
|
||||
.await
|
||||
.map_err(internal_error)?;
|
||||
|
||||
// Upload
|
||||
let mut size_counter = 0;
|
||||
let mut objects: Vec<UserCollabParams> = vec![];
|
||||
let upload_size_limit = 2 * 1024 * 1024;
|
||||
for (collab_type, encoded_v1_by_oid) in object_by_collab_type {
|
||||
info!(
|
||||
"Batch import collab:{} ids: {:?}",
|
||||
collab_type,
|
||||
encoded_v1_by_oid.keys(),
|
||||
);
|
||||
for (oid, encoded_v1) in encoded_v1_by_oid {
|
||||
let obj_size = encoded_v1.len();
|
||||
if size_counter + obj_size > upload_size_limit && !objects.is_empty() {
|
||||
// When the limit is exceeded, batch create with the current list of objects
|
||||
// and reset for the next batch.
|
||||
user_cloud_service
|
||||
.batch_create_collab_object(workspace_id, objects)
|
||||
.await?;
|
||||
objects = Vec::new();
|
||||
size_counter = 0;
|
||||
}
|
||||
|
||||
// Add the current object to the batch.
|
||||
objects.push(UserCollabParams {
|
||||
object_id: oid,
|
||||
encoded_collab_v1: encoded_v1,
|
||||
collab_type: collab_type.clone(),
|
||||
});
|
||||
size_counter += obj_size;
|
||||
}
|
||||
}
|
||||
|
||||
// After the loop, upload any remaining objects.
|
||||
if !objects.is_empty() {
|
||||
info!(
|
||||
"Batch create collab objects: {}, payload size: {}",
|
||||
objects
|
||||
.iter()
|
||||
.map(|o| o.object_id.clone())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", "),
|
||||
size_counter
|
||||
);
|
||||
user_cloud_service
|
||||
.batch_create_collab_object(workspace_id, objects)
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn load_and_process_collab_data<'a, R>(
|
||||
uid: i64,
|
||||
collab_read: &R,
|
||||
object_ids: &[String],
|
||||
) -> HashMap<String, Vec<u8>>
|
||||
where
|
||||
R: YrsDocAction<'a>,
|
||||
PersistenceError: From<R::Error>,
|
||||
{
|
||||
load_collab_by_oid(uid, collab_read, object_ids)
|
||||
.into_iter()
|
||||
.filter_map(|(oid, collab)| {
|
||||
collab
|
||||
.encode_collab_v1()
|
||||
.encode_to_bytes()
|
||||
.ok()
|
||||
.map(|encoded_v1| (oid, encoded_v1))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
@ -3,6 +3,7 @@ use crate::services::entities::Session;
|
||||
use collab_integrate::{PersistenceError, RocksCollabDB, YrsDocAction};
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::services::data_import::ImportContext;
|
||||
use collab::preclude::Collab;
|
||||
use flowy_folder_deps::entities::ImportData;
|
||||
use std::sync::Arc;
|
||||
@ -10,21 +11,19 @@ use std::sync::Arc;
|
||||
/// Sources from which user data can be imported.
pub enum ImportDataSource {
  /// An AppFlowy data folder on the local file system.
  AppFlowyDataFolder {
    /// Path to the root of the data folder.
    path: String,
    /// When `Some`, imported views are nested under a container view with
    /// this name; when `None`, they are merged into the current workspace.
    container_name: Option<String>,
  },
}
|
||||
|
||||
/// Import appflowy data from the given path.
|
||||
/// If the container name is not empty, then the data will be imported to the given container.
|
||||
/// Otherwise, the data will be imported to the current workspace.
|
||||
pub(crate) fn import_data(
|
||||
session: &Session,
|
||||
source: ImportDataSource,
|
||||
context: ImportContext,
|
||||
collab_db: Arc<RocksCollabDB>,
|
||||
) -> anyhow::Result<ImportData> {
|
||||
match source {
|
||||
ImportDataSource::AppFlowyDataFolder {
|
||||
path,
|
||||
container_name,
|
||||
} => import_appflowy_data_folder(session, path, container_name, &collab_db),
|
||||
}
|
||||
import_appflowy_data_folder(session, &session.user_workspace.id, &collab_db, context)
|
||||
}
|
||||
|
||||
pub fn load_collab_by_oid<'a, R>(
|
||||
|
@ -1,5 +1,6 @@
|
||||
mod appflowy_data_import;
|
||||
pub use appflowy_data_import::*;
|
||||
|
||||
pub(crate) mod importer;
|
||||
pub use importer::load_collab_by_oid;
|
||||
|
||||
pub use importer::ImportDataSource;
|
||||
|
Loading…
Reference in New Issue
Block a user