From fd5299a13d2c6782282cc5caf5a46cef5872ae7d Mon Sep 17 00:00:00 2001 From: Bartosz Sypytkowski Date: Sun, 18 Aug 2024 05:16:42 +0200 Subject: [PATCH] move to latest appflowy collab version (#5894) * chore: move to latest appflowy collab version * chore: filter mapping * chore: remove mutex folder * chore: cleanup borrow checker issues * chore: fixed flowy user crate compilation errors * chore: removed parking lot crate * chore: adjusting non locking approach * chore: remove with folder method * chore: fix folder manager * chore: fixed workspace database compilation errors * chore: initialize database plugins * chore: fix locks in flowy core * chore: remove supabase * chore: async traits * chore: add mutexes in dart ffi * chore: post rebase fixes * chore: remove supabase dart code * chore: fix deadlock * chore: fix page_id is empty * chore: use data source to init collab * chore: fix user awareness test * chore: fix database deadlock * fix: initialize user awareness * chore: fix open workspace test * chore: fix import csv * chore: fix update row meta deadlock * chore: fix document size test * fix: timestamp set/get type convert * fix: calculation * chore: revert Arc to Rc * chore: attach plugin to database and database row * chore: async get row * chore: clippy * chore: fix tauri build * chore: clippy * fix: duplicate view deadlock * chore: fmt * chore: tauri build --------- Co-authored-by: nathan --- .../cloud/supabase_auth_test.dart | 146 +-- .../shared/auth_operation.dart | 30 - .../integration_test/shared/base.dart | 17 - frontend/appflowy_flutter/ios/Podfile.lock | 4 +- .../appflowy_flutter/lib/env/backend_env.dart | 30 - .../appflowy_flutter/lib/env/cloud_env.dart | 67 +- .../database/application/row/row_service.dart | 8 + .../grid/application/row/row_bloc.dart | 2 + .../lib/startup/deps_resolver.dart | 4 - .../appflowy_flutter/lib/startup/startup.dart | 1 - .../startup/tasks/appflowy_cloud_task.dart | 3 +- .../lib/startup/tasks/prelude.dart | 1 - 
.../lib/startup/tasks/rust_sdk.dart | 1 - .../lib/startup/tasks/supabase_task.dart | 118 --- .../auth/af_cloud_mock_auth_service.dart | 2 +- .../auth/supabase_auth_service.dart | 252 ----- .../auth/supabase_mock_auth_service.dart | 113 --- .../settings/settings_dialog_bloc.dart | 1 - .../settings/supabase_cloud_setting_bloc.dart | 103 -- .../settings/supabase_cloud_urls_bloc.dart | 128 --- .../settings/widgets/setting_cloud.dart | 8 - .../widgets/setting_supabase_cloud.dart | 339 ------- frontend/appflowy_tauri/src-tauri/Cargo.lock | 75 +- frontend/appflowy_tauri/src-tauri/Cargo.toml | 14 +- frontend/appflowy_tauri/src-tauri/src/init.rs | 18 +- frontend/appflowy_tauri/src-tauri/src/main.rs | 7 +- .../appflowy_tauri/src-tauri/src/request.rs | 4 +- .../appflowy_web_app/src-tauri/Cargo.lock | 75 +- .../appflowy_web_app/src-tauri/Cargo.toml | 14 +- .../appflowy_web_app/src-tauri/src/init.rs | 18 +- .../appflowy_web_app/src-tauri/src/main.rs | 2 +- .../appflowy_web_app/src-tauri/src/request.rs | 4 +- frontend/rust-lib/Cargo.lock | 77 +- frontend/rust-lib/Cargo.toml | 20 +- frontend/rust-lib/collab-integrate/Cargo.toml | 6 +- .../collab-integrate/src/collab_builder.rs | 371 ++++---- frontend/rust-lib/collab-integrate/src/lib.rs | 1 - .../src/native/plugin_provider.rs | 11 +- frontend/rust-lib/dart-ffi/Cargo.toml | 1 - frontend/rust-lib/dart-ffi/src/env_serde.rs | 5 +- frontend/rust-lib/dart-ffi/src/lib.rs | 19 +- .../event-integration-test/Cargo.toml | 1 - .../src/database_event.rs | 2 +- .../src/document/document_event.rs | 4 +- .../src/document_event.rs | 17 +- .../src/event_builder.rs | 3 +- .../src/folder_event.rs | 12 +- .../event-integration-test/src/lib.rs | 21 +- .../event-integration-test/src/user_event.rs | 47 +- .../tests/folder/local_test/script.rs | 16 + .../user/af_cloud_test/workspace_test.rs | 20 +- .../event-integration-test/tests/util.rs | 113 +-- frontend/rust-lib/flowy-ai/Cargo.toml | 2 +- frontend/rust-lib/flowy-ai/src/ai_manager.rs | 3 +- 
.../flowy-ai/src/local_ai/local_llm_chat.rs | 14 +- .../src/local_ai/local_llm_resource.rs | 80 +- frontend/rust-lib/flowy-core/Cargo.toml | 3 +- frontend/rust-lib/flowy-core/src/config.rs | 9 +- .../src/deps_resolve/folder_deps.rs | 7 +- .../flowy-core/src/integrate/server.rs | 70 +- .../flowy-core/src/integrate/trait_impls.rs | 303 +++--- .../rust-lib/flowy-core/src/integrate/user.rs | 11 +- frontend/rust-lib/flowy-core/src/lib.rs | 12 - .../rust-lib/flowy-database-pub/src/cloud.rs | 17 +- frontend/rust-lib/flowy-database2/Cargo.toml | 2 +- .../flowy-database2/src/event_handler.rs | 249 +++-- .../rust-lib/flowy-database2/src/event_map.rs | 162 ++-- .../rust-lib/flowy-database2/src/manager.rs | 345 ++++--- .../src/services/calculations/cache.rs | 3 +- .../src/services/calculations/controller.rs | 45 +- .../src/services/calculations/entities.rs | 45 +- .../src/services/cell/cell_data_cache.rs | 3 +- .../src/services/database/database_editor.rs | 883 ++++++++++-------- .../src/services/database/database_observe.rs | 25 +- .../src/services/database_view/layout_deps.rs | 72 +- .../database_view/view_calculations.rs | 38 +- .../src/services/database_view/view_editor.rs | 154 +-- .../src/services/database_view/view_filter.rs | 28 +- .../src/services/database_view/view_group.rs | 62 +- .../services/database_view/view_operation.rs | 85 +- .../src/services/database_view/view_sort.rs | 41 +- .../src/services/database_view/views.rs | 19 +- .../src/services/field/field_operation.rs | 17 +- .../checkbox_type_option.rs | 2 +- .../checkbox_type_option_entities.rs | 10 +- .../checklist_type_option/checklist.rs | 2 +- .../checklist_entities.rs | 10 +- .../date_type_option/date_type_option.rs | 18 +- .../date_type_option_entities.rs | 29 +- .../number_type_option/number_type_option.rs | 52 +- .../relation_type_option/relation.rs | 10 +- .../relation_type_option/relation_entities.rs | 6 +- .../multi_select_type_option.rs | 10 +- .../selection_type_option/select_ids.rs | 10 +- 
.../single_select_type_option.rs | 8 +- .../summary_type_option/summary.rs | 8 +- .../summary_type_option/summary_entities.rs | 10 +- .../text_type_option/text_type_option.rs | 19 +- .../type_options/time_type_option/time.rs | 2 +- .../time_type_option/time_entities.rs | 10 +- .../timestamp_type_option.rs | 22 +- .../timestamp_type_option_entities.rs | 12 +- .../translate_type_option/translate.rs | 23 +- .../translate_entities.rs | 10 +- .../field/type_options/type_option_cell.rs | 38 +- .../url_type_option/url_type_option.rs | 17 +- .../url_type_option_entities.rs | 13 +- .../src/services/field_settings/entities.rs | 29 +- .../field_settings/field_settings_builder.rs | 5 +- .../src/services/filter/controller.rs | 21 +- .../src/services/filter/entities.rs | 67 +- .../src/services/group/action.rs | 18 +- .../src/services/group/configuration.rs | 10 +- .../src/services/group/controller.rs | 26 +- .../controller_impls/checkbox_controller.rs | 12 +- .../group/controller_impls/date_controller.rs | 11 +- .../controller_impls/default_controller.rs | 11 +- .../multi_select_controller.rs | 17 +- .../single_select_controller.rs | 17 +- .../group/controller_impls/url_controller.rs | 11 +- .../src/services/group/entities.rs | 62 +- .../src/services/setting/entities.rs | 72 +- .../src/services/share/csv/export.rs | 8 +- .../src/services/share/csv/import.rs | 21 +- .../src/services/sort/controller.rs | 13 +- .../src/services/sort/entities.rs | 19 +- .../flowy-database2/src/utils/cache.rs | 42 +- .../tests/database/block_test/row_test.rs | 6 +- .../tests/database/cell_test/test.rs | 10 +- .../tests/database/database_editor.rs | 35 +- .../database/field_settings_test/test.rs | 9 +- .../tests/database/field_test/script.rs | 14 +- .../tests/database/field_test/test.rs | 24 +- .../filter_test/checklist_filter_test.rs | 2 +- .../tests/database/filter_test/script.rs | 2 +- .../filter_test/select_option_filter_test.rs | 24 +- .../tests/database/group_test/script.rs | 5 +- 
.../tests/database/layout_test/script.rs | 5 +- .../pre_fill_row_according_to_filter_test.rs | 32 +- .../pre_fill_row_with_payload_test.rs | 38 +- .../database/pre_fill_cell_test/script.rs | 2 +- .../tests/database/share_test/export_test.rs | 4 +- .../database/sort_test/multi_sort_test.rs | 8 +- .../tests/database/sort_test/script.rs | 2 +- .../database/sort_test/single_sort_test.rs | 34 +- .../rust-lib/flowy-document-pub/src/cloud.rs | 15 +- frontend/rust-lib/flowy-document/Cargo.toml | 1 - .../rust-lib/flowy-document/src/document.rs | 124 +-- .../flowy-document/src/event_handler.rs | 36 +- .../rust-lib/flowy-document/src/manager.rs | 221 +++-- .../tests/document/document_insert_test.rs | 8 +- .../tests/document/document_redo_undo_test.rs | 6 +- .../tests/document/document_test.rs | 55 +- .../flowy-document/tests/document/util.rs | 43 +- frontend/rust-lib/flowy-error/src/code.rs | 4 + .../rust-lib/flowy-folder-pub/src/cloud.rs | 37 +- frontend/rust-lib/flowy-folder/Cargo.toml | 2 +- .../flowy-folder/src/event_handler.rs | 10 +- frontend/rust-lib/flowy-folder/src/manager.rs | 844 +++++++++-------- .../rust-lib/flowy-folder/src/manager_init.rs | 104 +-- .../flowy-folder/src/manager_observer.rs | 99 +- .../flowy-folder/src/manager_test_util.rs | 8 +- frontend/rust-lib/flowy-folder/src/util.rs | 2 +- .../flowy-folder/src/view_operation.rs | 1 - frontend/rust-lib/flowy-server-pub/src/lib.rs | 4 - .../flowy-server-pub/src/supabase_config.rs | 41 - frontend/rust-lib/flowy-server/Cargo.toml | 3 +- .../src/af_cloud/impls/database.rs | 124 ++- .../src/af_cloud/impls/document.rs | 101 +- .../flowy-server/src/af_cloud/impls/folder.rs | 279 +++--- .../af_cloud/impls/user/cloud_service_impl.rs | 826 ++++++++-------- frontend/rust-lib/flowy-server/src/lib.rs | 3 - .../src/local_server/impls/database.rs | 79 +- .../src/local_server/impls/document.rs | 30 +- .../src/local_server/impls/folder.rs | 102 +- .../src/local_server/impls/user.rs | 231 +++-- 
.../flowy-server/src/local_server/server.rs | 5 +- frontend/rust-lib/flowy-server/src/server.rs | 10 +- .../src/supabase/api/collab_storage.rs | 17 +- .../flowy-server/src/supabase/api/document.rs | 2 +- .../src/supabase/api/postgres_server.rs | 10 +- .../flowy-server/src/supabase/api/user.rs | 29 +- .../src/supabase/file_storage/plan.rs | 37 + .../flowy-server/src/supabase/server.rs | 31 +- .../flowy-server/tests/af_cloud_test/util.rs | 5 +- frontend/rust-lib/flowy-sqlite/Cargo.toml | 1 - frontend/rust-lib/flowy-sqlite/src/kv/kv.rs | 6 +- frontend/rust-lib/flowy-user-pub/src/cloud.rs | 167 ++-- .../rust-lib/flowy-user-pub/src/entities.rs | 3 - frontend/rust-lib/flowy-user/Cargo.toml | 5 +- .../src/anon_user/migrate_anon_user_collab.rs | 124 +-- .../rust-lib/flowy-user/src/anon_user/mod.rs | 6 +- .../anon_user/sync_supabase_user_collab.rs | 350 ++++--- .../rust-lib/flowy-user/src/entities/auth.rs | 4 +- .../rust-lib/flowy-user/src/event_handler.rs | 10 +- .../src/migrations/document_empty_content.rs | 19 +- .../rust-lib/flowy-user/src/migrations/mod.rs | 3 +- .../flowy-user/src/migrations/util.rs | 15 +- .../migrations/workspace_and_favorite_v1.rs | 8 +- .../src/migrations/workspace_trash_v1.rs | 4 +- .../src/services/authenticate_user.rs | 53 +- .../flowy-user/src/services/cloud_config.rs | 4 +- .../data_import/appflowy_data_import.rs | 188 ++-- .../src/services/data_import/importer.rs | 7 +- .../rust-lib/flowy-user/src/services/db.rs | 86 +- .../flowy-user/src/user_manager/manager.rs | 71 +- .../src/user_manager/manager_history_user.rs | 5 +- .../user_manager/manager_user_awareness.rs | 330 ++++--- .../user_manager/manager_user_workspace.rs | 11 +- frontend/rust-lib/lib-dispatch/Cargo.toml | 1 - .../lib-dispatch/src/module/module.rs | 25 +- .../rust-lib/lib-infra/src/native/future.rs | 30 - 212 files changed, 5068 insertions(+), 6341 deletions(-) delete mode 100644 frontend/appflowy_flutter/lib/startup/tasks/supabase_task.dart delete mode 100644 
frontend/appflowy_flutter/lib/user/application/auth/supabase_auth_service.dart delete mode 100644 frontend/appflowy_flutter/lib/user/application/auth/supabase_mock_auth_service.dart delete mode 100644 frontend/appflowy_flutter/lib/workspace/application/settings/supabase_cloud_setting_bloc.dart delete mode 100644 frontend/appflowy_flutter/lib/workspace/application/settings/supabase_cloud_urls_bloc.dart delete mode 100644 frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_supabase_cloud.dart delete mode 100644 frontend/rust-lib/flowy-server-pub/src/supabase_config.rs create mode 100644 frontend/rust-lib/flowy-server/src/supabase/file_storage/plan.rs diff --git a/frontend/appflowy_flutter/integration_test/cloud/supabase_auth_test.dart b/frontend/appflowy_flutter/integration_test/cloud/supabase_auth_test.dart index 15c9c3c347..71cbc11431 100644 --- a/frontend/appflowy_flutter/integration_test/cloud/supabase_auth_test.dart +++ b/frontend/appflowy_flutter/integration_test/cloud/supabase_auth_test.dart @@ -1,93 +1,93 @@ -import 'package:appflowy/env/cloud_env.dart'; -import 'package:appflowy/workspace/application/settings/prelude.dart'; -import 'package:appflowy/workspace/presentation/settings/pages/settings_account_view.dart'; -import 'package:appflowy/workspace/presentation/settings/widgets/setting_supabase_cloud.dart'; -import 'package:flutter_test/flutter_test.dart'; -import 'package:integration_test/integration_test.dart'; +// import 'package:appflowy/env/cloud_env.dart'; +// import 'package:appflowy/workspace/application/settings/prelude.dart'; +// import 'package:appflowy/workspace/presentation/settings/pages/settings_account_view.dart'; +// import 'package:appflowy/workspace/presentation/settings/widgets/setting_supabase_cloud.dart'; +// import 'package:flutter_test/flutter_test.dart'; +// import 'package:integration_test/integration_test.dart'; -import '../shared/util.dart'; +// import '../shared/util.dart'; -void main() { - 
IntegrationTestWidgetsFlutterBinding.ensureInitialized(); +// void main() { +// IntegrationTestWidgetsFlutterBinding.ensureInitialized(); - group('supabase auth', () { - testWidgets('sign in with supabase', (tester) async { - await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase); - await tester.tapGoogleLoginInButton(); - await tester.expectToSeeHomePageWithGetStartedPage(); - }); +// group('supabase auth', () { +// testWidgets('sign in with supabase', (tester) async { +// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase); +// await tester.tapGoogleLoginInButton(); +// await tester.expectToSeeHomePageWithGetStartedPage(); +// }); - testWidgets('sign out with supabase', (tester) async { - await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase); - await tester.tapGoogleLoginInButton(); +// testWidgets('sign out with supabase', (tester) async { +// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase); +// await tester.tapGoogleLoginInButton(); - // Open the setting page and sign out - await tester.openSettings(); - await tester.openSettingsPage(SettingsPage.account); - await tester.logout(); +// // Open the setting page and sign out +// await tester.openSettings(); +// await tester.openSettingsPage(SettingsPage.account); +// await tester.logout(); - // Go to the sign in page again - await tester.pumpAndSettle(const Duration(seconds: 1)); - tester.expectToSeeGoogleLoginButton(); - }); +// // Go to the sign in page again +// await tester.pumpAndSettle(const Duration(seconds: 1)); +// tester.expectToSeeGoogleLoginButton(); +// }); - testWidgets('sign in as anonymous', (tester) async { - await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase); - await tester.tapSignInAsGuest(); +// testWidgets('sign in as anonymous', (tester) async { +// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase); +// await tester.tapSignInAsGuest(); - // should not see the sync setting page 
when sign in as anonymous - await tester.openSettings(); - await tester.openSettingsPage(SettingsPage.account); +// // should not see the sync setting page when sign in as anonymous +// await tester.openSettings(); +// await tester.openSettingsPage(SettingsPage.account); - // Scroll to sign-out - await tester.scrollUntilVisible( - find.byType(SignInOutButton), - 100, - scrollable: find.findSettingsScrollable(), - ); - await tester.tapButton(find.byType(SignInOutButton)); +// // Scroll to sign-out +// await tester.scrollUntilVisible( +// find.byType(SignInOutButton), +// 100, +// scrollable: find.findSettingsScrollable(), +// ); +// await tester.tapButton(find.byType(SignInOutButton)); - tester.expectToSeeGoogleLoginButton(); - }); +// tester.expectToSeeGoogleLoginButton(); +// }); - // testWidgets('enable encryption', (tester) async { - // await tester.initializeAppFlowy(cloudType: CloudType.supabase); - // await tester.tapGoogleLoginInButton(); +// // testWidgets('enable encryption', (tester) async { +// // await tester.initializeAppFlowy(cloudType: CloudType.supabase); +// // await tester.tapGoogleLoginInButton(); - // // Open the setting page and sign out - // await tester.openSettings(); - // await tester.openSettingsPage(SettingsPage.cloud); +// // // Open the setting page and sign out +// // await tester.openSettings(); +// // await tester.openSettingsPage(SettingsPage.cloud); - // // the switch should be off by default - // tester.assertEnableEncryptSwitchValue(false); - // await tester.toggleEnableEncrypt(); +// // // the switch should be off by default +// // tester.assertEnableEncryptSwitchValue(false); +// // await tester.toggleEnableEncrypt(); - // // the switch should be on after toggling - // tester.assertEnableEncryptSwitchValue(true); +// // // the switch should be on after toggling +// // tester.assertEnableEncryptSwitchValue(true); - // // the switch can not be toggled back to off - // await tester.toggleEnableEncrypt(); - // 
tester.assertEnableEncryptSwitchValue(true); - // }); +// // // the switch can not be toggled back to off +// // await tester.toggleEnableEncrypt(); +// // tester.assertEnableEncryptSwitchValue(true); +// // }); - testWidgets('enable sync', (tester) async { - await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase); - await tester.tapGoogleLoginInButton(); +// testWidgets('enable sync', (tester) async { +// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase); +// await tester.tapGoogleLoginInButton(); - // Open the setting page and sign out - await tester.openSettings(); - await tester.openSettingsPage(SettingsPage.cloud); +// // Open the setting page and sign out +// await tester.openSettings(); +// await tester.openSettingsPage(SettingsPage.cloud); - // the switch should be on by default - tester.assertSupabaseEnableSyncSwitchValue(true); - await tester.toggleEnableSync(SupabaseEnableSync); +// // the switch should be on by default +// tester.assertSupabaseEnableSyncSwitchValue(true); +// await tester.toggleEnableSync(SupabaseEnableSync); - // the switch should be off - tester.assertSupabaseEnableSyncSwitchValue(false); +// // the switch should be off +// tester.assertSupabaseEnableSyncSwitchValue(false); - // the switch should be on after toggling - await tester.toggleEnableSync(SupabaseEnableSync); - tester.assertSupabaseEnableSyncSwitchValue(true); - }); - }); -} +// // the switch should be on after toggling +// await tester.toggleEnableSync(SupabaseEnableSync); +// tester.assertSupabaseEnableSyncSwitchValue(true); +// }); +// }); +// } diff --git a/frontend/appflowy_flutter/integration_test/shared/auth_operation.dart b/frontend/appflowy_flutter/integration_test/shared/auth_operation.dart index 56815714c0..e01e02c6e1 100644 --- a/frontend/appflowy_flutter/integration_test/shared/auth_operation.dart +++ b/frontend/appflowy_flutter/integration_test/shared/auth_operation.dart @@ -2,7 +2,6 @@ import 
'package:appflowy/generated/locale_keys.g.dart'; import 'package:appflowy/user/presentation/screens/sign_in_screen/widgets/widgets.dart'; import 'package:appflowy/workspace/presentation/settings/pages/settings_account_view.dart'; import 'package:appflowy/workspace/presentation/settings/widgets/setting_appflowy_cloud.dart'; -import 'package:appflowy/workspace/presentation/settings/widgets/setting_supabase_cloud.dart'; import 'package:appflowy/workspace/presentation/widgets/toggle/toggle.dart'; import 'package:easy_localization/easy_localization.dart'; import 'package:flutter/material.dart'; @@ -52,26 +51,6 @@ extension AppFlowyAuthTest on WidgetTester { assert(isSwitched == value); } - void assertEnableEncryptSwitchValue(bool value) { - assertSwitchValue( - find.descendant( - of: find.byType(EnableEncrypt), - matching: find.byWidgetPredicate((widget) => widget is Switch), - ), - value, - ); - } - - void assertSupabaseEnableSyncSwitchValue(bool value) { - assertSwitchValue( - find.descendant( - of: find.byType(SupabaseEnableSync), - matching: find.byWidgetPredicate((widget) => widget is Switch), - ), - value, - ); - } - void assertAppFlowyCloudEnableSyncSwitchValue(bool value) { assertToggleValue( find.descendant( @@ -82,15 +61,6 @@ extension AppFlowyAuthTest on WidgetTester { ); } - Future toggleEnableEncrypt() async { - final finder = find.descendant( - of: find.byType(EnableEncrypt), - matching: find.byWidgetPredicate((widget) => widget is Switch), - ); - - await tapButton(finder); - } - Future toggleEnableSync(Type syncButton) async { final finder = find.descendant( of: find.byType(syncButton), diff --git a/frontend/appflowy_flutter/integration_test/shared/base.dart b/frontend/appflowy_flutter/integration_test/shared/base.dart index 16a576154f..371cd9b839 100644 --- a/frontend/appflowy_flutter/integration_test/shared/base.dart +++ b/frontend/appflowy_flutter/integration_test/shared/base.dart @@ -7,7 +7,6 @@ import 'package:appflowy/startup/entry_point.dart'; 
import 'package:appflowy/startup/startup.dart'; import 'package:appflowy/user/application/auth/af_cloud_mock_auth_service.dart'; import 'package:appflowy/user/application/auth/auth_service.dart'; -import 'package:appflowy/user/application/auth/supabase_mock_auth_service.dart'; import 'package:appflowy/user/presentation/presentation.dart'; import 'package:appflowy/user/presentation/screens/sign_in_screen/widgets/widgets.dart'; import 'package:appflowy/workspace/application/settings/prelude.dart'; @@ -55,8 +54,6 @@ extension AppFlowyTestBase on WidgetTester { switch (cloudType) { case AuthenticatorType.local: break; - case AuthenticatorType.supabase: - break; case AuthenticatorType.appflowyCloudSelfHost: rustEnvs["GOTRUE_ADMIN_EMAIL"] = "admin@example.com"; rustEnvs["GOTRUE_ADMIN_PASSWORD"] = "password"; @@ -75,13 +72,6 @@ extension AppFlowyTestBase on WidgetTester { case AuthenticatorType.local: await useLocalServer(); break; - case AuthenticatorType.supabase: - await useTestSupabaseCloud(); - getIt.unregister(); - getIt.registerFactory( - () => SupabaseMockAuthService(), - ); - break; case AuthenticatorType.appflowyCloudSelfHost: await useTestSelfHostedAppFlowyCloud(); getIt.unregister(); @@ -242,13 +232,6 @@ extension AppFlowyFinderTestBase on CommonFinders { } } -Future useTestSupabaseCloud() async { - await useSupabaseCloud( - url: TestEnv.supabaseUrl, - anonKey: TestEnv.supabaseAnonKey, - ); -} - Future useTestSelfHostedAppFlowyCloud() async { await useSelfHostedAppFlowyCloudWithURL(TestEnv.afCloudUrl); } diff --git a/frontend/appflowy_flutter/ios/Podfile.lock b/frontend/appflowy_flutter/ios/Podfile.lock index 8829c71074..af96ce7ccb 100644 --- a/frontend/appflowy_flutter/ios/Podfile.lock +++ b/frontend/appflowy_flutter/ios/Podfile.lock @@ -174,7 +174,7 @@ SPEC CHECKSUMS: file_picker: 09aa5ec1ab24135ccd7a1621c46c84134bfd6655 flowy_infra_ui: 0455e1fa8c51885aa1437848e361e99419f34ebc Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7 - fluttertoast: 
e9a18c7be5413da53898f660530c56f35edfba9c + fluttertoast: 723e187574b149e68e63ca4d39b837586b903cfa image_picker_ios: 99dfe1854b4fa34d0364e74a78448a0151025425 integration_test: ce0a3ffa1de96d1a89ca0ac26fca7ea18a749ef4 irondash_engine_context: 3458bf979b90d616ffb8ae03a150bafe2e860cc9 @@ -196,4 +196,4 @@ SPEC CHECKSUMS: PODFILE CHECKSUM: d0d9b4ff572d8695c38eb3f9b490f55cdfc57eca -COCOAPODS: 1.15.2 +COCOAPODS: 1.11.3 diff --git a/frontend/appflowy_flutter/lib/env/backend_env.dart b/frontend/appflowy_flutter/lib/env/backend_env.dart index fa0bf575a3..f8aa715a40 100644 --- a/frontend/appflowy_flutter/lib/env/backend_env.dart +++ b/frontend/appflowy_flutter/lib/env/backend_env.dart @@ -13,7 +13,6 @@ class AppFlowyConfiguration { required this.device_id, required this.platform, required this.authenticator_type, - required this.supabase_config, required this.appflowy_cloud_config, required this.envs, }); @@ -28,41 +27,12 @@ class AppFlowyConfiguration { final String device_id; final String platform; final int authenticator_type; - final SupabaseConfiguration supabase_config; final AppFlowyCloudConfiguration appflowy_cloud_config; final Map envs; Map toJson() => _$AppFlowyConfigurationToJson(this); } -@JsonSerializable() -class SupabaseConfiguration { - SupabaseConfiguration({ - required this.url, - required this.anon_key, - }); - - factory SupabaseConfiguration.fromJson(Map json) => - _$SupabaseConfigurationFromJson(json); - - /// Indicates whether the sync feature is enabled. 
- final String url; - final String anon_key; - - Map toJson() => _$SupabaseConfigurationToJson(this); - - static SupabaseConfiguration defaultConfig() { - return SupabaseConfiguration( - url: '', - anon_key: '', - ); - } - - bool get isValid { - return url.isNotEmpty && anon_key.isNotEmpty; - } -} - @JsonSerializable() class AppFlowyCloudConfiguration { AppFlowyCloudConfiguration({ diff --git a/frontend/appflowy_flutter/lib/env/cloud_env.dart b/frontend/appflowy_flutter/lib/env/cloud_env.dart index 9e8ea0d4f9..fcad1a1f2f 100644 --- a/frontend/appflowy_flutter/lib/env/cloud_env.dart +++ b/frontend/appflowy_flutter/lib/env/cloud_env.dart @@ -21,9 +21,6 @@ Future _setAuthenticatorType(AuthenticatorType ty) async { case AuthenticatorType.local: await getIt().set(KVKeys.kCloudType, 0.toString()); break; - case AuthenticatorType.supabase: - await getIt().set(KVKeys.kCloudType, 1.toString()); - break; case AuthenticatorType.appflowyCloud: await getIt().set(KVKeys.kCloudType, 2.toString()); break; @@ -63,8 +60,6 @@ Future getAuthenticatorType() async { switch (value ?? "0") { case "0": return AuthenticatorType.local; - case "1": - return AuthenticatorType.supabase; case "2": return AuthenticatorType.appflowyCloud; case "3": @@ -93,10 +88,6 @@ Future getAuthenticatorType() async { /// Returns `false` otherwise. bool get isAuthEnabled { final env = getIt(); - if (env.authenticatorType == AuthenticatorType.supabase) { - return env.supabaseConfig.isValid; - } - if (env.authenticatorType.isAppFlowyCloudEnabled) { return env.appflowyCloudConfig.isValid; } @@ -104,19 +95,6 @@ bool get isAuthEnabled { return false; } -/// Checks if Supabase is enabled. -/// -/// This getter evaluates if Supabase should be enabled based on the -/// current integration mode and cloud type setting. -/// -/// Returns: -/// A boolean value indicating whether Supabase is enabled. 
It returns `true` -/// if the application is in release or develop mode and the current cloud type -/// is `CloudType.supabase`. Otherwise, it returns `false`. -bool get isSupabaseEnabled { - return currentCloudType().isSupabaseEnabled; -} - /// Determines if AppFlowy Cloud is enabled. bool get isAppFlowyCloudEnabled { return currentCloudType().isAppFlowyCloudEnabled; @@ -124,7 +102,6 @@ bool get isAppFlowyCloudEnabled { enum AuthenticatorType { local, - supabase, appflowyCloud, appflowyCloudSelfHost, // The 'appflowyCloudDevelop' type is used for develop purposes only. @@ -137,14 +114,10 @@ enum AuthenticatorType { this == AuthenticatorType.appflowyCloudDevelop || this == AuthenticatorType.appflowyCloud; - bool get isSupabaseEnabled => this == AuthenticatorType.supabase; - int get value { switch (this) { case AuthenticatorType.local: return 0; - case AuthenticatorType.supabase: - return 1; case AuthenticatorType.appflowyCloud: return 2; case AuthenticatorType.appflowyCloudSelfHost: @@ -158,8 +131,6 @@ enum AuthenticatorType { switch (value) { case 0: return AuthenticatorType.local; - case 1: - return AuthenticatorType.supabase; case 2: return AuthenticatorType.appflowyCloud; case 3: @@ -197,25 +168,15 @@ Future useLocalServer() async { await _setAuthenticatorType(AuthenticatorType.local); } -Future useSupabaseCloud({ - required String url, - required String anonKey, -}) async { - await _setAuthenticatorType(AuthenticatorType.supabase); - await setSupabaseServer(url, anonKey); -} - /// Use getIt() to get the shared environment. 
class AppFlowyCloudSharedEnv { AppFlowyCloudSharedEnv({ required AuthenticatorType authenticatorType, required this.appflowyCloudConfig, - required this.supabaseConfig, }) : _authenticatorType = authenticatorType; final AuthenticatorType _authenticatorType; final AppFlowyCloudConfiguration appflowyCloudConfig; - final SupabaseConfiguration supabaseConfig; AuthenticatorType get authenticatorType => _authenticatorType; @@ -229,10 +190,6 @@ class AppFlowyCloudSharedEnv { ? await getAppFlowyCloudConfig(authenticatorType) : AppFlowyCloudConfiguration.defaultConfig(); - final supabaseCloudConfig = authenticatorType.isSupabaseEnabled - ? await getSupabaseCloudConfig() - : SupabaseConfiguration.defaultConfig(); - // In the backend, the value '2' represents the use of AppFlowy Cloud. However, in the frontend, // we distinguish between [AuthenticatorType.appflowyCloudSelfHost] and [AuthenticatorType.appflowyCloud]. // When the cloud type is [AuthenticatorType.appflowyCloudSelfHost] in the frontend, it should be @@ -244,7 +201,6 @@ class AppFlowyCloudSharedEnv { return AppFlowyCloudSharedEnv( authenticatorType: authenticatorType, appflowyCloudConfig: appflowyCloudConfig, - supabaseConfig: supabaseCloudConfig, ); } else { // Using the cloud settings from the .env file. 
@@ -257,7 +213,6 @@ class AppFlowyCloudSharedEnv { return AppFlowyCloudSharedEnv( authenticatorType: AuthenticatorType.fromValue(Env.authenticatorType), appflowyCloudConfig: appflowyCloudConfig, - supabaseConfig: SupabaseConfiguration.defaultConfig(), ); } } @@ -265,8 +220,7 @@ class AppFlowyCloudSharedEnv { @override String toString() { return 'authenticator: $_authenticatorType\n' - 'appflowy: ${appflowyCloudConfig.toJson()}\n' - 'supabase: ${supabaseConfig.toJson()})\n'; + 'appflowy: ${appflowyCloudConfig.toJson()}\n'; } } @@ -354,22 +308,3 @@ Future setSupabaseServer( await getIt().set(KVKeys.kSupabaseAnonKey, anonKey); } } - -Future getSupabaseCloudConfig() async { - final url = await _getSupabaseUrl(); - final anonKey = await _getSupabaseAnonKey(); - return SupabaseConfiguration( - url: url, - anon_key: anonKey, - ); -} - -Future _getSupabaseUrl() async { - final result = await getIt().get(KVKeys.kSupabaseURL); - return result ?? ''; -} - -Future _getSupabaseAnonKey() async { - final result = await getIt().get(KVKeys.kSupabaseAnonKey); - return result ?? 
''; -} diff --git a/frontend/appflowy_flutter/lib/plugins/database/application/row/row_service.dart b/frontend/appflowy_flutter/lib/plugins/database/application/row/row_service.dart index 1866891336..c5e71ba78b 100644 --- a/frontend/appflowy_flutter/lib/plugins/database/application/row/row_service.dart +++ b/frontend/appflowy_flutter/lib/plugins/database/application/row/row_service.dart @@ -37,6 +37,14 @@ class RowBackendService { return DatabaseEventCreateRow(payload).send(); } + Future> initRow(RowId rowId) async { + final payload = RowIdPB() + ..viewId = viewId + ..rowId = rowId; + + return DatabaseEventInitRow(payload).send(); + } + Future> createRowBefore(RowId rowId) { return createRow( viewId: viewId, diff --git a/frontend/appflowy_flutter/lib/plugins/database/grid/application/row/row_bloc.dart b/frontend/appflowy_flutter/lib/plugins/database/grid/application/row/row_bloc.dart index a0c0467b95..322d7d59a4 100644 --- a/frontend/appflowy_flutter/lib/plugins/database/grid/application/row/row_bloc.dart +++ b/frontend/appflowy_flutter/lib/plugins/database/grid/application/row/row_bloc.dart @@ -23,6 +23,8 @@ class RowBloc extends Bloc { }) : _rowBackendSvc = RowBackendService(viewId: viewId), _rowController = rowController, super(RowState.initial()) { + _rowBackendSvc.initRow(rowId); + _dispatch(); _startListening(); _init(); diff --git a/frontend/appflowy_flutter/lib/startup/deps_resolver.dart b/frontend/appflowy_flutter/lib/startup/deps_resolver.dart index d19e0b3f7a..4136cfd07d 100644 --- a/frontend/appflowy_flutter/lib/startup/deps_resolver.dart +++ b/frontend/appflowy_flutter/lib/startup/deps_resolver.dart @@ -12,7 +12,6 @@ import 'package:appflowy/startup/tasks/appflowy_cloud_task.dart'; import 'package:appflowy/user/application/ai_service.dart'; import 'package:appflowy/user/application/auth/af_cloud_auth_service.dart'; import 'package:appflowy/user/application/auth/auth_service.dart'; -import 
'package:appflowy/user/application/auth/supabase_auth_service.dart'; import 'package:appflowy/user/application/prelude.dart'; import 'package:appflowy/user/application/reminder/reminder_bloc.dart'; import 'package:appflowy/user/application/user_listener.dart'; @@ -124,9 +123,6 @@ void _resolveUserDeps(GetIt getIt, IntegrationMode mode) { ), ); break; - case AuthenticatorType.supabase: - getIt.registerFactory(() => SupabaseAuthService()); - break; case AuthenticatorType.appflowyCloud: case AuthenticatorType.appflowyCloudSelfHost: case AuthenticatorType.appflowyCloudDevelop: diff --git a/frontend/appflowy_flutter/lib/startup/startup.dart b/frontend/appflowy_flutter/lib/startup/startup.dart index 213e5f6227..85be02f6a6 100644 --- a/frontend/appflowy_flutter/lib/startup/startup.dart +++ b/frontend/appflowy_flutter/lib/startup/startup.dart @@ -133,7 +133,6 @@ class FlowyRunner { // It is unable to get the device information from the test environment. const ApplicationInfoTask(), const HotKeyTask(), - if (isSupabaseEnabled) InitSupabaseTask(), if (isAppFlowyCloudEnabled) InitAppFlowyCloudTask(), const InitAppWidgetTask(), const InitPlatformServiceTask(), diff --git a/frontend/appflowy_flutter/lib/startup/tasks/appflowy_cloud_task.dart b/frontend/appflowy_flutter/lib/startup/tasks/appflowy_cloud_task.dart index 542e8b75a2..5aad45b3c5 100644 --- a/frontend/appflowy_flutter/lib/startup/tasks/appflowy_cloud_task.dart +++ b/frontend/appflowy_flutter/lib/startup/tasks/appflowy_cloud_task.dart @@ -7,7 +7,6 @@ import 'package:app_links/app_links.dart'; import 'package:appflowy/env/cloud_env.dart'; import 'package:appflowy/startup/startup.dart'; import 'package:appflowy/startup/tasks/app_widget.dart'; -import 'package:appflowy/startup/tasks/supabase_task.dart'; import 'package:appflowy/user/application/auth/auth_error.dart'; import 'package:appflowy/user/application/auth/auth_service.dart'; import 'package:appflowy/user/application/auth/device_id.dart'; @@ -22,6 +21,8 @@ import 
'package:appflowy_backend/protobuf/flowy-user/protobuf.dart'; import 'package:appflowy_result/appflowy_result.dart'; import 'package:url_protocol/url_protocol.dart'; +const appflowyDeepLinkSchema = 'appflowy-flutter'; + class AppFlowyCloudDeepLink { AppFlowyCloudDeepLink() { if (_deeplinkSubscription == null) { diff --git a/frontend/appflowy_flutter/lib/startup/tasks/prelude.dart b/frontend/appflowy_flutter/lib/startup/tasks/prelude.dart index 2c3aced3ab..4be5f0f6f7 100644 --- a/frontend/appflowy_flutter/lib/startup/tasks/prelude.dart +++ b/frontend/appflowy_flutter/lib/startup/tasks/prelude.dart @@ -12,5 +12,4 @@ export 'platform_service.dart'; export 'recent_service_task.dart'; export 'rust_sdk.dart'; export 'sentry.dart'; -export 'supabase_task.dart'; export 'windows.dart'; diff --git a/frontend/appflowy_flutter/lib/startup/tasks/rust_sdk.dart b/frontend/appflowy_flutter/lib/startup/tasks/rust_sdk.dart index c02b450d79..58d6aacbc3 100644 --- a/frontend/appflowy_flutter/lib/startup/tasks/rust_sdk.dart +++ b/frontend/appflowy_flutter/lib/startup/tasks/rust_sdk.dart @@ -63,7 +63,6 @@ AppFlowyConfiguration _makeAppFlowyConfiguration( device_id: deviceId, platform: Platform.operatingSystem, authenticator_type: env.authenticatorType.value, - supabase_config: env.supabaseConfig, appflowy_cloud_config: env.appflowyCloudConfig, envs: rustEnvs, ); diff --git a/frontend/appflowy_flutter/lib/startup/tasks/supabase_task.dart b/frontend/appflowy_flutter/lib/startup/tasks/supabase_task.dart deleted file mode 100644 index cb8981acdd..0000000000 --- a/frontend/appflowy_flutter/lib/startup/tasks/supabase_task.dart +++ /dev/null @@ -1,118 +0,0 @@ -import 'dart:async'; -import 'dart:io'; - -import 'package:appflowy/env/cloud_env.dart'; -import 'package:appflowy/user/application/supabase_realtime.dart'; -import 'package:appflowy/workspace/application/settings/application_data_storage.dart'; -import 'package:flutter/foundation.dart'; -import 'package:hive_flutter/hive_flutter.dart'; 
-import 'package:path/path.dart' as p; -import 'package:supabase_flutter/supabase_flutter.dart'; -import 'package:url_protocol/url_protocol.dart'; - -import '../startup.dart'; - -// ONLY supports in macOS and Windows now. -// -// If you need to update the schema, please update the following files: -// - appflowy_flutter/macos/Runner/Info.plist (macOS) -// - the callback url in Supabase dashboard -const appflowyDeepLinkSchema = 'appflowy-flutter'; -const supabaseLoginCallback = '$appflowyDeepLinkSchema://login-callback'; - -const hiveBoxName = 'appflowy_supabase_authentication'; - -// Used to store the session of the supabase in case of the user switch the different folder. -Supabase? supabase; -SupabaseRealtimeService? realtimeService; - -class InitSupabaseTask extends LaunchTask { - @override - Future initialize(LaunchContext context) async { - if (!isSupabaseEnabled) { - return; - } - - await supabase?.dispose(); - supabase = null; - final initializedSupabase = await Supabase.initialize( - url: getIt().supabaseConfig.url, - anonKey: getIt().supabaseConfig.anon_key, - debug: kDebugMode, - authOptions: const FlutterAuthClientOptions( - localStorage: SupabaseLocalStorage(), - ), - ); - - if (realtimeService != null) { - await realtimeService?.dispose(); - realtimeService = null; - } - realtimeService = SupabaseRealtimeService(supabase: initializedSupabase); - - supabase = initializedSupabase; - - if (Platform.isWindows) { - // register deep link for Windows - registerProtocolHandler(appflowyDeepLinkSchema); - } - } - - @override - Future dispose() async { - await realtimeService?.dispose(); - realtimeService = null; - await supabase?.dispose(); - supabase = null; - } -} - -/// customize the supabase auth storage -/// -/// We don't use the default one because it always save the session in the document directory. -/// When we switch to the different folder, the session still exists. 
-class SupabaseLocalStorage extends LocalStorage { - const SupabaseLocalStorage(); - - @override - Future initialize() async { - HiveCipher? encryptionCipher; - - // customize the path for Hive - final path = await getIt().getPath(); - Hive.init(p.join(path, 'supabase_auth')); - await Hive.openBox( - hiveBoxName, - encryptionCipher: encryptionCipher, - ); - } - - @override - Future hasAccessToken() { - return Future.value( - Hive.box(hiveBoxName).containsKey( - supabasePersistSessionKey, - ), - ); - } - - @override - Future accessToken() { - return Future.value( - Hive.box(hiveBoxName).get(supabasePersistSessionKey) as String?, - ); - } - - @override - Future removePersistedSession() { - return Hive.box(hiveBoxName).delete(supabasePersistSessionKey); - } - - @override - Future persistSession(String persistSessionString) { - return Hive.box(hiveBoxName).put( - supabasePersistSessionKey, - persistSessionString, - ); - } -} diff --git a/frontend/appflowy_flutter/lib/user/application/auth/af_cloud_mock_auth_service.dart b/frontend/appflowy_flutter/lib/user/application/auth/af_cloud_mock_auth_service.dart index 7c33143ff0..fac655b7fc 100644 --- a/frontend/appflowy_flutter/lib/user/application/auth/af_cloud_mock_auth_service.dart +++ b/frontend/appflowy_flutter/lib/user/application/auth/af_cloud_mock_auth_service.dart @@ -20,7 +20,7 @@ class AppFlowyCloudMockAuthService implements AuthService { final String userEmail; final BackendAuthService _appFlowyAuthService = - BackendAuthService(AuthenticatorPB.Supabase); + BackendAuthService(AuthenticatorPB.AppFlowyCloud); @override Future> signUp({ diff --git a/frontend/appflowy_flutter/lib/user/application/auth/supabase_auth_service.dart b/frontend/appflowy_flutter/lib/user/application/auth/supabase_auth_service.dart deleted file mode 100644 index 0dc48d7ef7..0000000000 --- a/frontend/appflowy_flutter/lib/user/application/auth/supabase_auth_service.dart +++ /dev/null @@ -1,252 +0,0 @@ -import 'dart:async'; - -import 
'package:appflowy/startup/tasks/prelude.dart'; -import 'package:appflowy/user/application/auth/auth_service.dart'; -import 'package:appflowy/user/application/auth/backend_auth_service.dart'; -import 'package:appflowy/user/application/auth/device_id.dart'; -import 'package:appflowy/user/application/user_service.dart'; -import 'package:appflowy_backend/dispatch/dispatch.dart'; -import 'package:appflowy_backend/log.dart'; -import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart'; -import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart'; -import 'package:appflowy_result/appflowy_result.dart'; -import 'package:flutter/foundation.dart'; -import 'package:supabase_flutter/supabase_flutter.dart'; - -import 'auth_error.dart'; - -class SupabaseAuthService implements AuthService { - SupabaseAuthService(); - - SupabaseClient get _client => Supabase.instance.client; - GoTrueClient get _auth => _client.auth; - - final BackendAuthService _backendAuthService = BackendAuthService( - AuthenticatorPB.Supabase, - ); - - @override - Future> signUp({ - required String name, - required String email, - required String password, - Map params = const {}, - }) async { - // fetch the uuid from supabase. - final response = await _auth.signUp( - email: email, - password: password, - ); - final uuid = response.user?.id; - if (uuid == null) { - return FlowyResult.failure(AuthError.supabaseSignUpError); - } - // assign the uuid to our backend service. - // and will transfer this logic to backend later. 
- return _backendAuthService.signUp( - name: name, - email: email, - password: password, - params: { - AuthServiceMapKeys.uuid: uuid, - }, - ); - } - - @override - Future> signInWithEmailPassword({ - required String email, - required String password, - Map params = const {}, - }) async { - try { - final response = await _auth.signInWithPassword( - email: email, - password: password, - ); - final uuid = response.user?.id; - if (uuid == null) { - return FlowyResult.failure(AuthError.supabaseSignInError); - } - return _backendAuthService.signInWithEmailPassword( - email: email, - password: password, - params: { - AuthServiceMapKeys.uuid: uuid, - }, - ); - } on AuthException catch (e) { - Log.error(e); - return FlowyResult.failure(AuthError.supabaseSignInError); - } - } - - @override - Future> signUpWithOAuth({ - required String platform, - Map params = const {}, - }) async { - // Before signing in, sign out any existing users. Otherwise, the callback will be triggered even if the user doesn't click the 'Sign In' button on the website - if (_auth.currentUser != null) { - await _auth.signOut(); - } - - final provider = platform.toProvider(); - final completer = supabaseLoginCompleter( - onSuccess: (userId, userEmail) async { - return _setupAuth( - map: { - AuthServiceMapKeys.uuid: userId, - AuthServiceMapKeys.email: userEmail, - AuthServiceMapKeys.deviceId: await getDeviceId(), - }, - ); - }, - ); - - final response = await _auth.signInWithOAuth( - provider, - queryParams: queryParamsForProvider(provider), - redirectTo: supabaseLoginCallback, - ); - if (!response) { - completer.complete( - FlowyResult.failure(AuthError.supabaseSignInWithOauthError), - ); - } - return completer.future; - } - - @override - Future signOut() async { - await _auth.signOut(); - await _backendAuthService.signOut(); - } - - @override - Future> signUpAsGuest({ - Map params = const {}, - }) async { - // supabase don't support guest login. - // so, just forward to our backend. 
- return _backendAuthService.signUpAsGuest(); - } - - @override - Future> signInWithMagicLink({ - required String email, - Map params = const {}, - }) async { - final completer = supabaseLoginCompleter( - onSuccess: (userId, userEmail) async { - return _setupAuth( - map: { - AuthServiceMapKeys.uuid: userId, - AuthServiceMapKeys.email: userEmail, - AuthServiceMapKeys.deviceId: await getDeviceId(), - }, - ); - }, - ); - - await _auth.signInWithOtp( - email: email, - emailRedirectTo: kIsWeb ? null : supabaseLoginCallback, - ); - return completer.future; - } - - @override - Future> getUser() async { - return UserBackendService.getCurrentUserProfile(); - } - - Future> getSupabaseUser() async { - final user = _auth.currentUser; - if (user == null) { - return FlowyResult.failure(AuthError.supabaseGetUserError); - } - return FlowyResult.success(user); - } - - Future> _setupAuth({ - required Map map, - }) async { - final payload = OauthSignInPB( - authenticator: AuthenticatorPB.Supabase, - map: map, - ); - - return UserEventOauthSignIn(payload).send().then((value) => value); - } -} - -extension on String { - OAuthProvider toProvider() { - switch (this) { - case 'github': - return OAuthProvider.github; - case 'google': - return OAuthProvider.google; - case 'discord': - return OAuthProvider.discord; - default: - throw UnimplementedError(); - } - } -} - -/// Creates a completer that listens to Supabase authentication state changes and -/// completes when a user signs in. -/// -/// This function sets up a listener on Supabase's authentication state. When a user -/// signs in, it triggers the provided [onSuccess] callback with the user's `id` and -/// `email`. Once the [onSuccess] callback is executed and a response is received, -/// the completer completes with the response, and the listener is canceled. -/// -/// Parameters: -/// - [onSuccess]: A callback function that's executed when a user signs in. 
It -/// should take in a user's `id` and `email` and return a `Future` containing either -/// a `FlowyError` or a `UserProfilePB`. -/// -/// Returns: -/// A completer of type `FlowyResult`. This completer completes -/// with the response from the [onSuccess] callback when a user signs in. -Completer> supabaseLoginCompleter({ - required Future> Function( - String userId, - String userEmail, - ) onSuccess, -}) { - final completer = Completer>(); - late final StreamSubscription subscription; - final auth = Supabase.instance.client.auth; - - subscription = auth.onAuthStateChange.listen((event) async { - final user = event.session?.user; - if (event.event == AuthChangeEvent.signedIn && user != null) { - final response = await onSuccess( - user.id, - user.email ?? user.newEmail ?? '', - ); - // Only cancel the subscription if the Event is signedIn. - await subscription.cancel(); - completer.complete(response); - } - }); - return completer; -} - -Map queryParamsForProvider(OAuthProvider provider) { - switch (provider) { - case OAuthProvider.google: - return { - 'access_type': 'offline', - 'prompt': 'consent', - }; - case OAuthProvider.github: - case OAuthProvider.discord: - default: - return {}; - } -} diff --git a/frontend/appflowy_flutter/lib/user/application/auth/supabase_mock_auth_service.dart b/frontend/appflowy_flutter/lib/user/application/auth/supabase_mock_auth_service.dart deleted file mode 100644 index bd2620caaa..0000000000 --- a/frontend/appflowy_flutter/lib/user/application/auth/supabase_mock_auth_service.dart +++ /dev/null @@ -1,113 +0,0 @@ -import 'dart:async'; - -import 'package:appflowy/user/application/auth/auth_service.dart'; -import 'package:appflowy/user/application/auth/backend_auth_service.dart'; -import 'package:appflowy/user/application/user_service.dart'; -import 'package:appflowy_backend/dispatch/dispatch.dart'; -import 'package:appflowy_backend/log.dart'; -import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart'; -import 
'package:appflowy_backend/protobuf/flowy-user/protobuf.dart'; -import 'package:appflowy_result/appflowy_result.dart'; -import 'package:supabase_flutter/supabase_flutter.dart'; - -import 'auth_error.dart'; - -/// Only used for testing. -class SupabaseMockAuthService implements AuthService { - SupabaseMockAuthService(); - static OauthSignInPB? signInPayload; - - SupabaseClient get _client => Supabase.instance.client; - GoTrueClient get _auth => _client.auth; - - final BackendAuthService _appFlowyAuthService = - BackendAuthService(AuthenticatorPB.Supabase); - - @override - Future> signUp({ - required String name, - required String email, - required String password, - Map params = const {}, - }) async { - throw UnimplementedError(); - } - - @override - Future> signInWithEmailPassword({ - required String email, - required String password, - Map params = const {}, - }) async { - throw UnimplementedError(); - } - - @override - Future> signUpWithOAuth({ - required String platform, - Map params = const {}, - }) async { - const password = "AppFlowyTest123!"; - const email = "supabase_integration_test@appflowy.io"; - try { - if (_auth.currentSession == null) { - try { - await _auth.signInWithPassword( - password: password, - email: email, - ); - } catch (e) { - Log.error(e); - return FlowyResult.failure(AuthError.supabaseSignUpError); - } - } - // Check if the user is already logged in. - final session = _auth.currentSession!; - final uuid = session.user.id; - - // Create the OAuth sign-in payload. - final payload = OauthSignInPB( - authenticator: AuthenticatorPB.Supabase, - map: { - AuthServiceMapKeys.uuid: uuid, - AuthServiceMapKeys.email: email, - AuthServiceMapKeys.deviceId: 'MockDeviceId', - }, - ); - - // Send the sign-in event and handle the response. 
- return UserEventOauthSignIn(payload).send().then((value) => value); - } on AuthException catch (e) { - Log.error(e); - return FlowyResult.failure(AuthError.supabaseSignInError); - } - } - - @override - Future signOut() async { - // await _auth.signOut(); - await _appFlowyAuthService.signOut(); - } - - @override - Future> signUpAsGuest({ - Map params = const {}, - }) async { - // supabase don't support guest login. - // so, just forward to our backend. - return _appFlowyAuthService.signUpAsGuest(); - } - - @override - Future> signInWithMagicLink({ - required String email, - Map params = const {}, - }) async { - throw UnimplementedError(); - } - - @override - Future> getUser() async { - return UserBackendService.getCurrentUserProfile(); - } -} diff --git a/frontend/appflowy_flutter/lib/workspace/application/settings/settings_dialog_bloc.dart b/frontend/appflowy_flutter/lib/workspace/application/settings/settings_dialog_bloc.dart index 36f2603dda..f28900d18a 100644 --- a/frontend/appflowy_flutter/lib/workspace/application/settings/settings_dialog_bloc.dart +++ b/frontend/appflowy_flutter/lib/workspace/application/settings/settings_dialog_bloc.dart @@ -90,7 +90,6 @@ class SettingsDialogBloc ]) async { if ([ AuthenticatorPB.Local, - AuthenticatorPB.Supabase, ].contains(userProfile.authenticator)) { return false; } diff --git a/frontend/appflowy_flutter/lib/workspace/application/settings/supabase_cloud_setting_bloc.dart b/frontend/appflowy_flutter/lib/workspace/application/settings/supabase_cloud_setting_bloc.dart deleted file mode 100644 index 9308a06a98..0000000000 --- a/frontend/appflowy_flutter/lib/workspace/application/settings/supabase_cloud_setting_bloc.dart +++ /dev/null @@ -1,103 +0,0 @@ -import 'package:appflowy/env/backend_env.dart'; -import 'package:appflowy/env/cloud_env.dart'; -import 'package:appflowy/plugins/database/application/defines.dart'; -import 'package:appflowy/startup/startup.dart'; -import 'package:appflowy_backend/dispatch/dispatch.dart'; 
-import 'package:appflowy_backend/log.dart'; -import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart'; -import 'package:appflowy_result/appflowy_result.dart'; -import 'package:flutter_bloc/flutter_bloc.dart'; -import 'package:freezed_annotation/freezed_annotation.dart'; - -import 'cloud_setting_listener.dart'; - -part 'supabase_cloud_setting_bloc.freezed.dart'; - -class SupabaseCloudSettingBloc - extends Bloc { - SupabaseCloudSettingBloc({ - required CloudSettingPB setting, - }) : _listener = UserCloudConfigListener(), - super(SupabaseCloudSettingState.initial(setting)) { - _dispatch(); - } - - final UserCloudConfigListener _listener; - - @override - Future close() async { - await _listener.stop(); - return super.close(); - } - - void _dispatch() { - on( - (event, emit) async { - await event.when( - initial: () async { - _listener.start( - onSettingChanged: (result) { - if (isClosed) { - return; - } - result.fold( - (setting) => - add(SupabaseCloudSettingEvent.didReceiveSetting(setting)), - (error) => Log.error(error), - ); - }, - ); - }, - enableSync: (bool enable) async { - final update = UpdateCloudConfigPB.create()..enableSync = enable; - await updateCloudConfig(update); - }, - didReceiveSetting: (CloudSettingPB setting) { - emit( - state.copyWith( - setting: setting, - loadingState: LoadingState.finish(FlowyResult.success(null)), - ), - ); - }, - enableEncrypt: (bool enable) { - final update = UpdateCloudConfigPB.create()..enableEncrypt = enable; - updateCloudConfig(update); - emit(state.copyWith(loadingState: const LoadingState.loading())); - }, - ); - }, - ); - } - - Future updateCloudConfig(UpdateCloudConfigPB setting) async { - await UserEventSetCloudConfig(setting).send(); - } -} - -@freezed -class SupabaseCloudSettingEvent with _$SupabaseCloudSettingEvent { - const factory SupabaseCloudSettingEvent.initial() = _Initial; - const factory SupabaseCloudSettingEvent.didReceiveSetting( - CloudSettingPB setting, - ) = _DidSyncSupabaseConfig; - const 
factory SupabaseCloudSettingEvent.enableSync(bool enable) = _EnableSync; - const factory SupabaseCloudSettingEvent.enableEncrypt(bool enable) = - _EnableEncrypt; -} - -@freezed -class SupabaseCloudSettingState with _$SupabaseCloudSettingState { - const factory SupabaseCloudSettingState({ - required LoadingState loadingState, - required SupabaseConfiguration config, - required CloudSettingPB setting, - }) = _SupabaseCloudSettingState; - - factory SupabaseCloudSettingState.initial(CloudSettingPB setting) => - SupabaseCloudSettingState( - loadingState: LoadingState.finish(FlowyResult.success(null)), - setting: setting, - config: getIt().supabaseConfig, - ); -} diff --git a/frontend/appflowy_flutter/lib/workspace/application/settings/supabase_cloud_urls_bloc.dart b/frontend/appflowy_flutter/lib/workspace/application/settings/supabase_cloud_urls_bloc.dart deleted file mode 100644 index fdd4cbef21..0000000000 --- a/frontend/appflowy_flutter/lib/workspace/application/settings/supabase_cloud_urls_bloc.dart +++ /dev/null @@ -1,128 +0,0 @@ -import 'package:appflowy/env/backend_env.dart'; -import 'package:appflowy/env/cloud_env.dart'; -import 'package:appflowy/generated/locale_keys.g.dart'; -import 'package:appflowy/startup/startup.dart'; -import 'package:appflowy_backend/dispatch/dispatch.dart'; -import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart'; -import 'package:easy_localization/easy_localization.dart'; -import 'package:flutter_bloc/flutter_bloc.dart'; -import 'package:freezed_annotation/freezed_annotation.dart'; - -import 'appflowy_cloud_setting_bloc.dart'; - -part 'supabase_cloud_urls_bloc.freezed.dart'; - -class SupabaseCloudURLsBloc - extends Bloc { - SupabaseCloudURLsBloc() : super(SupabaseCloudURLsState.initial()) { - on((event, emit) async { - await event.when( - updateUrl: (String url) { - emit( - state.copyWith( - updatedUrl: url, - showRestartHint: url.isNotEmpty && state.upatedAnonKey.isNotEmpty, - urlError: null, - ), - ); - }, - 
updateAnonKey: (String anonKey) { - emit( - state.copyWith( - upatedAnonKey: anonKey, - showRestartHint: - anonKey.isNotEmpty && state.updatedUrl.isNotEmpty, - anonKeyError: null, - ), - ); - }, - confirmUpdate: () async { - if (state.updatedUrl.isEmpty) { - emit( - state.copyWith( - urlError: - LocaleKeys.settings_menu_cloudSupabaseUrlCanNotBeEmpty.tr(), - anonKeyError: null, - restartApp: false, - ), - ); - return; - } - - if (state.upatedAnonKey.isEmpty) { - emit( - state.copyWith( - urlError: null, - anonKeyError: LocaleKeys - .settings_menu_cloudSupabaseAnonKeyCanNotBeEmpty - .tr(), - restartApp: false, - ), - ); - return; - } - - validateUrl(state.updatedUrl).fold( - (_) async { - await useSupabaseCloud( - url: state.updatedUrl, - anonKey: state.upatedAnonKey, - ); - - add(const SupabaseCloudURLsEvent.didSaveConfig()); - }, - (error) => emit(state.copyWith(urlError: error)), - ); - }, - didSaveConfig: () { - emit( - state.copyWith( - urlError: null, - anonKeyError: null, - restartApp: true, - ), - ); - }, - ); - }); - } - - Future updateCloudConfig(UpdateCloudConfigPB setting) async { - await UserEventSetCloudConfig(setting).send(); - } -} - -@freezed -class SupabaseCloudURLsEvent with _$SupabaseCloudURLsEvent { - const factory SupabaseCloudURLsEvent.updateUrl(String text) = _UpdateUrl; - const factory SupabaseCloudURLsEvent.updateAnonKey(String text) = - _UpdateAnonKey; - const factory SupabaseCloudURLsEvent.confirmUpdate() = _UpdateConfig; - const factory SupabaseCloudURLsEvent.didSaveConfig() = _DidSaveConfig; -} - -@freezed -class SupabaseCloudURLsState with _$SupabaseCloudURLsState { - const factory SupabaseCloudURLsState({ - required SupabaseConfiguration config, - required String updatedUrl, - required String upatedAnonKey, - required String? urlError, - required String? 
anonKeyError, - required bool restartApp, - required bool showRestartHint, - }) = _SupabaseCloudURLsState; - - factory SupabaseCloudURLsState.initial() { - final config = getIt().supabaseConfig; - return SupabaseCloudURLsState( - updatedUrl: config.url, - upatedAnonKey: config.anon_key, - urlError: null, - anonKeyError: null, - restartApp: false, - showRestartHint: config.url.isNotEmpty && config.anon_key.isNotEmpty, - config: config, - ); - } -} diff --git a/frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_cloud.dart b/frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_cloud.dart index 7191e2cc9d..1b8248d376 100644 --- a/frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_cloud.dart +++ b/frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_cloud.dart @@ -22,7 +22,6 @@ import 'package:flutter_bloc/flutter_bloc.dart'; import 'package:go_router/go_router.dart'; import 'setting_appflowy_cloud.dart'; -import 'setting_supabase_cloud.dart'; class SettingCloud extends StatelessWidget { const SettingCloud({required this.restartAppFlowy, super.key}); @@ -80,8 +79,6 @@ class SettingCloud extends StatelessWidget { switch (cloudType) { case AuthenticatorType.local: return SettingLocalCloud(restartAppFlowy: restartAppFlowy); - case AuthenticatorType.supabase: - return SettingSupabaseCloudView(restartAppFlowy: restartAppFlowy); case AuthenticatorType.appflowyCloud: return AppFlowyCloudViewSetting(restartAppFlowy: restartAppFlowy); case AuthenticatorType.appflowyCloudSelfHost: @@ -112,9 +109,6 @@ class CloudTypeSwitcher extends StatelessWidget { // Only show the appflowyCloudDevelop in develop mode final values = AuthenticatorType.values.where((element) { // Supabase will going to be removed in the future - if (element == AuthenticatorType.supabase) { - return false; - } return isDevelopMode || element != AuthenticatorType.appflowyCloudDevelop; }).toList(); @@ -218,8 +212,6 
@@ String titleFromCloudType(AuthenticatorType cloudType) { switch (cloudType) { case AuthenticatorType.local: return LocaleKeys.settings_menu_cloudLocal.tr(); - case AuthenticatorType.supabase: - return LocaleKeys.settings_menu_cloudSupabase.tr(); case AuthenticatorType.appflowyCloud: return LocaleKeys.settings_menu_cloudAppFlowy.tr(); case AuthenticatorType.appflowyCloudSelfHost: diff --git a/frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_supabase_cloud.dart b/frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_supabase_cloud.dart deleted file mode 100644 index 6751213251..0000000000 --- a/frontend/appflowy_flutter/lib/workspace/presentation/settings/widgets/setting_supabase_cloud.dart +++ /dev/null @@ -1,339 +0,0 @@ -import 'package:flutter/gestures.dart'; -import 'package:flutter/material.dart'; -import 'package:flutter/services.dart'; - -import 'package:appflowy/core/helpers/url_launcher.dart'; -import 'package:appflowy/generated/locale_keys.g.dart'; -import 'package:appflowy/workspace/application/settings/supabase_cloud_setting_bloc.dart'; -import 'package:appflowy/workspace/application/settings/supabase_cloud_urls_bloc.dart'; -import 'package:appflowy/workspace/presentation/home/toast.dart'; -import 'package:appflowy/workspace/presentation/settings/widgets/_restart_app_button.dart'; -import 'package:appflowy/workspace/presentation/widgets/dialogs.dart'; -import 'package:appflowy_backend/dispatch/dispatch.dart'; -import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart'; -import 'package:appflowy_backend/protobuf/flowy-user/user_setting.pb.dart'; -import 'package:appflowy_result/appflowy_result.dart'; -import 'package:easy_localization/easy_localization.dart'; -import 'package:flowy_infra/size.dart'; -import 'package:flowy_infra/theme_extension.dart'; -import 'package:flowy_infra_ui/flowy_infra_ui.dart'; -import 'package:flowy_infra_ui/widget/error_page.dart'; -import 
'package:flutter_bloc/flutter_bloc.dart'; - -class SettingSupabaseCloudView extends StatelessWidget { - const SettingSupabaseCloudView({required this.restartAppFlowy, super.key}); - - final VoidCallback restartAppFlowy; - - @override - Widget build(BuildContext context) { - return FutureBuilder>( - future: UserEventGetCloudConfig().send(), - builder: (context, snapshot) { - if (snapshot.data != null && - snapshot.connectionState == ConnectionState.done) { - return snapshot.data!.fold( - (setting) { - return BlocProvider( - create: (context) => SupabaseCloudSettingBloc( - setting: setting, - )..add(const SupabaseCloudSettingEvent.initial()), - child: Column( - children: [ - BlocBuilder( - builder: (context, state) { - return const Column( - children: [ - SupabaseEnableSync(), - EnableEncrypt(), - ], - ); - }, - ), - const VSpace(40), - const SupabaseSelfhostTip(), - SupabaseCloudURLs( - didUpdateUrls: restartAppFlowy, - ), - ], - ), - ); - }, - (err) { - return FlowyErrorPage.message(err.toString(), howToFix: ""); - }, - ); - } else { - return const Center( - child: CircularProgressIndicator(), - ); - } - }, - ); - } -} - -class SupabaseCloudURLs extends StatelessWidget { - const SupabaseCloudURLs({super.key, required this.didUpdateUrls}); - - final VoidCallback didUpdateUrls; - - @override - Widget build(BuildContext context) { - return BlocProvider( - create: (context) => SupabaseCloudURLsBloc(), - child: BlocListener( - listener: (context, state) async { - if (state.restartApp) { - didUpdateUrls(); - } - }, - child: BlocBuilder( - builder: (context, state) { - return Column( - children: [ - SupabaseInput( - title: LocaleKeys.settings_menu_cloudSupabaseUrl.tr(), - url: state.config.url, - hint: LocaleKeys.settings_menu_cloudURLHint.tr(), - onChanged: (text) { - context - .read() - .add(SupabaseCloudURLsEvent.updateUrl(text)); - }, - error: state.urlError, - ), - SupabaseInput( - title: LocaleKeys.settings_menu_cloudSupabaseAnonKey.tr(), - url: 
state.config.anon_key, - hint: LocaleKeys.settings_menu_cloudURLHint.tr(), - onChanged: (text) { - context - .read() - .add(SupabaseCloudURLsEvent.updateAnonKey(text)); - }, - error: state.anonKeyError, - ), - const VSpace(20), - RestartButton( - onClick: () => _restartApp(context), - showRestartHint: state.showRestartHint, - ), - ], - ); - }, - ), - ), - ); - } - - void _restartApp(BuildContext context) { - NavigatorAlertDialog( - title: LocaleKeys.settings_menu_restartAppTip.tr(), - confirm: () => context - .read() - .add(const SupabaseCloudURLsEvent.confirmUpdate()), - ).show(context); - } -} - -class EnableEncrypt extends StatelessWidget { - const EnableEncrypt({super.key}); - - @override - Widget build(BuildContext context) { - return BlocBuilder( - builder: (context, state) { - final indicator = state.loadingState.when( - loading: () => const CircularProgressIndicator.adaptive(), - finish: (successOrFail) => const SizedBox.shrink(), - idle: () => const SizedBox.shrink(), - ); - - return Column( - children: [ - Row( - children: [ - FlowyText.medium(LocaleKeys.settings_menu_enableEncrypt.tr()), - const Spacer(), - indicator, - const HSpace(3), - Switch.adaptive( - activeColor: Theme.of(context).colorScheme.primary, - onChanged: state.setting.enableEncrypt - ? 
null - : (bool value) { - context.read().add( - SupabaseCloudSettingEvent.enableEncrypt(value), - ); - }, - value: state.setting.enableEncrypt, - ), - ], - ), - Column( - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - IntrinsicHeight( - child: Opacity( - opacity: 0.6, - child: FlowyText.medium( - LocaleKeys.settings_menu_enableEncryptPrompt.tr(), - maxLines: 13, - ), - ), - ), - const VSpace(6), - SizedBox( - height: 40, - child: FlowyTooltip( - message: LocaleKeys.settings_menu_clickToCopySecret.tr(), - child: FlowyButton( - disable: !state.setting.enableEncrypt, - decoration: BoxDecoration( - borderRadius: Corners.s5Border, - border: Border.all( - color: Theme.of(context).colorScheme.secondary, - ), - ), - text: FlowyText.medium(state.setting.encryptSecret), - onTap: () async { - await Clipboard.setData( - ClipboardData(text: state.setting.encryptSecret), - ); - showMessageToast(LocaleKeys.message_copy_success.tr()); - }, - ), - ), - ), - ], - ), - ], - ); - }, - ); - } -} - -class SupabaseEnableSync extends StatelessWidget { - const SupabaseEnableSync({super.key}); - - @override - Widget build(BuildContext context) { - return BlocBuilder( - builder: (context, state) { - return Row( - children: [ - FlowyText.medium(LocaleKeys.settings_menu_enableSync.tr()), - const Spacer(), - Switch.adaptive( - activeColor: Theme.of(context).colorScheme.primary, - onChanged: (bool value) { - context.read().add( - SupabaseCloudSettingEvent.enableSync(value), - ); - }, - value: state.setting.enableSync, - ), - ], - ); - }, - ); - } -} - -@visibleForTesting -class SupabaseInput extends StatefulWidget { - const SupabaseInput({ - super.key, - required this.title, - required this.url, - required this.hint, - required this.error, - required this.onChanged, - }); - - final String title; - final String url; - final String hint; - final String? 
error; - final Function(String) onChanged; - - @override - SupabaseInputState createState() => SupabaseInputState(); -} - -class SupabaseInputState extends State { - late final _controller = TextEditingController(text: widget.url); - - @override - void dispose() { - _controller.dispose(); - super.dispose(); - } - - @override - Widget build(BuildContext context) { - return TextField( - controller: _controller, - style: const TextStyle(fontSize: 12.0), - decoration: InputDecoration( - contentPadding: const EdgeInsets.symmetric(vertical: 6), - labelText: widget.title, - labelStyle: Theme.of(context) - .textTheme - .titleMedium! - .copyWith(fontWeight: FontWeight.w400, fontSize: 16), - enabledBorder: UnderlineInputBorder( - borderSide: - BorderSide(color: AFThemeExtension.of(context).onBackground), - ), - focusedBorder: UnderlineInputBorder( - borderSide: BorderSide(color: Theme.of(context).colorScheme.primary), - ), - hintText: widget.hint, - errorText: widget.error, - ), - onChanged: widget.onChanged, - ); - } -} - -class SupabaseSelfhostTip extends StatelessWidget { - const SupabaseSelfhostTip({super.key}); - - final url = - "https://docs.appflowy.io/docs/guides/appflowy/self-hosting-appflowy-using-supabase"; - - @override - Widget build(BuildContext context) { - return Opacity( - opacity: 0.6, - child: RichText( - text: TextSpan( - children: [ - TextSpan( - text: LocaleKeys.settings_menu_selfHostStart.tr(), - style: Theme.of(context).textTheme.bodySmall!, - ), - TextSpan( - text: " ${LocaleKeys.settings_menu_selfHostContent.tr()} ", - style: Theme.of(context).textTheme.bodyMedium!.copyWith( - fontSize: FontSizes.s14, - color: Theme.of(context).colorScheme.primary, - decoration: TextDecoration.underline, - ), - recognizer: TapGestureRecognizer() - ..onTap = () => afLaunchUrlString(url), - ), - TextSpan( - text: LocaleKeys.settings_menu_selfHostEnd.tr(), - style: Theme.of(context).textTheme.bodySmall!, - ), - ], - ), - ), - ); - } -} diff --git 
a/frontend/appflowy_tauri/src-tauri/Cargo.lock b/frontend/appflowy_tauri/src-tauri/Cargo.lock index 64cc436a15..816bc62b34 100644 --- a/frontend/appflowy_tauri/src-tauri/Cargo.lock +++ b/frontend/appflowy_tauri/src-tauri/Cargo.lock @@ -172,7 +172,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "app-error" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bincode", @@ -192,7 +192,7 @@ dependencies = [ [[package]] name = "appflowy-ai-client" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bytes", @@ -826,11 +826,12 @@ dependencies = [ [[package]] name = "client-api" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "again", "anyhow", "app-error", + "arc-swap", "async-trait", "bincode", "brotli", @@ -876,7 +877,7 @@ dependencies = [ [[package]] name = "client-api-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "collab-entity", "collab-rt-entity", @@ -888,7 +889,7 @@ dependencies = [ [[package]] name = 
"client-websocket" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "futures-channel", "futures-util", @@ -962,15 +963,16 @@ dependencies = [ [[package]] name = "collab" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", + "arc-swap", "async-trait", "bincode", "bytes", "chrono", "js-sys", - "parking_lot 0.12.1", + "lazy_static", "serde", "serde_json", "serde_repr", @@ -986,7 +988,7 @@ dependencies = [ [[package]] name = "collab-database" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "async-trait", @@ -995,11 +997,11 @@ dependencies = [ "collab-entity", "collab-plugins", "dashmap 5.5.3", + "futures", "getrandom 0.2.10", "js-sys", "lazy_static", "nanoid", - "parking_lot 0.12.1", "rayon", "serde", "serde_json", @@ -1016,14 +1018,14 @@ dependencies = [ [[package]] name = "collab-document" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", + "arc-swap", "collab", "collab-entity", "getrandom 0.2.10", "nanoid", - "parking_lot 0.12.1", "serde", "serde_json", 
"thiserror", @@ -1036,7 +1038,7 @@ dependencies = [ [[package]] name = "collab-entity" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "bytes", @@ -1055,14 +1057,15 @@ dependencies = [ [[package]] name = "collab-folder" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", + "arc-swap", "chrono", "collab", "collab-entity", + "dashmap 5.5.3", "getrandom 0.2.10", - "parking_lot 0.12.1", "serde", "serde_json", "serde_repr", @@ -1077,13 +1080,17 @@ name = "collab-integrate" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "async-trait", "collab", + "collab-database", + "collab-document", "collab-entity", + "collab-folder", "collab-plugins", + "collab-user", "futures", "lib-infra", - "parking_lot 0.12.1", "serde", "serde_json", "tokio", @@ -1093,7 +1100,7 @@ dependencies = [ [[package]] name = "collab-plugins" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "async-stream", @@ -1109,7 +1116,6 @@ dependencies = [ "indexed_db_futures", "js-sys", "lazy_static", - "parking_lot 0.12.1", "rand 0.8.5", "rocksdb", "serde", @@ -1132,7 +1138,7 @@ dependencies = [ [[package]] name = "collab-rt-entity" version = "0.1.0" -source = 
"git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bincode", @@ -1157,7 +1163,7 @@ dependencies = [ [[package]] name = "collab-rt-protocol" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "async-trait", @@ -1174,13 +1180,12 @@ dependencies = [ [[package]] name = "collab-user" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "collab", "collab-entity", "getrandom 0.2.10", - "parking_lot 0.12.1", "serde", "serde_json", "tokio", @@ -1546,7 +1551,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" [[package]] name = "database-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "app-error", @@ -1972,6 +1977,7 @@ dependencies = [ "anyhow", "appflowy-local-ai", "appflowy-plugin", + "arc-swap", "base64 0.21.5", "bytes", "dashmap 6.0.1", @@ -1989,7 +1995,6 @@ dependencies = [ "log", "md5", "notify", - "parking_lot 0.12.1", "pin-project", "protobuf", "reqwest", @@ -2072,6 +2077,7 @@ version = "0.1.0" dependencies = [ "anyhow", "appflowy-local-ai", + 
"arc-swap", "base64 0.21.5", "bytes", "client-api", @@ -2079,6 +2085,7 @@ dependencies = [ "collab-entity", "collab-integrate", "collab-plugins", + "dashmap 6.0.1", "diesel", "flowy-ai", "flowy-ai-pub", @@ -2105,7 +2112,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "lib-log", - "parking_lot 0.12.1", "semver", "serde", "serde_json", @@ -2135,6 +2141,7 @@ name = "flowy-database2" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "async-stream", "async-trait", "bytes", @@ -2159,7 +2166,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "rayon", "rust_decimal", @@ -2231,7 +2237,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "scraper 0.18.1", "serde", @@ -2302,6 +2307,7 @@ dependencies = [ name = "flowy-folder" version = "0.1.0" dependencies = [ + "arc-swap", "async-trait", "bytes", "chrono", @@ -2323,7 +2329,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "regex", "serde", @@ -2418,6 +2423,7 @@ name = "flowy-server" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "bytes", "chrono", "client-api", @@ -2426,6 +2432,7 @@ dependencies = [ "collab-entity", "collab-folder", "collab-plugins", + "dashmap 6.0.1", "flowy-ai-pub", "flowy-database-pub", "flowy-document-pub", @@ -2445,7 +2452,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "mime_guess", - "parking_lot 0.12.1", "postgrest", "rand 0.8.5", "reqwest", @@ -2481,7 +2487,6 @@ dependencies = [ "diesel_derives", "diesel_migrations", "libsqlite3-sys", - "parking_lot 0.12.1", "r2d2", "scheduled-thread-pool", "serde", @@ -2539,6 +2544,7 @@ name = "flowy-user" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "base64 0.21.5", "bytes", "chrono", @@ -2551,6 +2557,7 @@ dependencies = [ "collab-integrate", "collab-plugins", "collab-user", + "dashmap 6.0.1", "diesel", "diesel_derives", "fancy-regex 0.11.0", @@ -2567,7 +2574,6 @@ dependencies = [ 
"lib-dispatch", "lib-infra", "once_cell", - "parking_lot 0.12.1", "protobuf", "semver", "serde", @@ -3068,7 +3074,7 @@ dependencies = [ [[package]] name = "gotrue" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "futures-util", @@ -3085,7 +3091,7 @@ dependencies = [ [[package]] name = "gotrue-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "app-error", @@ -3517,7 +3523,7 @@ dependencies = [ [[package]] name = "infra" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bytes", @@ -3782,7 +3788,6 @@ dependencies = [ "futures-util", "getrandom 0.2.10", "nanoid", - "parking_lot 0.12.1", "pin-project", "protobuf", "serde", @@ -6115,7 +6120,7 @@ dependencies = [ [[package]] name = "shared-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "app-error", diff --git a/frontend/appflowy_tauri/src-tauri/Cargo.toml b/frontend/appflowy_tauri/src-tauri/Cargo.toml index 25aab8120f..eba3aa40de 100644 --- a/frontend/appflowy_tauri/src-tauri/Cargo.toml 
+++ b/frontend/appflowy_tauri/src-tauri/Cargo.toml @@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"] # To switch to the local path, run: # scripts/tool/update_collab_source.sh # ⚠️⚠️⚠️️ -collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } +collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } # Working directory: frontend # To 
update the commit ID, run: diff --git a/frontend/appflowy_tauri/src-tauri/src/init.rs b/frontend/appflowy_tauri/src-tauri/src/init.rs index 636735e5f4..72e60b4a41 100644 --- a/frontend/appflowy_tauri/src-tauri/src/init.rs +++ b/frontend/appflowy_tauri/src-tauri/src/init.rs @@ -1,9 +1,9 @@ +use dotenv::dotenv; use flowy_core::config::AppFlowyCoreConfig; -use flowy_core::{AppFlowyCore, MutexAppFlowyCore, DEFAULT_NAME}; +use flowy_core::{AppFlowyCore, DEFAULT_NAME}; use lib_dispatch::runtime::AFPluginRuntime; use std::rc::Rc; - -use dotenv::dotenv; +use std::sync::Mutex; pub fn read_env() { dotenv().ok(); @@ -25,7 +25,7 @@ pub fn read_env() { } } -pub fn init_flowy_core() -> MutexAppFlowyCore { +pub(crate) fn init_appflowy_core() -> MutexAppFlowyCore { let config_json = include_str!("../tauri.conf.json"); let config: tauri_utils::config::Config = serde_json::from_str(config_json).unwrap(); @@ -67,3 +67,13 @@ pub fn init_flowy_core() -> MutexAppFlowyCore { MutexAppFlowyCore::new(AppFlowyCore::new(config, cloned_runtime, None).await) }) } + +pub struct MutexAppFlowyCore(pub Rc>); + +impl MutexAppFlowyCore { + fn new(appflowy_core: AppFlowyCore) -> Self { + Self(Rc::new(Mutex::new(appflowy_core))) + } +} +unsafe impl Sync for MutexAppFlowyCore {} +unsafe impl Send for MutexAppFlowyCore {} diff --git a/frontend/appflowy_tauri/src-tauri/src/main.rs b/frontend/appflowy_tauri/src-tauri/src/main.rs index 6a69de07fd..5f12d1be81 100644 --- a/frontend/appflowy_tauri/src-tauri/src/main.rs +++ b/frontend/appflowy_tauri/src-tauri/src/main.rs @@ -11,17 +11,18 @@ mod init; mod notification; mod request; +use crate::init::init_appflowy_core; +use crate::request::invoke_request; use flowy_notification::{register_notification_sender, unregister_all_notification_sender}; -use init::*; use notification::*; -use request::*; use tauri::Manager; + extern crate dotenv; fn main() { tauri_plugin_deep_link::prepare(DEEP_LINK_SCHEME); - let flowy_core = init_flowy_core(); + let flowy_core = 
init_appflowy_core(); tauri::Builder::default() .invoke_handler(tauri::generate_handler![invoke_request]) .manage(flowy_core) diff --git a/frontend/appflowy_tauri/src-tauri/src/request.rs b/frontend/appflowy_tauri/src-tauri/src/request.rs index 146d303cc0..ff69a438c9 100644 --- a/frontend/appflowy_tauri/src-tauri/src/request.rs +++ b/frontend/appflowy_tauri/src-tauri/src/request.rs @@ -1,4 +1,4 @@ -use flowy_core::; +use crate::init::MutexAppFlowyCore; use lib_dispatch::prelude::{ AFPluginDispatcher, AFPluginEventResponse, AFPluginRequest, StatusCode, }; @@ -39,7 +39,7 @@ pub async fn invoke_request( ) -> AFTauriResponse { let request: AFPluginRequest = request.into(); let state: State = app_handler.state(); - let dispatcher = state.0.lock().dispatcher(); + let dispatcher = state.0.lock().unwrap().dispatcher(); let response = AFPluginDispatcher::sync_send(dispatcher, request); response.into() } diff --git a/frontend/appflowy_web_app/src-tauri/Cargo.lock b/frontend/appflowy_web_app/src-tauri/Cargo.lock index 7af7287706..6a6d842bf7 100644 --- a/frontend/appflowy_web_app/src-tauri/Cargo.lock +++ b/frontend/appflowy_web_app/src-tauri/Cargo.lock @@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "app-error" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bincode", @@ -183,7 +183,7 @@ dependencies = [ [[package]] name = "appflowy-ai-client" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", 
"bytes", @@ -800,11 +800,12 @@ dependencies = [ [[package]] name = "client-api" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "again", "anyhow", "app-error", + "arc-swap", "async-trait", "bincode", "brotli", @@ -850,7 +851,7 @@ dependencies = [ [[package]] name = "client-api-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "collab-entity", "collab-rt-entity", @@ -862,7 +863,7 @@ dependencies = [ [[package]] name = "client-websocket" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "futures-channel", "futures-util", @@ -945,15 +946,16 @@ dependencies = [ [[package]] name = "collab" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", + "arc-swap", "async-trait", "bincode", "bytes", "chrono", "js-sys", - "parking_lot 0.12.1", + "lazy_static", "serde", "serde_json", "serde_repr", @@ -969,7 +971,7 @@ dependencies = [ [[package]] name = "collab-database" version = "0.2.0" -source = 
"git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "async-trait", @@ -978,11 +980,11 @@ dependencies = [ "collab-entity", "collab-plugins", "dashmap 5.5.3", + "futures", "getrandom 0.2.12", "js-sys", "lazy_static", "nanoid", - "parking_lot 0.12.1", "rayon", "serde", "serde_json", @@ -999,14 +1001,14 @@ dependencies = [ [[package]] name = "collab-document" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", + "arc-swap", "collab", "collab-entity", "getrandom 0.2.12", "nanoid", - "parking_lot 0.12.1", "serde", "serde_json", "thiserror", @@ -1019,7 +1021,7 @@ dependencies = [ [[package]] name = "collab-entity" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "bytes", @@ -1038,14 +1040,15 @@ dependencies = [ [[package]] name = "collab-folder" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", + "arc-swap", "chrono", "collab", "collab-entity", + "dashmap 5.5.3", "getrandom 0.2.12", - "parking_lot 0.12.1", "serde", "serde_json", "serde_repr", @@ -1060,13 +1063,17 @@ name = 
"collab-integrate" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "async-trait", "collab", + "collab-database", + "collab-document", "collab-entity", + "collab-folder", "collab-plugins", + "collab-user", "futures", "lib-infra", - "parking_lot 0.12.1", "serde", "serde_json", "tokio", @@ -1076,7 +1083,7 @@ dependencies = [ [[package]] name = "collab-plugins" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "async-stream", @@ -1092,7 +1099,6 @@ dependencies = [ "indexed_db_futures", "js-sys", "lazy_static", - "parking_lot 0.12.1", "rand 0.8.5", "rocksdb", "serde", @@ -1115,7 +1121,7 @@ dependencies = [ [[package]] name = "collab-rt-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bincode", @@ -1140,7 +1146,7 @@ dependencies = [ [[package]] name = "collab-rt-protocol" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "async-trait", @@ -1157,13 +1163,12 @@ dependencies = [ [[package]] name = "collab-user" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" 
dependencies = [ "anyhow", "collab", "collab-entity", "getrandom 0.2.12", - "parking_lot 0.12.1", "serde", "serde_json", "tokio", @@ -1536,7 +1541,7 @@ checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" [[package]] name = "database-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "app-error", @@ -2002,6 +2007,7 @@ dependencies = [ "anyhow", "appflowy-local-ai", "appflowy-plugin", + "arc-swap", "base64 0.21.7", "bytes", "dashmap 6.0.1", @@ -2019,7 +2025,6 @@ dependencies = [ "log", "md5", "notify", - "parking_lot 0.12.1", "pin-project", "protobuf", "reqwest", @@ -2102,6 +2107,7 @@ version = "0.1.0" dependencies = [ "anyhow", "appflowy-local-ai", + "arc-swap", "base64 0.21.7", "bytes", "client-api", @@ -2109,6 +2115,7 @@ dependencies = [ "collab-entity", "collab-integrate", "collab-plugins", + "dashmap 6.0.1", "diesel", "flowy-ai", "flowy-ai-pub", @@ -2135,7 +2142,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "lib-log", - "parking_lot 0.12.1", "semver", "serde", "serde_json", @@ -2165,6 +2171,7 @@ name = "flowy-database2" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "async-stream", "async-trait", "bytes", @@ -2189,7 +2196,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "rayon", "rust_decimal", @@ -2261,7 +2267,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "scraper 0.18.1", "serde", @@ -2332,6 +2337,7 @@ dependencies = [ name = "flowy-folder" version = "0.1.0" dependencies = [ + "arc-swap", "async-trait", "bytes", "chrono", @@ -2353,7 +2359,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "regex", "serde", @@ 
-2448,6 +2453,7 @@ name = "flowy-server" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "bytes", "chrono", "client-api", @@ -2456,6 +2462,7 @@ dependencies = [ "collab-entity", "collab-folder", "collab-plugins", + "dashmap 6.0.1", "flowy-ai-pub", "flowy-database-pub", "flowy-document-pub", @@ -2475,7 +2482,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "mime_guess", - "parking_lot 0.12.1", "postgrest", "rand 0.8.5", "reqwest", @@ -2511,7 +2517,6 @@ dependencies = [ "diesel_derives", "diesel_migrations", "libsqlite3-sys", - "parking_lot 0.12.1", "r2d2", "scheduled-thread-pool", "serde", @@ -2569,6 +2574,7 @@ name = "flowy-user" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "base64 0.21.7", "bytes", "chrono", @@ -2581,6 +2587,7 @@ dependencies = [ "collab-integrate", "collab-plugins", "collab-user", + "dashmap 6.0.1", "diesel", "diesel_derives", "fancy-regex 0.11.0", @@ -2597,7 +2604,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "once_cell", - "parking_lot 0.12.1", "protobuf", "semver", "serde", @@ -3135,7 +3141,7 @@ dependencies = [ [[package]] name = "gotrue" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "futures-util", @@ -3152,7 +3158,7 @@ dependencies = [ [[package]] name = "gotrue-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "app-error", @@ -3589,7 +3595,7 @@ dependencies = [ [[package]] name = "infra" version = "0.1.0" -source = 
"git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bytes", @@ -3859,7 +3865,6 @@ dependencies = [ "futures-util", "getrandom 0.2.12", "nanoid", - "parking_lot 0.12.1", "pin-project", "protobuf", "serde", @@ -6179,7 +6184,7 @@ dependencies = [ [[package]] name = "shared-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "app-error", diff --git a/frontend/appflowy_web_app/src-tauri/Cargo.toml b/frontend/appflowy_web_app/src-tauri/Cargo.toml index 5d5dc9ec3a..b3d45657bc 100644 --- a/frontend/appflowy_web_app/src-tauri/Cargo.toml +++ b/frontend/appflowy_web_app/src-tauri/Cargo.toml @@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"] # To switch to the local path, run: # scripts/tool/update_collab_source.sh # ⚠️⚠️⚠️️ -collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } 
+collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } # Working directory: frontend # To update the commit ID, run: diff --git a/frontend/appflowy_web_app/src-tauri/src/init.rs b/frontend/appflowy_web_app/src-tauri/src/init.rs index 636735e5f4..b4c771b1b5 100644 --- a/frontend/appflowy_web_app/src-tauri/src/init.rs +++ b/frontend/appflowy_web_app/src-tauri/src/init.rs @@ -1,9 +1,9 @@ +use dotenv::dotenv; use flowy_core::config::AppFlowyCoreConfig; -use flowy_core::{AppFlowyCore, MutexAppFlowyCore, DEFAULT_NAME}; +use flowy_core::{AppFlowyCore, DEFAULT_NAME}; use lib_dispatch::runtime::AFPluginRuntime; use std::rc::Rc; - -use dotenv::dotenv; +use std::sync::Mutex; pub fn read_env() { dotenv().ok(); @@ -25,7 +25,7 @@ pub fn read_env() { } } -pub fn init_flowy_core() -> MutexAppFlowyCore { +pub fn init_appflowy_core() -> MutexAppFlowyCore { let config_json = include_str!("../tauri.conf.json"); let config: tauri_utils::config::Config = serde_json::from_str(config_json).unwrap(); @@ -67,3 +67,13 @@ pub fn init_flowy_core() -> MutexAppFlowyCore { 
MutexAppFlowyCore::new(AppFlowyCore::new(config, cloned_runtime, None).await) }) } + +pub struct MutexAppFlowyCore(pub Rc>); + +impl MutexAppFlowyCore { + pub(crate) fn new(appflowy_core: AppFlowyCore) -> Self { + Self(Rc::new(Mutex::new(appflowy_core))) + } +} +unsafe impl Sync for MutexAppFlowyCore {} +unsafe impl Send for MutexAppFlowyCore {} diff --git a/frontend/appflowy_web_app/src-tauri/src/main.rs b/frontend/appflowy_web_app/src-tauri/src/main.rs index 6a69de07fd..781ce55098 100644 --- a/frontend/appflowy_web_app/src-tauri/src/main.rs +++ b/frontend/appflowy_web_app/src-tauri/src/main.rs @@ -21,7 +21,7 @@ extern crate dotenv; fn main() { tauri_plugin_deep_link::prepare(DEEP_LINK_SCHEME); - let flowy_core = init_flowy_core(); + let flowy_core = init_appflowy_core(); tauri::Builder::default() .invoke_handler(tauri::generate_handler![invoke_request]) .manage(flowy_core) diff --git a/frontend/appflowy_web_app/src-tauri/src/request.rs b/frontend/appflowy_web_app/src-tauri/src/request.rs index 6d2d01fb6e..ff69a438c9 100644 --- a/frontend/appflowy_web_app/src-tauri/src/request.rs +++ b/frontend/appflowy_web_app/src-tauri/src/request.rs @@ -1,4 +1,4 @@ -use flowy_core::MutexAppFlowyCore; +use crate::init::MutexAppFlowyCore; use lib_dispatch::prelude::{ AFPluginDispatcher, AFPluginEventResponse, AFPluginRequest, StatusCode, }; @@ -39,7 +39,7 @@ pub async fn invoke_request( ) -> AFTauriResponse { let request: AFPluginRequest = request.into(); let state: State = app_handler.state(); - let dispatcher = state.0.lock().dispatcher(); + let dispatcher = state.0.lock().unwrap().dispatcher(); let response = AFPluginDispatcher::sync_send(dispatcher, request); response.into() } diff --git a/frontend/rust-lib/Cargo.lock b/frontend/rust-lib/Cargo.lock index 3b5e6896f4..4f8ce47d41 100644 --- a/frontend/rust-lib/Cargo.lock +++ b/frontend/rust-lib/Cargo.lock @@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = 
"app-error" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bincode", @@ -183,7 +183,7 @@ dependencies = [ [[package]] name = "appflowy-ai-client" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bytes", @@ -718,11 +718,12 @@ dependencies = [ [[package]] name = "client-api" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "again", "anyhow", "app-error", + "arc-swap", "async-trait", "bincode", "brotli", @@ -768,7 +769,7 @@ dependencies = [ [[package]] name = "client-api-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "collab-entity", "collab-rt-entity", @@ -780,7 +781,7 @@ dependencies = [ [[package]] name = "client-websocket" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "futures-channel", "futures-util", @@ 
-823,15 +824,16 @@ dependencies = [ [[package]] name = "collab" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", + "arc-swap", "async-trait", "bincode", "bytes", "chrono", "js-sys", - "parking_lot 0.12.1", + "lazy_static", "serde", "serde_json", "serde_repr", @@ -847,7 +849,7 @@ dependencies = [ [[package]] name = "collab-database" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "async-trait", @@ -856,11 +858,11 @@ dependencies = [ "collab-entity", "collab-plugins", "dashmap 5.5.3", + "futures", "getrandom 0.2.10", "js-sys", "lazy_static", "nanoid", - "parking_lot 0.12.1", "rayon", "serde", "serde_json", @@ -877,14 +879,14 @@ dependencies = [ [[package]] name = "collab-document" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", + "arc-swap", "collab", "collab-entity", "getrandom 0.2.10", "nanoid", - "parking_lot 0.12.1", "serde", "serde_json", "thiserror", @@ -897,7 +899,7 @@ dependencies = [ [[package]] name = "collab-entity" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = 
"git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "bytes", @@ -916,14 +918,15 @@ dependencies = [ [[package]] name = "collab-folder" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", + "arc-swap", "chrono", "collab", "collab-entity", + "dashmap 5.5.3", "getrandom 0.2.10", - "parking_lot 0.12.1", "serde", "serde_json", "serde_repr", @@ -938,13 +941,17 @@ name = "collab-integrate" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "async-trait", "collab", + "collab-database", + "collab-document", "collab-entity", + "collab-folder", "collab-plugins", + "collab-user", "futures", "lib-infra", - "parking_lot 0.12.1", "serde", "serde_json", "tokio", @@ -954,7 +961,7 @@ dependencies = [ [[package]] name = "collab-plugins" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "async-stream", @@ -970,7 +977,6 @@ dependencies = [ "indexed_db_futures", "js-sys", "lazy_static", - "parking_lot 0.12.1", "rand 0.8.5", "rocksdb", "serde", @@ -993,7 +999,7 @@ dependencies = [ [[package]] name = "collab-rt-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bincode", @@ -1018,7 +1024,7 @@ 
dependencies = [ [[package]] name = "collab-rt-protocol" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "async-trait", @@ -1035,13 +1041,12 @@ dependencies = [ [[package]] name = "collab-user" version = "0.2.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9" dependencies = [ "anyhow", "collab", "collab-entity", "getrandom 0.2.10", - "parking_lot 0.12.1", "serde", "serde_json", "tokio", @@ -1323,7 +1328,6 @@ dependencies = [ "lazy_static", "lib-dispatch", "lib-log", - "parking_lot 0.12.1", "protobuf", "semver", "serde", @@ -1370,7 +1374,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" [[package]] name = "database-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "app-error", @@ -1662,7 +1666,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "rand 0.8.5", "semver", @@ -1795,6 +1798,7 @@ dependencies = [ "anyhow", "appflowy-local-ai", "appflowy-plugin", + "arc-swap", "base64 0.21.5", "bytes", "dashmap 6.0.1", @@ -1813,7 +1817,6 @@ dependencies = [ "log", "md5", "notify", - "parking_lot 0.12.1", "pin-project", "protobuf", "reqwest", @@ -1898,6 +1901,7 @@ version = "0.1.0" dependencies = [ "anyhow", "appflowy-local-ai", + "arc-swap", "base64 0.21.5", "bytes", 
"client-api", @@ -1906,6 +1910,7 @@ dependencies = [ "collab-integrate", "collab-plugins", "console-subscriber", + "dashmap 6.0.1", "diesel", "flowy-ai", "flowy-ai-pub", @@ -1932,7 +1937,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "lib-log", - "parking_lot 0.12.1", "semver", "serde", "serde_json", @@ -1962,6 +1966,7 @@ name = "flowy-database2" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "async-stream", "async-trait", "bytes", @@ -1987,7 +1992,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "rayon", "rust_decimal", @@ -2059,7 +2063,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "scraper 0.18.1", "serde", @@ -2132,6 +2135,7 @@ dependencies = [ name = "flowy-folder" version = "0.1.0" dependencies = [ + "arc-swap", "async-trait", "bytes", "chrono", @@ -2153,7 +2157,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "nanoid", - "parking_lot 0.12.1", "protobuf", "regex", "serde", @@ -2249,6 +2252,7 @@ name = "flowy-server" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "assert-json-diff", "bytes", "chrono", @@ -2258,6 +2262,7 @@ dependencies = [ "collab-entity", "collab-folder", "collab-plugins", + "dashmap 6.0.1", "dotenv", "flowy-ai-pub", "flowy-database-pub", @@ -2278,7 +2283,6 @@ dependencies = [ "lib-dispatch", "lib-infra", "mime_guess", - "parking_lot 0.12.1", "postgrest", "rand 0.8.5", "reqwest", @@ -2317,7 +2321,6 @@ dependencies = [ "libsqlite3-sys", "openssl", "openssl-sys", - "parking_lot 0.12.1", "r2d2", "scheduled-thread-pool", "serde", @@ -2378,6 +2381,7 @@ name = "flowy-user" version = "0.1.0" dependencies = [ "anyhow", + "arc-swap", "base64 0.21.5", "bytes", "chrono", @@ -2390,6 +2394,7 @@ dependencies = [ "collab-integrate", "collab-plugins", "collab-user", + "dashmap 6.0.1", "diesel", "diesel_derives", "fake", @@ -2408,7 +2413,6 @@ dependencies = [ "lib-infra", "nanoid", "once_cell", - "parking_lot 0.12.1", "protobuf", 
"quickcheck", "quickcheck_macros", @@ -2747,7 +2751,7 @@ dependencies = [ [[package]] name = "gotrue" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "futures-util", @@ -2764,7 +2768,7 @@ dependencies = [ [[package]] name = "gotrue-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "app-error", @@ -3129,7 +3133,7 @@ dependencies = [ [[package]] name = "infra" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "bytes", @@ -3295,7 +3299,6 @@ dependencies = [ "futures-util", "getrandom 0.2.10", "nanoid", - "parking_lot 0.12.1", "pin-project", "protobuf", "serde", @@ -5338,7 +5341,7 @@ dependencies = [ [[package]] name = "shared-entity" version = "0.1.0" -source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35" +source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0" dependencies = [ "anyhow", "app-error", diff --git a/frontend/rust-lib/Cargo.toml b/frontend/rust-lib/Cargo.toml index 3875726ff9..da234b004e 100644 --- a/frontend/rust-lib/Cargo.toml +++ b/frontend/rust-lib/Cargo.toml @@ -68,6 +68,7 @@ flowy-date = { workspace = true, path = "flowy-date" } 
flowy-ai = { workspace = true, path = "flowy-ai" } flowy-ai-pub = { workspace = true, path = "flowy-ai-pub" } anyhow = "1.0" +arc-swap = "1.7" tracing = "0.1.40" bytes = "1.5.0" serde_json = "1.0.108" @@ -76,7 +77,6 @@ protobuf = { version = "2.28.0" } diesel = { version = "2.1.0", features = ["sqlite", "chrono", "r2d2", "serde_json"] } uuid = { version = "1.5.0", features = ["serde", "v4", "v5"] } serde_repr = "0.1" -parking_lot = "0.12" futures = "0.3.29" tokio = "1.38.0" tokio-stream = "0.1.14" @@ -100,8 +100,8 @@ dashmap = "6.0.1" # Run the script.add_workspace_members: # scripts/tool/update_client_api_rev.sh new_rev_id # ⚠️⚠️⚠️️ -client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "7878a018a18553e3d8201e572a0c066c14ba3b35" } -client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "7878a018a18553e3d8201e572a0c066c14ba3b35" } +client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "d503905" } +client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "d503905" } [profile.dev] opt-level = 0 @@ -136,13 +136,13 @@ rocksdb = { git = "https://github.com/rust-rocksdb/rust-rocksdb", rev = "1710120 # To switch to the local path, run: # scripts/tool/update_collab_source.sh # ⚠️⚠️⚠️️ -collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } -collab-user = { version = "0.2", git = 
"https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } +collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } +collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" } # Working directory: frontend # To update the commit ID, run: diff --git a/frontend/rust-lib/collab-integrate/Cargo.toml b/frontend/rust-lib/collab-integrate/Cargo.toml index ffddb6a911..8b0a530b19 100644 --- a/frontend/rust-lib/collab-integrate/Cargo.toml +++ b/frontend/rust-lib/collab-integrate/Cargo.toml @@ -11,15 +11,19 @@ crate-type = ["cdylib", "rlib"] collab = { workspace = true } collab-plugins = { workspace = true } collab-entity = { workspace = true } +collab-document = { workspace = true } +collab-folder = { workspace = true } +collab-user = { workspace = true } +collab-database = { workspace = true } serde.workspace = true serde_json.workspace = true anyhow.workspace = true tracing.workspace = true -parking_lot.workspace = true async-trait.workspace = true tokio = { workspace = true, features = ["sync"] } lib-infra = { workspace = true } futures = "0.3" +arc-swap = "1.7" [features] default = [] diff --git 
a/frontend/rust-lib/collab-integrate/src/collab_builder.rs b/frontend/rust-lib/collab-integrate/src/collab_builder.rs index 571264d1d2..3742b2fc72 100644 --- a/frontend/rust-lib/collab-integrate/src/collab_builder.rs +++ b/frontend/rust-lib/collab-integrate/src/collab_builder.rs @@ -1,11 +1,18 @@ +use std::borrow::BorrowMut; use std::fmt::{Debug, Display}; use std::sync::{Arc, Weak}; use crate::CollabKVDB; use anyhow::Error; -use collab::core::collab::{DataSource, MutexCollab}; -use collab::preclude::CollabBuilder; +use arc_swap::{ArcSwap, ArcSwapOption}; +use collab::core::collab::DataSource; +use collab::core::collab_plugin::CollabPersistence; +use collab::preclude::{Collab, CollabBuilder}; +use collab_database::workspace_database::{DatabaseCollabService, WorkspaceDatabase}; +use collab_document::blocks::DocumentData; +use collab_document::document::Document; use collab_entity::{CollabObject, CollabType}; +use collab_folder::{Folder, FolderData, FolderNotify}; use collab_plugins::connect_state::{CollabConnectReachability, CollabConnectState}; use collab_plugins::local_storage::kv::snapshot::SnapshotPersistence; if_native! 
{ @@ -17,17 +24,19 @@ use collab_plugins::local_storage::indexeddb::IndexeddbDiskPlugin; } pub use crate::plugin_provider::CollabCloudPluginProvider; +use collab_plugins::local_storage::kv::doc::CollabKVAction; +use collab_plugins::local_storage::kv::KVTransactionDB; use collab_plugins::local_storage::CollabPersistenceConfig; +use collab_user::core::{UserAwareness, UserAwarenessNotifier}; +use tokio::sync::RwLock; use lib_infra::{if_native, if_wasm}; -use parking_lot::{Mutex, RwLock}; -use tracing::{instrument, trace}; +use tracing::{error, instrument, trace, warn}; #[derive(Clone, Debug)] pub enum CollabPluginProviderType { Local, AppFlowyCloud, - Supabase, } pub enum CollabPluginProviderContext { @@ -35,13 +44,7 @@ pub enum CollabPluginProviderContext { AppFlowyCloud { uid: i64, collab_object: CollabObject, - local_collab: Weak, - }, - Supabase { - uid: i64, - collab_object: CollabObject, - local_collab: Weak, - local_collab_db: Weak, + local_collab: Weak + Send + Sync + 'static>>, }, } @@ -52,13 +55,7 @@ impl Display for CollabPluginProviderContext { CollabPluginProviderContext::AppFlowyCloud { uid: _, collab_object, - local_collab: _, - } => collab_object.to_string(), - CollabPluginProviderContext::Supabase { - uid: _, - collab_object, - local_collab: _, - local_collab_db: _, + .. 
} => collab_object.to_string(), }; write!(f, "{}", str) @@ -72,10 +69,10 @@ pub trait WorkspaceCollabIntegrate: Send + Sync { pub struct AppFlowyCollabBuilder { network_reachability: CollabConnectReachability, - plugin_provider: RwLock>, - snapshot_persistence: Mutex>>, + plugin_provider: ArcSwap>, + snapshot_persistence: ArcSwapOption>, #[cfg(not(target_arch = "wasm32"))] - rocksdb_backup: Mutex>>, + rocksdb_backup: ArcSwapOption>, workspace_integrate: Arc, } @@ -86,7 +83,7 @@ impl AppFlowyCollabBuilder { ) -> Self { Self { network_reachability: CollabConnectReachability::new(), - plugin_provider: RwLock::new(Arc::new(storage_provider)), + plugin_provider: ArcSwap::new(Arc::new(Arc::new(storage_provider))), snapshot_persistence: Default::default(), #[cfg(not(target_arch = "wasm32"))] rocksdb_backup: Default::default(), @@ -95,12 +92,14 @@ impl AppFlowyCollabBuilder { } pub fn set_snapshot_persistence(&self, snapshot_persistence: Arc) { - *self.snapshot_persistence.lock() = Some(snapshot_persistence); + self + .snapshot_persistence + .store(Some(snapshot_persistence.into())); } #[cfg(not(target_arch = "wasm32"))] pub fn set_rocksdb_backup(&self, rocksdb_backup: Arc) { - *self.rocksdb_backup.lock() = Some(rocksdb_backup); + self.rocksdb_backup.store(Some(rocksdb_backup.into())); } pub fn update_network(&self, reachable: bool) { @@ -115,12 +114,23 @@ impl AppFlowyCollabBuilder { } } - fn collab_object( + pub fn collab_object( &self, + workspace_id: &str, uid: i64, object_id: &str, collab_type: CollabType, ) -> Result { + // Compare the workspace_id with the currently opened workspace_id. Return an error if they do not match. + // This check is crucial in asynchronous code contexts where the workspace_id might change during operation. + let actual_workspace_id = self.workspace_integrate.workspace_id()?; + if workspace_id != actual_workspace_id { + return Err(anyhow::anyhow!( + "workspace_id not match when build collab. 
expect workspace_id: {}, actual workspace_id: {}", + workspace_id, + actual_workspace_id + )); + } let device_id = self.workspace_integrate.device_id()?; let workspace_id = self.workspace_integrate.workspace_id()?; Ok(CollabObject::new( @@ -132,170 +142,155 @@ impl AppFlowyCollabBuilder { )) } - /// Creates a new collaboration builder with the default configuration. - /// - /// This function will initiate the creation of a [MutexCollab] object if it does not already exist. - /// To check for the existence of the object prior to creation, you should utilize a transaction - /// returned by the [read_txn] method of the [CollabKVDB]. Then, invoke the [is_exist] method - /// to confirm the object's presence. - /// - /// # Parameters - /// - `uid`: The user ID associated with the collaboration. - /// - `object_id`: A string reference representing the ID of the object. - /// - `object_type`: The type of the collaboration, defined by the [CollabType] enum. - /// - `raw_data`: The raw data of the collaboration object, defined by the [CollabDocState] type. - /// - `collab_db`: A weak reference to the [CollabKVDB]. 
- /// #[allow(clippy::too_many_arguments)] - pub async fn build( + #[instrument( + level = "trace", + skip(self, data_source, collab_db, builder_config, data) + )] + pub fn create_document( &self, - workspace_id: &str, - uid: i64, - object_id: &str, - object_type: CollabType, - collab_doc_state: DataSource, + object: CollabObject, + data_source: DataSource, collab_db: Weak, - build_config: CollabBuilderConfig, - ) -> Result, Error> { - self.build_with_config( - workspace_id, - uid, - object_id, - object_type, - collab_db, - collab_doc_state, - build_config, - ) + builder_config: CollabBuilderConfig, + data: Option, + ) -> Result>, Error> { + assert_eq!(object.collab_type, CollabType::Document); + let collab = self.build_collab(&object, &collab_db, data_source)?; + let document = Document::open_with(collab, data)?; + let document = Arc::new(RwLock::new(document)); + self.finalize(object, builder_config, document) } - /// Creates a new collaboration builder with the custom configuration. - /// - /// This function will initiate the creation of a [MutexCollab] object if it does not already exist. - /// To check for the existence of the object prior to creation, you should utilize a transaction - /// returned by the [read_txn] method of the [CollabKVDB]. Then, invoke the [is_exist] method - /// to confirm the object's presence. - /// - /// # Parameters - /// - `uid`: The user ID associated with the collaboration. - /// - `object_id`: A string reference representing the ID of the object. - /// - `object_type`: The type of the collaboration, defined by the [CollabType] enum. - /// - `raw_data`: The raw data of the collaboration object, defined by the [CollabDocState] type. - /// - `collab_db`: A weak reference to the [CollabKVDB]. 
- /// #[allow(clippy::too_many_arguments)] - #[instrument(level = "trace", skip(self, collab_db, collab_doc_state, build_config))] - pub fn build_with_config( + #[instrument( + level = "trace", + skip(self, object, doc_state, collab_db, builder_config, folder_notifier) + )] + pub fn create_folder( &self, - workspace_id: &str, - uid: i64, - object_id: &str, - object_type: CollabType, + object: CollabObject, + doc_state: DataSource, collab_db: Weak, - collab_doc_state: DataSource, - build_config: CollabBuilderConfig, - ) -> Result, Error> { - let collab = CollabBuilder::new(uid, object_id) - .with_doc_state(collab_doc_state) + builder_config: CollabBuilderConfig, + folder_notifier: Option, + folder_data: Option, + ) -> Result>, Error> { + assert_eq!(object.collab_type, CollabType::Folder); + let collab = self.build_collab(&object, &collab_db, doc_state)?; + let folder = Folder::open_with(object.uid, collab, folder_notifier, folder_data); + let folder = Arc::new(RwLock::new(folder)); + self.finalize(object, builder_config, folder) + } + + #[allow(clippy::too_many_arguments)] + #[instrument( + level = "trace", + skip(self, object, doc_state, collab_db, builder_config, notifier) + )] + pub fn create_user_awareness( + &self, + object: CollabObject, + doc_state: DataSource, + collab_db: Weak, + builder_config: CollabBuilderConfig, + notifier: Option, + ) -> Result>, Error> { + assert_eq!(object.collab_type, CollabType::UserAwareness); + let collab = self.build_collab(&object, &collab_db, doc_state)?; + let user_awareness = UserAwareness::open(collab, notifier); + let user_awareness = Arc::new(RwLock::new(user_awareness)); + self.finalize(object, builder_config, user_awareness) + } + + #[allow(clippy::too_many_arguments)] + #[instrument( + level = "trace", + skip(self, object, doc_state, collab_db, builder_config, collab_service) + )] + pub fn create_workspace_database( + &self, + object: CollabObject, + doc_state: DataSource, + collab_db: Weak, + builder_config: 
CollabBuilderConfig, + collab_service: impl DatabaseCollabService, + ) -> Result>, Error> { + assert_eq!(object.collab_type, CollabType::WorkspaceDatabase); + let collab = self.build_collab(&object, &collab_db, doc_state)?; + let workspace = WorkspaceDatabase::open(object.uid, collab, collab_db.clone(), collab_service); + let workspace = Arc::new(RwLock::new(workspace)); + self.finalize(object, builder_config, workspace) + } + + pub fn build_collab( + &self, + object: &CollabObject, + collab_db: &Weak, + data_source: DataSource, + ) -> Result { + let collab = CollabBuilder::new(object.uid, &object.object_id, data_source) .with_device_id(self.workspace_integrate.device_id()?) .build()?; - // Compare the workspace_id with the currently opened workspace_id. Return an error if they do not match. - // This check is crucial in asynchronous code contexts where the workspace_id might change during operation. - let actual_workspace_id = self.workspace_integrate.workspace_id()?; - if workspace_id != actual_workspace_id { - return Err(anyhow::anyhow!( - "workspace_id not match when build collab. 
expect workspace_id: {}, actual workspace_id: {}", - workspace_id, - actual_workspace_id - )); - } let persistence_config = CollabPersistenceConfig::default(); + let db_plugin = RocksdbDiskPlugin::new_with_config( + object.uid, + object.object_id.to_string(), + object.collab_type.clone(), + collab_db.clone(), + persistence_config.clone(), + ); + collab.add_plugin(Box::new(db_plugin)); - #[cfg(target_arch = "wasm32")] - { - collab.lock().add_plugin(Box::new(IndexeddbDiskPlugin::new( - uid, - object_id.to_string(), - object_type.clone(), - collab_db.clone(), - ))); + Ok(collab) + } + + pub fn finalize( + &self, + object: CollabObject, + build_config: CollabBuilderConfig, + collab: Arc>, + ) -> Result>, Error> + where + T: BorrowMut + Send + Sync + 'static, + { + let mut write_collab = collab.try_write()?; + if !write_collab.borrow().get_state().is_uninitialized() { + drop(write_collab); + return Ok(collab); } + trace!("🚀finalize collab:{}", object); + if build_config.sync_enable { + let plugin_provider = self.plugin_provider.load_full(); + let provider_type = plugin_provider.provider_type(); + let span = + tracing::span!(tracing::Level::TRACE, "collab_builder", object_id = %object.object_id); + let _enter = span.enter(); + match provider_type { + CollabPluginProviderType::AppFlowyCloud => { + let local_collab = Arc::downgrade(&collab); + let plugins = plugin_provider.get_plugins(CollabPluginProviderContext::AppFlowyCloud { + uid: object.uid, + collab_object: object, + local_collab, + }); - #[cfg(not(target_arch = "wasm32"))] - { - collab - .lock() - .add_plugin(Box::new(RocksdbDiskPlugin::new_with_config( - uid, - object_id.to_string(), - object_type.clone(), - collab_db.clone(), - persistence_config.clone(), - None, - ))); - } - - let arc_collab = Arc::new(collab); - - { - let collab_object = self.collab_object(uid, object_id, object_type.clone())?; - if build_config.sync_enable { - let provider_type = self.plugin_provider.read().provider_type(); - let span = 
tracing::span!(tracing::Level::TRACE, "collab_builder", object_id = %object_id); - let _enter = span.enter(); - match provider_type { - CollabPluginProviderType::AppFlowyCloud => { - let local_collab = Arc::downgrade(&arc_collab); - let plugins = - self - .plugin_provider - .read() - .get_plugins(CollabPluginProviderContext::AppFlowyCloud { - uid, - collab_object, - local_collab, - }); - - for plugin in plugins { - arc_collab.lock().add_plugin(plugin); - } - }, - CollabPluginProviderType::Supabase => { - #[cfg(not(target_arch = "wasm32"))] - { - trace!("init supabase collab plugins"); - let local_collab = Arc::downgrade(&arc_collab); - let local_collab_db = collab_db.clone(); - let plugins = - self - .plugin_provider - .read() - .get_plugins(CollabPluginProviderContext::Supabase { - uid, - collab_object, - local_collab, - local_collab_db, - }); - for plugin in plugins { - arc_collab.lock().add_plugin(plugin); - } - } - }, - CollabPluginProviderType::Local => {}, - } + // at the moment when we get the lock, the collab object is not yet exposed outside + for plugin in plugins { + write_collab.borrow().add_plugin(plugin); + } + }, + CollabPluginProviderType::Local => {}, } } if build_config.auto_initialize { - #[cfg(target_arch = "wasm32")] - futures::executor::block_on(arc_collab.lock().initialize()); - - #[cfg(not(target_arch = "wasm32"))] - arc_collab.lock().initialize(); + // at the moment when we get the lock, the collab object is not yet exposed outside + (*write_collab).borrow_mut().initialize(); } - - trace!("collab initialized: {}:{}", object_type, object_id); - Ok(arc_collab) + drop(write_collab); + Ok(collab) } } @@ -328,3 +323,39 @@ impl CollabBuilderConfig { self } } + +pub struct KVDBCollabPersistenceImpl { + pub db: Weak, + pub uid: i64, +} + +impl KVDBCollabPersistenceImpl { + pub fn new(db: Weak, uid: i64) -> Self { + Self { db, uid } + } + + pub fn into_data_source(self) -> DataSource { + DataSource::Disk(Some(Box::new(self))) + } +} + +impl 
CollabPersistence for KVDBCollabPersistenceImpl { + fn load_collab(&self, collab: &mut Collab) { + if let Some(collab_db) = self.db.upgrade() { + let object_id = collab.object_id().to_string(); + let rocksdb_read = collab_db.read_txn(); + + if rocksdb_read.is_exist(self.uid, &object_id) { + let mut txn = collab.transact_mut(); + if let Err(err) = rocksdb_read.load_doc_with_txn(self.uid, &object_id, &mut txn) { + error!("🔴 load doc:{} failed: {}", object_id, err); + } + drop(rocksdb_read); + txn.commit(); + drop(txn); + } + } else { + warn!("collab_db is dropped"); + } + } +} diff --git a/frontend/rust-lib/collab-integrate/src/lib.rs b/frontend/rust-lib/collab-integrate/src/lib.rs index a7df75d72e..d24700f8d5 100644 --- a/frontend/rust-lib/collab-integrate/src/lib.rs +++ b/frontend/rust-lib/collab-integrate/src/lib.rs @@ -1,4 +1,3 @@ -pub use collab::core::collab::MutexCollab; pub use collab::preclude::Snapshot; pub use collab_plugins::local_storage::CollabPersistenceConfig; pub use collab_plugins::CollabKVDB; diff --git a/frontend/rust-lib/collab-integrate/src/native/plugin_provider.rs b/frontend/rust-lib/collab-integrate/src/native/plugin_provider.rs index a26fb8d933..b5b3b1f6e6 100644 --- a/frontend/rust-lib/collab-integrate/src/native/plugin_provider.rs +++ b/frontend/rust-lib/collab-integrate/src/native/plugin_provider.rs @@ -1,6 +1,7 @@ -use crate::collab_builder::{CollabPluginProviderContext, CollabPluginProviderType}; use collab::preclude::CollabPlugin; +use crate::collab_builder::{CollabPluginProviderContext, CollabPluginProviderType}; + #[cfg(target_arch = "wasm32")] pub trait CollabCloudPluginProvider: 'static { fn provider_type(&self) -> CollabPluginProviderType; @@ -11,9 +12,9 @@ pub trait CollabCloudPluginProvider: 'static { } #[cfg(target_arch = "wasm32")] -impl CollabCloudPluginProvider for std::rc::Rc +impl CollabCloudPluginProvider for std::rc::Rc where - T: CollabCloudPluginProvider, + U: CollabCloudPluginProvider, { fn provider_type(&self) -> 
CollabPluginProviderType { (**self).provider_type() @@ -38,9 +39,9 @@ pub trait CollabCloudPluginProvider: Send + Sync + 'static { } #[cfg(not(target_arch = "wasm32"))] -impl CollabCloudPluginProvider for std::sync::Arc +impl CollabCloudPluginProvider for std::sync::Arc where - T: CollabCloudPluginProvider, + U: CollabCloudPluginProvider, { fn provider_type(&self) -> CollabPluginProviderType { (**self).provider_type() diff --git a/frontend/rust-lib/dart-ffi/Cargo.toml b/frontend/rust-lib/dart-ffi/Cargo.toml index 22e07f3483..c60c09e0c9 100644 --- a/frontend/rust-lib/dart-ffi/Cargo.toml +++ b/frontend/rust-lib/dart-ffi/Cargo.toml @@ -22,7 +22,6 @@ serde_json.workspace = true bytes.workspace = true crossbeam-utils = "0.8.15" lazy_static = "1.4.0" -parking_lot.workspace = true tracing.workspace = true lib-log.workspace = true semver = "1.0.22" diff --git a/frontend/rust-lib/dart-ffi/src/env_serde.rs b/frontend/rust-lib/dart-ffi/src/env_serde.rs index db443a78f7..476c27bb46 100644 --- a/frontend/rust-lib/dart-ffi/src/env_serde.rs +++ b/frontend/rust-lib/dart-ffi/src/env_serde.rs @@ -3,7 +3,6 @@ use std::collections::HashMap; use serde::Deserialize; use flowy_server_pub::af_cloud_config::AFCloudConfiguration; -use flowy_server_pub::supabase_config::SupabaseConfiguration; use flowy_server_pub::AuthenticatorType; #[derive(Deserialize, Debug)] @@ -17,7 +16,7 @@ pub struct AppFlowyDartConfiguration { pub device_id: String, pub platform: String, pub authenticator_type: AuthenticatorType, - pub(crate) supabase_config: SupabaseConfiguration, + //pub(crate) supabase_config: SupabaseConfiguration, pub(crate) appflowy_cloud_config: AFCloudConfiguration, #[serde(default)] pub(crate) envs: HashMap, @@ -31,7 +30,7 @@ impl AppFlowyDartConfiguration { pub fn write_env(&self) { self.authenticator_type.write_env(); self.appflowy_cloud_config.write_env(); - self.supabase_config.write_env(); + //self.supabase_config.write_env(); for (k, v) in self.envs.iter() { std::env::set_var(k, v); 
diff --git a/frontend/rust-lib/dart-ffi/src/lib.rs b/frontend/rust-lib/dart-ffi/src/lib.rs index 14b5a13a24..85281c8cb0 100644 --- a/frontend/rust-lib/dart-ffi/src/lib.rs +++ b/frontend/rust-lib/dart-ffi/src/lib.rs @@ -2,10 +2,9 @@ use allo_isolate::Isolate; use lazy_static::lazy_static; -use parking_lot::Mutex; use semver::Version; use std::rc::Rc; -use std::sync::Arc; +use std::sync::{Arc, Mutex}; use std::{ffi::CStr, os::raw::c_char}; use tracing::{debug, error, info, trace, warn}; @@ -38,6 +37,10 @@ lazy_static! { static ref LOG_STREAM_ISOLATE: Mutex> = Mutex::new(None); } +unsafe impl Send for MutexAppFlowyCore {} +unsafe impl Sync for MutexAppFlowyCore {} + +///FIXME: I'm pretty sure that there's a better way to do this struct MutexAppFlowyCore(Rc>>); impl MutexAppFlowyCore { @@ -46,15 +49,12 @@ impl MutexAppFlowyCore { } fn dispatcher(&self) -> Option> { - let binding = self.0.lock(); + let binding = self.0.lock().unwrap(); let core = binding.as_ref(); core.map(|core| core.event_dispatcher.clone()) } } -unsafe impl Sync for MutexAppFlowyCore {} -unsafe impl Send for MutexAppFlowyCore {} - #[no_mangle] pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 { // and sent it the `Rust's` result @@ -87,7 +87,7 @@ pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 { // Ensure that the database is closed before initialization. Also, verify that the init_sdk function can be called // multiple times (is reentrant). Currently, only the database resource is exclusive. 
- if let Some(core) = &*APPFLOWY_CORE.0.lock() { + if let Some(core) = &*APPFLOWY_CORE.0.lock().unwrap() { core.close_db(); } @@ -96,11 +96,12 @@ pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 { let log_stream = LOG_STREAM_ISOLATE .lock() + .unwrap() .take() .map(|isolate| Arc::new(LogStreamSenderImpl { isolate }) as Arc); // let isolate = allo_isolate::Isolate::new(port); - *APPFLOWY_CORE.0.lock() = runtime.block_on(async move { + *APPFLOWY_CORE.0.lock().unwrap() = runtime.block_on(async move { Some(AppFlowyCore::new(config, cloned_runtime, log_stream).await) // isolate.post("".to_string()); }); @@ -168,7 +169,7 @@ pub extern "C" fn set_stream_port(notification_port: i64) -> i32 { #[no_mangle] pub extern "C" fn set_log_stream_port(port: i64) -> i32 { - *LOG_STREAM_ISOLATE.lock() = Some(Isolate::new(port)); + *LOG_STREAM_ISOLATE.lock().unwrap() = Some(Isolate::new(port)); 0 } diff --git a/frontend/rust-lib/event-integration-test/Cargo.toml b/frontend/rust-lib/event-integration-test/Cargo.toml index 01f2f2aad3..33e4f4b184 100644 --- a/frontend/rust-lib/event-integration-test/Cargo.toml +++ b/frontend/rust-lib/event-integration-test/Cargo.toml @@ -37,7 +37,6 @@ thread-id = "3.3.0" bytes.workspace = true nanoid = "0.4.0" tracing.workspace = true -parking_lot.workspace = true uuid.workspace = true collab = { workspace = true } collab-document = { workspace = true } diff --git a/frontend/rust-lib/event-integration-test/src/database_event.rs b/frontend/rust-lib/event-integration-test/src/database_event.rs index b16f9d5ab5..221734fb78 100644 --- a/frontend/rust-lib/event-integration-test/src/database_event.rs +++ b/frontend/rust-lib/event-integration-test/src/database_event.rs @@ -24,7 +24,7 @@ impl EventIntegrationTest { self .appflowy_core .database_manager - .get_database_with_view_id(database_view_id) + .get_database_editor_with_view_id(database_view_id) .await .unwrap() .export_csv(CSVFormat::Original) diff --git 
a/frontend/rust-lib/event-integration-test/src/document/document_event.rs b/frontend/rust-lib/event-integration-test/src/document/document_event.rs index a6cab721d7..71e779389e 100644 --- a/frontend/rust-lib/event-integration-test/src/document/document_event.rs +++ b/frontend/rust-lib/event-integration-test/src/document/document_event.rs @@ -42,10 +42,10 @@ impl DocumentEventTest { .event_test .appflowy_core .document_manager - .get_opened_document(doc_id) + .editable_document(doc_id) .await .unwrap(); - let guard = doc.lock(); + let guard = doc.read().await; guard.encode_collab().unwrap() } diff --git a/frontend/rust-lib/event-integration-test/src/document_event.rs b/frontend/rust-lib/event-integration-test/src/document_event.rs index 28f27bdedd..407dcfe066 100644 --- a/frontend/rust-lib/event-integration-test/src/document_event.rs +++ b/frontend/rust-lib/event-integration-test/src/document_event.rs @@ -1,6 +1,3 @@ -use std::sync::Arc; - -use collab::core::collab::MutexCollab; use collab::core::origin::CollabOrigin; use collab::preclude::updates::decoder::Decode; use collab::preclude::{Collab, Update}; @@ -107,17 +104,13 @@ impl EventIntegrationTest { } pub fn assert_document_data_equal(doc_state: &[u8], doc_id: &str, expected: DocumentData) { - let collab = MutexCollab::new(Collab::new_with_origin( - CollabOrigin::Server, - doc_id, - vec![], - false, - )); - collab.lock().with_origin_transact_mut(|txn| { + let mut collab = Collab::new_with_origin(CollabOrigin::Server, doc_id, vec![], false); + { let update = Update::decode_v1(doc_state).unwrap(); + let mut txn = collab.transact_mut(); txn.apply_update(update); - }); - let document = Document::open(Arc::new(collab)).unwrap(); + }; + let document = Document::open(collab).unwrap(); let actual = document.get_document_data().unwrap(); assert_eq!(actual, expected); } diff --git a/frontend/rust-lib/event-integration-test/src/event_builder.rs b/frontend/rust-lib/event-integration-test/src/event_builder.rs index 
5168723981..c4149378e5 100644 --- a/frontend/rust-lib/event-integration-test/src/event_builder.rs +++ b/frontend/rust-lib/event-integration-test/src/event_builder.rs @@ -1,3 +1,4 @@ +use crate::EventIntegrationTest; use flowy_user::errors::{internal_error, FlowyError}; use lib_dispatch::prelude::{ AFPluginDispatcher, AFPluginEventResponse, AFPluginFromBytes, AFPluginRequest, ToBytes, *, @@ -9,8 +10,6 @@ use std::{ hash::Hash, }; -use crate::EventIntegrationTest; - #[derive(Clone)] pub struct EventBuilder { context: TestContext, diff --git a/frontend/rust-lib/event-integration-test/src/folder_event.rs b/frontend/rust-lib/event-integration-test/src/folder_event.rs index 0c554df4b2..a6a7683c78 100644 --- a/frontend/rust-lib/event-integration-test/src/folder_event.rs +++ b/frontend/rust-lib/event-integration-test/src/folder_event.rs @@ -166,10 +166,14 @@ impl EventIntegrationTest { .await; } - pub fn get_folder_data(&self) -> FolderData { - let mutex_folder = self.appflowy_core.folder_manager.get_mutex_folder().clone(); - let folder_lock_guard = mutex_folder.read(); - let folder = folder_lock_guard.as_ref().unwrap(); + pub async fn get_folder_data(&self) -> FolderData { + let mutex_folder = self + .appflowy_core + .folder_manager + .get_mutex_folder() + .clone() + .unwrap(); + let folder = mutex_folder.read().await; let workspace_id = self.appflowy_core.user_manager.workspace_id().unwrap(); folder.get_folder_data(&workspace_id).clone().unwrap() } diff --git a/frontend/rust-lib/event-integration-test/src/lib.rs b/frontend/rust-lib/event-integration-test/src/lib.rs index e368c4168c..88100b2f85 100644 --- a/frontend/rust-lib/event-integration-test/src/lib.rs +++ b/frontend/rust-lib/event-integration-test/src/lib.rs @@ -6,11 +6,11 @@ use collab_entity::CollabType; use std::env::temp_dir; use std::path::PathBuf; use std::rc::Rc; +use std::sync::atomic::{AtomicBool, AtomicU8, Ordering}; use std::sync::Arc; use std::time::Duration; use nanoid::nanoid; -use 
parking_lot::{Mutex, RwLock}; use semver::Version; use tokio::select; use tokio::time::sleep; @@ -35,10 +35,10 @@ pub mod user_event; #[derive(Clone)] pub struct EventIntegrationTest { - pub authenticator: Arc>, + pub authenticator: Arc, pub appflowy_core: AppFlowyCore, #[allow(dead_code)] - cleaner: Arc>, + cleaner: Arc, pub notification_sender: TestNotificationSender, } @@ -57,7 +57,7 @@ impl EventIntegrationTest { let clean_path = config.storage_path.clone(); let inner = init_core(config).await; let notification_sender = TestNotificationSender::new(); - let authenticator = Arc::new(RwLock::new(AuthenticatorPB::Local)); + let authenticator = Arc::new(AtomicU8::new(AuthenticatorPB::Local as u8)); register_notification_sender(notification_sender.clone()); // In case of dropping the runtime that runs the core, we need to forget the dispatcher @@ -66,7 +66,7 @@ impl EventIntegrationTest { appflowy_core: inner, authenticator, notification_sender, - cleaner: Arc::new(Mutex::new(Cleaner::new(PathBuf::from(clean_path)))), + cleaner: Arc::new(Cleaner::new(PathBuf::from(clean_path))), } } @@ -93,7 +93,7 @@ impl EventIntegrationTest { } pub fn skip_clean(&mut self) { - self.cleaner.lock().should_clean = false; + self.cleaner.should_clean.store(false, Ordering::Release); } pub fn instance_name(&self) -> String { @@ -154,7 +154,7 @@ pub fn document_data_from_document_doc_state(doc_id: &str, doc_state: Vec) - } pub fn document_from_document_doc_state(doc_id: &str, doc_state: Vec) -> Document { - Document::from_doc_state( + Document::open_with_options( CollabOrigin::Empty, DataSource::DocStateV1(doc_state), doc_id, @@ -177,17 +177,16 @@ impl std::ops::Deref for EventIntegrationTest { } } -#[derive(Clone)] pub struct Cleaner { dir: PathBuf, - should_clean: bool, + should_clean: AtomicBool, } impl Cleaner { pub fn new(dir: PathBuf) -> Self { Self { dir, - should_clean: true, + should_clean: AtomicBool::new(true), } } @@ -198,7 +197,7 @@ impl Cleaner { impl Drop for Cleaner { fn 
drop(&mut self) { - if self.should_clean { + if self.should_clean.load(Ordering::Acquire) { Self::cleanup(&self.dir) } } diff --git a/frontend/rust-lib/event-integration-test/src/user_event.rs b/frontend/rust-lib/event-integration-test/src/user_event.rs index 54e3673d34..d4de053426 100644 --- a/frontend/rust-lib/event-integration-test/src/user_event.rs +++ b/frontend/rust-lib/event-integration-test/src/user_event.rs @@ -1,11 +1,12 @@ use std::collections::HashMap; use std::convert::TryFrom; +use std::sync::atomic::Ordering; use std::sync::Arc; use bytes::Bytes; use flowy_folder::entities::{RepeatedViewPB, WorkspacePB}; -use nanoid::nanoid; + use protobuf::ProtobufError; use tokio::sync::broadcast::{channel, Sender}; use tracing::error; @@ -101,21 +102,6 @@ impl EventIntegrationTest { } } - pub async fn supabase_party_sign_up(&self) -> UserProfilePB { - let map = third_party_sign_up_param(Uuid::new_v4().to_string()); - let payload = OauthSignInPB { - map, - authenticator: AuthenticatorPB::Supabase, - }; - - EventBuilder::new(self.clone()) - .event(UserEvent::OauthSignIn) - .payload(payload) - .async_send() - .await - .parse::() - } - pub async fn sign_out(&self) { EventBuilder::new(self.clone()) .event(UserEvent::SignOut) @@ -124,7 +110,7 @@ impl EventIntegrationTest { } pub fn set_auth_type(&self, auth_type: AuthenticatorPB) { - *self.authenticator.write() = auth_type; + self.authenticator.store(auth_type as u8, Ordering::Release); } pub async fn init_anon_user(&self) -> UserProfilePB { @@ -178,33 +164,6 @@ impl EventIntegrationTest { Ok(user_profile) } - pub async fn supabase_sign_up_with_uuid( - &self, - uuid: &str, - email: Option, - ) -> FlowyResult { - let mut map = HashMap::new(); - map.insert(USER_UUID.to_string(), uuid.to_string()); - map.insert(USER_DEVICE_ID.to_string(), uuid.to_string()); - map.insert( - USER_EMAIL.to_string(), - email.unwrap_or_else(|| format!("{}@appflowy.io", nanoid!(10))), - ); - let payload = OauthSignInPB { - map, - authenticator: 
AuthenticatorPB::Supabase, - }; - - let user_profile = EventBuilder::new(self.clone()) - .event(UserEvent::OauthSignIn) - .payload(payload) - .async_send() - .await - .try_parse::()?; - - Ok(user_profile) - } - pub async fn import_appflowy_data( &self, path: String, diff --git a/frontend/rust-lib/event-integration-test/tests/folder/local_test/script.rs b/frontend/rust-lib/event-integration-test/tests/folder/local_test/script.rs index 49fbc01384..3b6d560a4e 100644 --- a/frontend/rust-lib/event-integration-test/tests/folder/local_test/script.rs +++ b/frontend/rust-lib/event-integration-test/tests/folder/local_test/script.rs @@ -207,6 +207,22 @@ impl FolderTest { }, } } + + // pub async fn duplicate_view(&self, view_id: &str) { + // let payload = DuplicateViewPayloadPB { + // view_id: view_id.to_string(), + // open_after_duplicate: false, + // include_children: false, + // parent_view_id: None, + // suffix: None, + // sync_after_create: false, + // }; + // EventBuilder::new(self.sdk.clone()) + // .event(DuplicateView) + // .payload(payload) + // .async_send() + // .await; + // } } pub async fn create_workspace(sdk: &EventIntegrationTest, name: &str, desc: &str) -> WorkspacePB { let request = CreateWorkspacePayloadPB { diff --git a/frontend/rust-lib/event-integration-test/tests/user/af_cloud_test/workspace_test.rs b/frontend/rust-lib/event-integration-test/tests/user/af_cloud_test/workspace_test.rs index 03c0930f74..60f4595f53 100644 --- a/frontend/rust-lib/event-integration-test/tests/user/af_cloud_test/workspace_test.rs +++ b/frontend/rust-lib/event-integration-test/tests/user/af_cloud_test/workspace_test.rs @@ -125,7 +125,7 @@ async fn af_cloud_open_workspace_test() { assert_eq!(views[2].name, "D"); // simulate open workspace and check if the views are correct - for i in 0..30 { + for i in 0..10 { if i % 2 == 0 { test.open_workspace(&first_workspace.id).await; sleep(Duration::from_millis(300)).await; @@ -142,16 +142,16 @@ async fn af_cloud_open_workspace_test() { } 
test.open_workspace(&first_workspace.id).await; - let views = test.get_all_workspace_views().await; - assert_eq!(views[0].name, default_document_name); - assert_eq!(views[1].name, "A"); - assert_eq!(views[2].name, "B"); + let views_1 = test.get_all_workspace_views().await; + assert_eq!(views_1[0].name, default_document_name); + assert_eq!(views_1[1].name, "A"); + assert_eq!(views_1[2].name, "B"); test.open_workspace(&second_workspace.id).await; - let views = test.get_all_workspace_views().await; - assert_eq!(views[0].name, default_document_name); - assert_eq!(views[1].name, "C"); - assert_eq!(views[2].name, "D"); + let views_2 = test.get_all_workspace_views().await; + assert_eq!(views_2[0].name, default_document_name); + assert_eq!(views_2[1].name, "C"); + assert_eq!(views_2[2].name, "D"); } #[tokio::test] @@ -240,7 +240,7 @@ async fn af_cloud_different_open_same_workspace_test() { // Retrieve and verify the views associated with the workspace. let views = folder.get_views_belong_to(&shared_workspace_id); let folder_workspace_id = folder.get_workspace_id(); - assert_eq!(folder_workspace_id, shared_workspace_id); + assert_eq!(folder_workspace_id, Some(shared_workspace_id)); assert_eq!(views.len(), 1, "only get: {:?}", views); // Expecting two views. 
assert_eq!(views[0].name, "Getting started"); diff --git a/frontend/rust-lib/event-integration-test/tests/util.rs b/frontend/rust-lib/event-integration-test/tests/util.rs index ad1a01bcff..aabc528fa7 100644 --- a/frontend/rust-lib/event-integration-test/tests/util.rs +++ b/frontend/rust-lib/event-integration-test/tests/util.rs @@ -2,16 +2,11 @@ use std::fs::{create_dir_all, File, OpenOptions}; use std::io::copy; use std::ops::Deref; use std::path::{Path, PathBuf}; -use std::sync::Arc; use std::time::Duration; use std::{fs, io}; -use anyhow::Error; -use collab_folder::FolderData; -use collab_plugins::cloud_storage::RemoteCollabStorage; use nanoid::nanoid; use tokio::sync::mpsc::Receiver; - use tokio::time::timeout; use uuid::Uuid; use walkdir::WalkDir; @@ -21,22 +16,9 @@ use zip::{CompressionMethod, ZipArchive, ZipWriter}; use event_integration_test::event_builder::EventBuilder; use event_integration_test::Cleaner; use event_integration_test::EventIntegrationTest; -use flowy_database_pub::cloud::DatabaseCloudService; -use flowy_folder_pub::cloud::{FolderCloudService, FolderSnapshot}; -use flowy_server::supabase::api::*; -use flowy_server::{AppFlowyEncryption, EncryptionImpl}; -use flowy_server_pub::supabase_config::SupabaseConfiguration; -use flowy_user::entities::{AuthenticatorPB, UpdateUserProfilePayloadPB}; +use flowy_user::entities::UpdateUserProfilePayloadPB; use flowy_user::errors::FlowyError; - use flowy_user::event_map::UserEvent::*; -use flowy_user_pub::cloud::UserCloudService; -use flowy_user_pub::entities::Authenticator; - -pub fn get_supabase_config() -> Option { - dotenv::from_path(".env.ci").ok()?; - SupabaseConfiguration::from_env().ok() -} pub struct FlowySupabaseTest { event_test: EventIntegrationTest, @@ -44,13 +26,7 @@ pub struct FlowySupabaseTest { impl FlowySupabaseTest { pub async fn new() -> Option { - let _ = get_supabase_config()?; let event_test = EventIntegrationTest::new().await; - event_test.set_auth_type(AuthenticatorPB::Supabase); - 
event_test - .server_provider - .set_authenticator(Authenticator::Supabase); - Some(Self { event_test }) } @@ -79,93 +55,6 @@ pub async fn receive_with_timeout(mut receiver: Receiver, duration: Durati timeout(duration, receiver.recv()).await.ok()? } -pub fn get_supabase_ci_config() -> Option { - dotenv::from_filename("./.env.ci").ok()?; - SupabaseConfiguration::from_env().ok() -} - -#[allow(dead_code)] -pub fn get_supabase_dev_config() -> Option { - dotenv::from_filename("./.env.dev").ok()?; - SupabaseConfiguration::from_env().ok() -} - -pub fn collab_service() -> Arc { - let (server, encryption_impl) = appflowy_server(None); - Arc::new(SupabaseCollabStorageImpl::new( - server, - None, - Arc::downgrade(&encryption_impl), - )) -} - -pub fn database_service() -> Arc { - let (server, _encryption_impl) = appflowy_server(None); - Arc::new(SupabaseDatabaseServiceImpl::new(server)) -} - -pub fn user_auth_service() -> Arc { - let (server, _encryption_impl) = appflowy_server(None); - Arc::new(SupabaseUserServiceImpl::new(server, vec![], None)) -} - -pub fn folder_service() -> Arc { - let (server, _encryption_impl) = appflowy_server(None); - Arc::new(SupabaseFolderServiceImpl::new(server)) -} - -#[allow(dead_code)] -pub fn encryption_folder_service( - secret: Option, -) -> (Arc, Arc) { - let (server, encryption_impl) = appflowy_server(secret); - let service = Arc::new(SupabaseFolderServiceImpl::new(server)); - (service, encryption_impl) -} - -pub fn encryption_collab_service( - secret: Option, -) -> (Arc, Arc) { - let (server, encryption_impl) = appflowy_server(secret); - let service = Arc::new(SupabaseCollabStorageImpl::new( - server, - None, - Arc::downgrade(&encryption_impl), - )); - (service, encryption_impl) -} - -pub async fn get_folder_data_from_server( - uid: &i64, - folder_id: &str, - encryption_secret: Option, -) -> Result, Error> { - let (cloud_service, _encryption) = encryption_folder_service(encryption_secret); - cloud_service.get_folder_data(folder_id, 
uid).await -} - -pub async fn get_folder_snapshots( - folder_id: &str, - encryption_secret: Option, -) -> Vec { - let (cloud_service, _encryption) = encryption_folder_service(encryption_secret); - cloud_service - .get_folder_snapshots(folder_id, 10) - .await - .unwrap() -} - -pub fn appflowy_server( - encryption_secret: Option, -) -> (SupabaseServerServiceImpl, Arc) { - let config = SupabaseConfiguration::from_env().unwrap(); - let encryption_impl: Arc = - Arc::new(EncryptionImpl::new(encryption_secret)); - let encryption = Arc::downgrade(&encryption_impl); - let server = Arc::new(RESTfulPostgresServer::new(config, encryption)); - (SupabaseServerServiceImpl::new(server), encryption_impl) -} - /// zip the asset to the destination /// Zips the specified directory into a zip file. /// diff --git a/frontend/rust-lib/flowy-ai/Cargo.toml b/frontend/rust-lib/flowy-ai/Cargo.toml index 3e26f38a5e..74af2731ee 100644 --- a/frontend/rust-lib/flowy-ai/Cargo.toml +++ b/frontend/rust-lib/flowy-ai/Cargo.toml @@ -19,6 +19,7 @@ uuid.workspace = true strum_macros = "0.21" protobuf.workspace = true bytes.workspace = true +arc-swap.workspace = true validator = { workspace = true, features = ["derive"] } lib-infra = { workspace = true, features = ["isolate_flutter"] } flowy-ai-pub.workspace = true @@ -33,7 +34,6 @@ serde_json = { workspace = true } anyhow = "1.0.86" tokio-stream = "0.1.15" tokio-util = { workspace = true, features = ["full"] } -parking_lot.workspace = true appflowy-local-ai = { version = "0.1.0", features = ["verbose"] } appflowy-plugin = { version = "0.1.0" } reqwest = "0.11.27" diff --git a/frontend/rust-lib/flowy-ai/src/ai_manager.rs b/frontend/rust-lib/flowy-ai/src/ai_manager.rs index 6adbd69813..8b3bf782a1 100644 --- a/frontend/rust-lib/flowy-ai/src/ai_manager.rs +++ b/frontend/rust-lib/flowy-ai/src/ai_manager.rs @@ -67,7 +67,8 @@ impl AIManager { } pub async fn initialize(&self, _workspace_id: &str) -> Result<(), FlowyError> { - 
self.local_ai_controller.refresh().await?; + // Ignore following error + let _ = self.local_ai_controller.refresh().await; Ok(()) } diff --git a/frontend/rust-lib/flowy-ai/src/local_ai/local_llm_chat.rs b/frontend/rust-lib/flowy-ai/src/local_ai/local_llm_chat.rs index af1d67b913..12466af8ca 100644 --- a/frontend/rust-lib/flowy-ai/src/local_ai/local_llm_chat.rs +++ b/frontend/rust-lib/flowy-ai/src/local_ai/local_llm_chat.rs @@ -17,8 +17,8 @@ use lib_infra::async_trait::async_trait; use std::collections::HashMap; use crate::stream_message::StreamMessage; +use arc_swap::ArcSwapOption; use futures_util::SinkExt; -use parking_lot::Mutex; use serde::{Deserialize, Serialize}; use serde_json::json; use std::ops::Deref; @@ -47,7 +47,7 @@ const LOCAL_AI_SETTING_KEY: &str = "appflowy_local_ai_setting:v0"; pub struct LocalAIController { local_ai: Arc, local_ai_resource: Arc, - current_chat_id: Mutex>, + current_chat_id: ArcSwapOption, store_preferences: Arc, user_service: Arc, } @@ -80,7 +80,7 @@ impl LocalAIController { res_impl, tx, )); - let current_chat_id = Mutex::new(None); + let current_chat_id = ArcSwapOption::default(); let mut running_state_rx = local_ai.subscribe_running_state(); let cloned_llm_res = llm_res.clone(); @@ -205,12 +205,14 @@ impl LocalAIController { // Only keep one chat open at a time. Since loading multiple models at the same time will cause // memory issues. 
- if let Some(current_chat_id) = self.current_chat_id.lock().as_ref() { + if let Some(current_chat_id) = self.current_chat_id.load().as_ref() { debug!("[AI Plugin] close previous chat: {}", current_chat_id); self.close_chat(current_chat_id); } - *self.current_chat_id.lock() = Some(chat_id.to_string()); + self + .current_chat_id + .store(Some(Arc::new(chat_id.to_string()))); let chat_id = chat_id.to_string(); let weak_ctrl = Arc::downgrade(&self.local_ai); tokio::spawn(async move { @@ -534,7 +536,7 @@ impl LLMResourceService for LLMResourceServiceImpl { fn store_setting(&self, setting: LLMSetting) -> Result<(), Error> { self .store_preferences - .set_object(LOCAL_AI_SETTING_KEY, setting)?; + .set_object(LOCAL_AI_SETTING_KEY, &setting)?; Ok(()) } diff --git a/frontend/rust-lib/flowy-ai/src/local_ai/local_llm_resource.rs b/frontend/rust-lib/flowy-ai/src/local_ai/local_llm_resource.rs index 457322a111..6d3ff1953b 100644 --- a/frontend/rust-lib/flowy-ai/src/local_ai/local_llm_resource.rs +++ b/frontend/rust-lib/flowy-ai/src/local_ai/local_llm_resource.rs @@ -9,8 +9,8 @@ use flowy_error::{ErrorCode, FlowyError, FlowyResult}; use futures::Sink; use futures_util::SinkExt; use lib_infra::async_trait::async_trait; -use parking_lot::RwLock; +use arc_swap::ArcSwapOption; use lib_infra::util::{get_operating_system, OperatingSystem}; use std::path::PathBuf; use std::sync::Arc; @@ -64,10 +64,10 @@ impl DownloadTask { pub struct LocalAIResourceController { user_service: Arc, resource_service: Arc, - llm_setting: RwLock>, + llm_setting: ArcSwapOption, // The ai_config will be set when user try to get latest local ai config from server - ai_config: RwLock>, - download_task: Arc>>, + ai_config: ArcSwapOption, + download_task: Arc>, resource_notify: tokio::sync::mpsc::Sender<()>, #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))] #[allow(dead_code)] @@ -82,7 +82,7 @@ impl LocalAIResourceController { resource_notify: tokio::sync::mpsc::Sender<()>, ) -> Self { 
let (offline_app_state_sender, _) = tokio::sync::broadcast::channel(1); - let llm_setting = RwLock::new(resource_service.retrieve_setting()); + let llm_setting = resource_service.retrieve_setting().map(Arc::new); #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))] let mut offline_app_disk_watch: Option = None; @@ -109,7 +109,7 @@ impl LocalAIResourceController { Self { user_service, resource_service: Arc::new(resource_service), - llm_setting, + llm_setting: ArcSwapOption::new(llm_setting), ai_config: Default::default(), download_task: Default::default(), resource_notify, @@ -125,7 +125,7 @@ impl LocalAIResourceController { } fn set_llm_setting(&self, llm_setting: LLMSetting) { - *self.llm_setting.write() = Some(llm_setting); + self.llm_setting.store(Some(llm_setting.into())); } /// Returns true when all resources are downloaded and ready to use. @@ -153,7 +153,7 @@ impl LocalAIResourceController { return Err(FlowyError::local_ai().with_context("No model found")); } - *self.ai_config.write() = Some(ai_config.clone()); + self.ai_config.store(Some(ai_config.clone().into())); let selected_model = self.select_model(&ai_config)?; let llm_setting = LLMSetting { @@ -173,7 +173,7 @@ impl LocalAIResourceController { pub fn use_local_llm(&self, llm_id: i64) -> FlowyResult { let (app, llm_model) = self .ai_config - .read() + .load() .as_ref() .and_then(|config| { config @@ -209,7 +209,7 @@ impl LocalAIResourceController { let pending_resources = self.calculate_pending_resources().ok()?; let is_ready = pending_resources.is_empty(); - let is_downloading = self.download_task.read().is_some(); + let is_downloading = self.download_task.load().is_some(); let pending_resources: Vec<_> = pending_resources .into_iter() .flat_map(|res| match res { @@ -243,7 +243,7 @@ impl LocalAIResourceController { /// Returns true when all resources are downloaded and ready to use. 
pub fn calculate_pending_resources(&self) -> FlowyResult> { - match self.llm_setting.read().as_ref() { + match self.llm_setting.load().as_ref() { None => Err(FlowyError::local_ai().with_context("Can't find any llm config")), Some(llm_setting) => { let mut resources = vec![]; @@ -296,7 +296,7 @@ impl LocalAIResourceController { info!("notify download finish, need to reload resources"); let _ = resource_notify.send(()).await; if let Some(download_task) = weak_download_task.upgrade() { - if let Some(task) = download_task.write().take() { + if let Some(task) = download_task.swap(None) { task.cancel(); } } @@ -307,25 +307,27 @@ impl LocalAIResourceController { }; // return immediately if download task already exists - if let Some(download_task) = self.download_task.read().as_ref() { - trace!( - "Download task already exists, return the task id: {}", - task_id - ); - progress_notify(download_task.tx.subscribe()); - return Ok(task_id); + { + let guard = self.download_task.load(); + if let Some(download_task) = &*guard { + trace!( + "Download task already exists, return the task id: {}", + task_id + ); + progress_notify(download_task.tx.subscribe()); + return Ok(task_id); + } } // If download task is not exists, create a new download task. 
info!("[LLM Resource] Start new download task"); let llm_setting = self .llm_setting - .read() - .clone() + .load_full() .ok_or_else(|| FlowyError::local_ai().with_context("No local ai config found"))?; - let download_task = DownloadTask::new(); - *self.download_task.write() = Some(download_task.clone()); + let download_task = Arc::new(DownloadTask::new()); + self.download_task.store(Some(download_task.clone())); progress_notify(download_task.tx.subscribe()); let model_dir = self.user_model_folder()?; @@ -339,15 +341,15 @@ impl LocalAIResourceController { // After download the plugin, start downloading models let chat_model_file = ( model_dir.join(&llm_setting.llm_model.chat_model.file_name), - llm_setting.llm_model.chat_model.file_name, - llm_setting.llm_model.chat_model.name, - llm_setting.llm_model.chat_model.download_url, + &llm_setting.llm_model.chat_model.file_name, + &llm_setting.llm_model.chat_model.name, + &llm_setting.llm_model.chat_model.download_url, ); let embedding_model_file = ( model_dir.join(&llm_setting.llm_model.embedding_model.file_name), - llm_setting.llm_model.embedding_model.file_name, - llm_setting.llm_model.embedding_model.name, - llm_setting.llm_model.embedding_model.download_url, + &llm_setting.llm_model.embedding_model.file_name, + &llm_setting.llm_model.embedding_model.name, + &llm_setting.llm_model.embedding_model.download_url, ); for (file_path, file_name, model_name, url) in [chat_model_file, embedding_model_file] { if file_path.exists() { @@ -370,9 +372,9 @@ impl LocalAIResourceController { } }); match download_model( - &url, + url, &model_dir, - &file_name, + file_name, Some(progress), Some(download_task.cancel_token.clone()), ) @@ -400,7 +402,7 @@ impl LocalAIResourceController { } pub fn cancel_download(&self) -> FlowyResult<()> { - if let Some(cancel_token) = self.download_task.write().take() { + if let Some(cancel_token) = self.download_task.swap(None) { info!("[LLM Resource] Cancel download"); cancel_token.cancel(); } @@ 
-416,9 +418,7 @@ impl LocalAIResourceController { let llm_setting = self .llm_setting - .read() - .as_ref() - .cloned() + .load_full() .ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?; let model_dir = self.user_model_folder()?; @@ -475,16 +475,14 @@ impl LocalAIResourceController { } pub fn get_selected_model(&self) -> Option { - self - .llm_setting - .read() - .as_ref() - .map(|setting| setting.llm_model.clone()) + let setting = self.llm_setting.load(); + Some(setting.as_ref()?.llm_model.clone()) } /// Selects the appropriate model based on the current settings or defaults to the first model. fn select_model(&self, ai_config: &LocalAIConfig) -> FlowyResult { - let selected_model = match self.llm_setting.read().as_ref() { + let llm_setting = self.llm_setting.load(); + let selected_model = match &*llm_setting { None => ai_config.models[0].clone(), Some(llm_setting) => { match ai_config diff --git a/frontend/rust-lib/flowy-core/Cargo.toml b/frontend/rust-lib/flowy-core/Cargo.toml index 8b24a615dc..d6cd0c6635 100644 --- a/frontend/rust-lib/flowy-core/Cargo.toml +++ b/frontend/rust-lib/flowy-core/Cargo.toml @@ -44,8 +44,9 @@ bytes.workspace = true tokio = { workspace = true, features = ["full"] } tokio-stream = { workspace = true, features = ["sync"] } console-subscriber = { version = "0.2", optional = true } -parking_lot.workspace = true anyhow.workspace = true +dashmap.workspace = true +arc-swap.workspace = true base64 = "0.21.5" lib-infra = { workspace = true } diff --git a/frontend/rust-lib/flowy-core/src/config.rs b/frontend/rust-lib/flowy-core/src/config.rs index c910064a0a..395f3aebe2 100644 --- a/frontend/rust-lib/flowy-core/src/config.rs +++ b/frontend/rust-lib/flowy-core/src/config.rs @@ -6,7 +6,6 @@ use semver::Version; use tracing::{error, info}; use flowy_server_pub::af_cloud_config::AFCloudConfiguration; -use flowy_server_pub::supabase_config::SupabaseConfiguration; use flowy_user::services::entities::URL_SAFE_ENGINE; use 
lib_infra::file_util::copy_dir_recursive; use lib_infra::util::OperatingSystem; @@ -85,13 +84,7 @@ impl AppFlowyCoreConfig { ) -> Self { let cloud_config = AFCloudConfiguration::from_env().ok(); let storage_path = match &cloud_config { - None => { - let supabase_config = SupabaseConfiguration::from_env().ok(); - match &supabase_config { - None => custom_application_path, - Some(config) => make_user_data_folder(&custom_application_path, &config.url), - } - }, + None => custom_application_path, Some(config) => make_user_data_folder(&custom_application_path, &config.base_url), }; let log_filter = create_log_filter("info".to_owned(), vec![], OperatingSystem::from(&platform)); diff --git a/frontend/rust-lib/flowy-core/src/deps_resolve/folder_deps.rs b/frontend/rust-lib/flowy-core/src/deps_resolve/folder_deps.rs index 78994e8a34..62e192446a 100644 --- a/frontend/rust-lib/flowy-core/src/deps_resolve/folder_deps.rs +++ b/frontend/rust-lib/flowy-core/src/deps_resolve/folder_deps.rs @@ -11,7 +11,7 @@ use flowy_database2::DatabaseManager; use flowy_document::entities::DocumentDataPB; use flowy_document::manager::DocumentManager; use flowy_document::parser::json::parser::JsonToDocumentParser; -use flowy_error::FlowyError; +use flowy_error::{FlowyError, FlowyResult}; use flowy_folder::entities::{CreateViewParams, ViewLayoutPB}; use flowy_folder::manager::{FolderManager, FolderUser}; use flowy_folder::share::ImportType; @@ -26,7 +26,6 @@ use flowy_sqlite::kv::KVStorePreferences; use flowy_user::services::authenticate_user::AuthenticateUser; use flowy_user::services::data_import::{load_collab_by_object_id, load_collab_by_object_ids}; use lib_dispatch::prelude::ToBytes; - use std::collections::HashMap; use std::convert::TryFrom; use std::sync::{Arc, Weak}; @@ -111,6 +110,10 @@ impl FolderUser for FolderUserImpl { fn collab_db(&self, uid: i64) -> Result, FlowyError> { self.upgrade_user()?.get_collab_db(uid) } + + fn is_folder_exist_on_disk(&self, uid: i64, workspace_id: &str) -> 
FlowyResult { + self.upgrade_user()?.is_collab_on_disk(uid, workspace_id) + } } struct DocumentFolderOperation(Arc); diff --git a/frontend/rust-lib/flowy-core/src/integrate/server.rs b/frontend/rust-lib/flowy-core/src/integrate/server.rs index 86b85f16af..6314976c66 100644 --- a/frontend/rust-lib/flowy-core/src/integrate/server.rs +++ b/frontend/rust-lib/flowy-core/src/integrate/server.rs @@ -1,18 +1,17 @@ -use std::collections::HashMap; +use arc_swap::ArcSwapOption; +use dashmap::DashMap; use std::fmt::{Display, Formatter}; +use std::sync::atomic::{AtomicBool, AtomicU8, Ordering}; use std::sync::{Arc, Weak}; -use parking_lot::RwLock; use serde_repr::*; use flowy_error::{FlowyError, FlowyResult}; use flowy_server::af_cloud::define::ServerUser; use flowy_server::af_cloud::AppFlowyCloudServer; use flowy_server::local_server::{LocalServer, LocalServerDB}; -use flowy_server::supabase::SupabaseServer; use flowy_server::{AppFlowyEncryption, AppFlowyServer, EncryptionImpl}; use flowy_server_pub::af_cloud_config::AFCloudConfiguration; -use flowy_server_pub::supabase_config::SupabaseConfiguration; use flowy_server_pub::AuthenticatorType; use flowy_sqlite::kv::KVStorePreferences; use flowy_user_pub::entities::*; @@ -26,12 +25,8 @@ pub enum Server { /// Offline mode, no user authentication and the data is stored locally. Local = 0, /// AppFlowy Cloud server provider. - /// The [AppFlowy-Server](https://github.com/AppFlowy-IO/AppFlowy-Cloud) is still a work in - /// progress. + /// See: https://github.com/AppFlowy-IO/AppFlowy-Cloud AppFlowyCloud = 1, - /// Supabase server provider. - /// It uses supabase postgresql database to store data and user authentication. 
- Supabase = 2, } impl Server { @@ -45,7 +40,6 @@ impl Display for Server { match self { Server::Local => write!(f, "Local"), Server::AppFlowyCloud => write!(f, "AppFlowyCloud"), - Server::Supabase => write!(f, "Supabase"), } } } @@ -56,16 +50,16 @@ impl Display for Server { /// Each server implements the [AppFlowyServer] trait, which provides the [UserCloudService], etc. pub struct ServerProvider { config: AppFlowyCoreConfig, - providers: RwLock>>, - pub(crate) encryption: RwLock>, + providers: DashMap>, + pub(crate) encryption: Arc, #[allow(dead_code)] pub(crate) store_preferences: Weak, - pub(crate) user_enable_sync: RwLock, + pub(crate) user_enable_sync: AtomicBool, /// The authenticator type of the user. - authenticator: RwLock, + authenticator: AtomicU8, user: Arc, - pub(crate) uid: Arc>>, + pub(crate) uid: Arc>, } impl ServerProvider { @@ -79,10 +73,10 @@ impl ServerProvider { let encryption = EncryptionImpl::new(None); Self { config, - providers: RwLock::new(HashMap::new()), - user_enable_sync: RwLock::new(true), - authenticator: RwLock::new(Authenticator::from(server)), - encryption: RwLock::new(Arc::new(encryption)), + providers: DashMap::new(), + user_enable_sync: AtomicBool::new(true), + authenticator: AtomicU8::new(Authenticator::from(server) as u8), + encryption: Arc::new(encryption), store_preferences, uid: Default::default(), user, @@ -90,33 +84,34 @@ impl ServerProvider { } pub fn get_server_type(&self) -> Server { - match &*self.authenticator.read() { + match Authenticator::from(self.authenticator.load(Ordering::Acquire) as i32) { Authenticator::Local => Server::Local, Authenticator::AppFlowyCloud => Server::AppFlowyCloud, - Authenticator::Supabase => Server::Supabase, } } pub fn set_authenticator(&self, authenticator: Authenticator) { let old_server_type = self.get_server_type(); - *self.authenticator.write() = authenticator; + self + .authenticator + .store(authenticator as u8, Ordering::Release); let new_server_type = self.get_server_type(); if 
old_server_type != new_server_type { - self.providers.write().remove(&old_server_type); + self.providers.remove(&old_server_type); } } pub fn get_authenticator(&self) -> Authenticator { - self.authenticator.read().clone() + Authenticator::from(self.authenticator.load(Ordering::Acquire) as i32) } /// Returns a [AppFlowyServer] trait implementation base on the provider_type. pub fn get_server(&self) -> FlowyResult> { let server_type = self.get_server_type(); - if let Some(provider) = self.providers.read().get(&server_type) { - return Ok(provider.clone()); + if let Some(provider) = self.providers.get(&server_type) { + return Ok(provider.value().clone()); } let server = match server_type { @@ -131,7 +126,7 @@ impl ServerProvider { let config = AFCloudConfiguration::from_env()?; let server = Arc::new(AppFlowyCloudServer::new( config, - *self.user_enable_sync.read(), + self.user_enable_sync.load(Ordering::Acquire), self.config.device_id.clone(), self.config.app_version.clone(), self.user.clone(), @@ -139,25 +134,9 @@ impl ServerProvider { Ok::, FlowyError>(server) }, - Server::Supabase => { - let config = SupabaseConfiguration::from_env()?; - let uid = self.uid.clone(); - tracing::trace!("🔑Supabase config: {:?}", config); - let encryption = Arc::downgrade(&*self.encryption.read()); - Ok::, FlowyError>(Arc::new(SupabaseServer::new( - uid, - config, - *self.user_enable_sync.read(), - self.config.device_id.clone(), - encryption, - ))) - }, }?; - self - .providers - .write() - .insert(server_type.clone(), server.clone()); + self.providers.insert(server_type.clone(), server.clone()); Ok(server) } } @@ -167,7 +146,6 @@ impl From for Server { match auth_provider { Authenticator::Local => Server::Local, Authenticator::AppFlowyCloud => Server::AppFlowyCloud, - Authenticator::Supabase => Server::Supabase, } } } @@ -177,7 +155,6 @@ impl From for Authenticator { match ty { Server::Local => Authenticator::Local, Server::AppFlowyCloud => Authenticator::AppFlowyCloud, - 
Server::Supabase => Authenticator::Supabase, } } } @@ -190,7 +167,6 @@ impl From<&Authenticator> for Server { pub fn current_server_type() -> Server { match AuthenticatorType::from_env() { AuthenticatorType::Local => Server::Local, - AuthenticatorType::Supabase => Server::Supabase, AuthenticatorType::AppFlowyCloud => Server::AppFlowyCloud, } } diff --git a/frontend/rust-lib/flowy-core/src/integrate/trait_impls.rs b/frontend/rust-lib/flowy-core/src/integrate/trait_impls.rs index cabc5cd97f..963f7fe159 100644 --- a/frontend/rust-lib/flowy-core/src/integrate/trait_impls.rs +++ b/frontend/rust-lib/flowy-core/src/integrate/trait_impls.rs @@ -2,6 +2,7 @@ use client_api::entity::search_dto::SearchDocumentResponseItem; use flowy_search_pub::cloud::SearchCloudService; use std::collections::HashMap; use std::path::Path; +use std::sync::atomic::Ordering; use std::sync::Arc; use anyhow::Error; @@ -9,10 +10,9 @@ use client_api::collab_sync::{SinkConfig, SyncObject, SyncPlugin}; use client_api::entity::ai_dto::{CompletionType, RepeatedRelatedQuestion}; use client_api::entity::ChatMessageType; use collab::core::origin::{CollabClient, CollabOrigin}; - +use collab::entity::EncodedCollab; use collab::preclude::CollabPlugin; use collab_entity::CollabType; -use collab_plugins::cloud_storage::postgres::SupabaseDBPlugin; use serde_json::Value; use tokio_stream::wrappers::WatchStream; use tracing::{debug, info}; @@ -25,8 +25,8 @@ use flowy_ai_pub::cloud::{ RepeatedChatMessage, StreamAnswer, StreamComplete, }; use flowy_database_pub::cloud::{ - CollabDocStateByOid, DatabaseAIService, DatabaseCloudService, DatabaseSnapshot, - SummaryRowContent, TranslateRowContent, TranslateRowResponse, + DatabaseAIService, DatabaseCloudService, DatabaseSnapshot, EncodeCollabByOid, SummaryRowContent, + TranslateRowContent, TranslateRowResponse, }; use flowy_document::deps::DocumentData; use flowy_document_pub::cloud::{DocumentCloudService, DocumentSnapshot}; @@ -36,13 +36,11 @@ use 
flowy_folder_pub::cloud::{ }; use flowy_folder_pub::entities::{PublishInfoResponse, PublishPayload}; use flowy_server_pub::af_cloud_config::AFCloudConfiguration; -use flowy_server_pub::supabase_config::SupabaseConfiguration; use flowy_storage_pub::cloud::{ObjectIdentity, ObjectValue, StorageCloudService}; use flowy_storage_pub::storage::{CompletedPartRequest, CreateUploadResponse, UploadPartResponse}; use flowy_user_pub::cloud::{UserCloudService, UserCloudServiceProvider}; use flowy_user_pub::entities::{Authenticator, UserTokenState}; use lib_infra::async_trait::async_trait; -use lib_infra::future::FutureResult; use crate::integrate::server::{Server, ServerProvider}; @@ -168,8 +166,8 @@ impl UserCloudServiceProvider for ServerProvider { fn set_enable_sync(&self, uid: i64, enable_sync: bool) { if let Ok(server) = self.get_server() { server.set_enable_sync(uid, enable_sync); - *self.user_enable_sync.write() = enable_sync; - *self.uid.write() = Some(uid); + self.user_enable_sync.store(enable_sync, Ordering::Release); + self.uid.store(Some(uid.into())); } } @@ -195,7 +193,7 @@ impl UserCloudServiceProvider for ServerProvider { fn set_encrypt_secret(&self, secret: String) { tracing::info!("🔑Set encrypt secret"); - self.encryption.write().set_secret(secret); + self.encryption.set_secret(secret); } /// Returns the [UserCloudService] base on the current [Server]. 
@@ -211,93 +209,87 @@ impl UserCloudServiceProvider for ServerProvider { Server::AppFlowyCloud => AFCloudConfiguration::from_env() .map(|config| config.base_url) .unwrap_or_default(), - Server::Supabase => SupabaseConfiguration::from_env() - .map(|config| config.url) - .unwrap_or_default(), } } } +#[async_trait] impl FolderCloudService for ServerProvider { - fn create_workspace(&self, uid: i64, name: &str) -> FutureResult { - let server = self.get_server(); + async fn create_workspace(&self, uid: i64, name: &str) -> Result { + let server = self.get_server()?; let name = name.to_string(); - FutureResult::new(async move { server?.folder_service().create_workspace(uid, &name).await }) + server.folder_service().create_workspace(uid, &name).await } - fn open_workspace(&self, workspace_id: &str) -> FutureResult<(), Error> { + async fn open_workspace(&self, workspace_id: &str) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { server?.folder_service().open_workspace(&workspace_id).await }) + let server = self.get_server()?; + server.folder_service().open_workspace(&workspace_id).await } - fn get_all_workspace(&self) -> FutureResult, Error> { - let server = self.get_server(); - FutureResult::new(async move { server?.folder_service().get_all_workspace().await }) + async fn get_all_workspace(&self) -> Result, Error> { + let server = self.get_server()?; + server.folder_service().get_all_workspace().await } - fn get_folder_data( + async fn get_folder_data( &self, workspace_id: &str, uid: &i64, - ) -> FutureResult, Error> { + ) -> Result, Error> { let uid = *uid; - let server = self.get_server(); + let server = self.get_server()?; let workspace_id = workspace_id.to_string(); - FutureResult::new(async move { - server? 
- .folder_service() - .get_folder_data(&workspace_id, &uid) - .await - }) + + server + .folder_service() + .get_folder_data(&workspace_id, &uid) + .await } - fn get_folder_snapshots( + async fn get_folder_snapshots( &self, workspace_id: &str, limit: usize, - ) -> FutureResult, Error> { + ) -> Result, Error> { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { - server? - .folder_service() - .get_folder_snapshots(&workspace_id, limit) - .await - }) + let server = self.get_server()?; + + server + .folder_service() + .get_folder_snapshots(&workspace_id, limit) + .await } - fn get_folder_doc_state( + async fn get_folder_doc_state( &self, workspace_id: &str, uid: i64, collab_type: CollabType, object_id: &str, - ) -> FutureResult, Error> { + ) -> Result, Error> { let object_id = object_id.to_string(); let workspace_id = workspace_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { - server? - .folder_service() - .get_folder_doc_state(&workspace_id, uid, collab_type, &object_id) - .await - }) + let server = self.get_server()?; + + server + .folder_service() + .get_folder_doc_state(&workspace_id, uid, collab_type, &object_id) + .await } - fn batch_create_folder_collab_objects( + async fn batch_create_folder_collab_objects( &self, workspace_id: &str, objects: Vec, - ) -> FutureResult<(), Error> { + ) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { - server? 
- .folder_service() - .batch_create_folder_collab_objects(&workspace_id, objects) - .await - }) + let server = self.get_server()?; + + server + .folder_service() + .batch_create_folder_collab_objects(&workspace_id, objects) + .await } fn service_name(&self) -> String { @@ -307,114 +299,106 @@ impl FolderCloudService for ServerProvider { .unwrap_or_default() } - fn publish_view( + async fn publish_view( &self, workspace_id: &str, payload: Vec, - ) -> FutureResult<(), Error> { + ) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { - server? - .folder_service() - .publish_view(&workspace_id, payload) - .await - }) + let server = self.get_server()?; + + server + .folder_service() + .publish_view(&workspace_id, payload) + .await } - fn unpublish_views(&self, workspace_id: &str, view_ids: Vec) -> FutureResult<(), Error> { + async fn unpublish_views(&self, workspace_id: &str, view_ids: Vec) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { - server? 
- .folder_service() - .unpublish_views(&workspace_id, view_ids) - .await - }) + let server = self.get_server()?; + + server + .folder_service() + .unpublish_views(&workspace_id, view_ids) + .await } - fn get_publish_info(&self, view_id: &str) -> FutureResult { + async fn get_publish_info(&self, view_id: &str) -> Result { let view_id = view_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { server?.folder_service().get_publish_info(&view_id).await }) + let server = self.get_server()?; + server.folder_service().get_publish_info(&view_id).await } - fn set_publish_namespace( + async fn set_publish_namespace( &self, workspace_id: &str, new_namespace: &str, - ) -> FutureResult<(), Error> { + ) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); let new_namespace = new_namespace.to_string(); - let server = self.get_server(); - FutureResult::new(async move { - server? - .folder_service() - .set_publish_namespace(&workspace_id, &new_namespace) - .await - }) + let server = self.get_server()?; + + server + .folder_service() + .set_publish_namespace(&workspace_id, &new_namespace) + .await } - fn get_publish_namespace(&self, workspace_id: &str) -> FutureResult { + async fn get_publish_namespace(&self, workspace_id: &str) -> Result { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { - server? 
- .folder_service() - .get_publish_namespace(&workspace_id) - .await - }) + let server = self.get_server()?; + + server + .folder_service() + .get_publish_namespace(&workspace_id) + .await } } #[async_trait] impl DatabaseCloudService for ServerProvider { - fn get_database_object_doc_state( + async fn get_database_encode_collab( &self, object_id: &str, collab_type: CollabType, workspace_id: &str, - ) -> FutureResult>, Error> { + ) -> Result, Error> { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); + let server = self.get_server()?; let database_id = object_id.to_string(); - FutureResult::new(async move { - server? - .database_service() - .get_database_object_doc_state(&database_id, collab_type, &workspace_id) - .await - }) + server + .database_service() + .get_database_encode_collab(&database_id, collab_type, &workspace_id) + .await } - fn batch_get_database_object_doc_state( + async fn batch_get_database_encode_collab( &self, object_ids: Vec, object_ty: CollabType, workspace_id: &str, - ) -> FutureResult { + ) -> Result { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { - server? - .database_service() - .batch_get_database_object_doc_state(object_ids, object_ty, &workspace_id) - .await - }) + let server = self.get_server()?; + + server + .database_service() + .batch_get_database_encode_collab(object_ids, object_ty, &workspace_id) + .await } - fn get_database_collab_object_snapshots( + async fn get_database_collab_object_snapshots( &self, object_id: &str, limit: usize, - ) -> FutureResult, Error> { - let server = self.get_server(); + ) -> Result, Error> { + let server = self.get_server()?; let database_id = object_id.to_string(); - FutureResult::new(async move { - server? 
- .database_service() - .get_database_collab_object_snapshots(&database_id, limit) - .await - }) + + server + .database_service() + .get_database_collab_object_snapshots(&database_id, limit) + .await } } @@ -449,54 +433,52 @@ impl DatabaseAIService for ServerProvider { } } +#[async_trait] impl DocumentCloudService for ServerProvider { - fn get_document_doc_state( + async fn get_document_doc_state( &self, document_id: &str, workspace_id: &str, - ) -> FutureResult, FlowyError> { + ) -> Result, FlowyError> { let workspace_id = workspace_id.to_string(); let document_id = document_id.to_string(); - let server = self.get_server(); - FutureResult::new(async move { - server? - .document_service() - .get_document_doc_state(&document_id, &workspace_id) - .await - }) + let server = self.get_server()?; + + server + .document_service() + .get_document_doc_state(&document_id, &workspace_id) + .await } - fn get_document_snapshots( + async fn get_document_snapshots( &self, document_id: &str, limit: usize, workspace_id: &str, - ) -> FutureResult, Error> { + ) -> Result, Error> { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); + let server = self.get_server()?; let document_id = document_id.to_string(); - FutureResult::new(async move { - server? - .document_service() - .get_document_snapshots(&document_id, limit, &workspace_id) - .await - }) + + server + .document_service() + .get_document_snapshots(&document_id, limit, &workspace_id) + .await } - fn get_document_data( + async fn get_document_data( &self, document_id: &str, workspace_id: &str, - ) -> FutureResult, Error> { + ) -> Result, Error> { let workspace_id = workspace_id.to_string(); - let server = self.get_server(); + let server = self.get_server()?; let document_id = document_id.to_string(); - FutureResult::new(async move { - server? 
- .document_service() - .get_document_data(&document_id, &workspace_id) - .await - }) + + server + .document_service() + .get_document_data(&document_id, &workspace_id) + .await } } @@ -563,34 +545,11 @@ impl CollabCloudPluginProvider for ServerProvider { vec![] } }, - CollabPluginProviderContext::Supabase { - uid, - collab_object, - local_collab, - local_collab_db, - } => { - let mut plugins: Vec> = vec![]; - if let Some(remote_collab_storage) = self - .get_server() - .ok() - .and_then(|provider| provider.collab_storage(&collab_object)) - { - plugins.push(Box::new(SupabaseDBPlugin::new( - uid, - collab_object, - local_collab, - 1, - remote_collab_storage, - local_collab_db, - ))); - } - plugins - }, } } fn is_sync_enabled(&self) -> bool { - *self.user_enable_sync.read() + self.user_enable_sync.load(Ordering::Acquire) } } diff --git a/frontend/rust-lib/flowy-core/src/integrate/user.rs b/frontend/rust-lib/flowy-core/src/integrate/user.rs index f84a4ec6a8..c165eda9c2 100644 --- a/frontend/rust-lib/flowy-core/src/integrate/user.rs +++ b/frontend/rust-lib/flowy-core/src/integrate/user.rs @@ -131,21 +131,12 @@ impl UserStatusCallback for UserStatusCallbackImpl { create_if_not_exist: true, }, Server::AppFlowyCloud => FolderInitDataSource::Cloud(doc_state), - Server::Supabase => { - if is_new_user { - FolderInitDataSource::LocalDisk { - create_if_not_exist: true, - } - } else { - FolderInitDataSource::Cloud(doc_state) - } - }, }, Err(err) => match server_type { Server::Local => FolderInitDataSource::LocalDisk { create_if_not_exist: true, }, - Server::AppFlowyCloud | Server::Supabase => { + Server::AppFlowyCloud => { return Err(FlowyError::from(err)); }, }, diff --git a/frontend/rust-lib/flowy-core/src/lib.rs b/frontend/rust-lib/flowy-core/src/lib.rs index ae5b1d801d..55b4753c66 100644 --- a/frontend/rust-lib/flowy-core/src/lib.rs +++ b/frontend/rust-lib/flowy-core/src/lib.rs @@ -2,7 +2,6 @@ use flowy_search::folder::indexer::FolderIndexManagerImpl; use 
flowy_search::services::manager::SearchManager; -use parking_lot::Mutex; use std::rc::Rc; use std::sync::{Arc, Weak}; use std::time::Duration; @@ -302,7 +301,6 @@ impl From for CollabPluginProviderType { match server_type { Server::Local => CollabPluginProviderType::Local, Server::AppFlowyCloud => CollabPluginProviderType::AppFlowyCloud, - Server::Supabase => CollabPluginProviderType::Supabase, } } } @@ -323,13 +321,3 @@ impl ServerUser for ServerUserImpl { self.upgrade_user()?.workspace_id() } } - -pub struct MutexAppFlowyCore(pub Rc>); - -impl MutexAppFlowyCore { - pub fn new(appflowy_core: AppFlowyCore) -> Self { - Self(Rc::new(Mutex::new(appflowy_core))) - } -} -unsafe impl Sync for MutexAppFlowyCore {} -unsafe impl Send for MutexAppFlowyCore {} diff --git a/frontend/rust-lib/flowy-database-pub/src/cloud.rs b/frontend/rust-lib/flowy-database-pub/src/cloud.rs index 24da5c72a1..f35ef42cfb 100644 --- a/frontend/rust-lib/flowy-database-pub/src/cloud.rs +++ b/frontend/rust-lib/flowy-database-pub/src/cloud.rs @@ -1,13 +1,12 @@ use anyhow::Error; pub use client_api::entity::ai_dto::{TranslateItem, TranslateRowResponse}; -use collab::core::collab::DataSource; +use collab::entity::EncodedCollab; use collab_entity::CollabType; use flowy_error::FlowyError; use lib_infra::async_trait::async_trait; -use lib_infra::future::FutureResult; use std::collections::HashMap; -pub type CollabDocStateByOid = HashMap; +pub type EncodeCollabByOid = HashMap; pub type SummaryRowContent = HashMap; pub type TranslateRowContent = Vec; @@ -41,25 +40,25 @@ pub trait DatabaseAIService: Send + Sync { /// #[async_trait] pub trait DatabaseCloudService: Send + Sync { - fn get_database_object_doc_state( + async fn get_database_encode_collab( &self, object_id: &str, collab_type: CollabType, workspace_id: &str, - ) -> FutureResult>, Error>; + ) -> Result, Error>; - fn batch_get_database_object_doc_state( + async fn batch_get_database_encode_collab( &self, object_ids: Vec, object_ty: CollabType, 
workspace_id: &str, - ) -> FutureResult; + ) -> Result; - fn get_database_collab_object_snapshots( + async fn get_database_collab_object_snapshots( &self, object_id: &str, limit: usize, - ) -> FutureResult, Error>; + ) -> Result, Error>; } pub struct DatabaseSnapshot { diff --git a/frontend/rust-lib/flowy-database2/Cargo.toml b/frontend/rust-lib/flowy-database2/Cargo.toml index f4acee0d4d..ee05a8d73f 100644 --- a/frontend/rust-lib/flowy-database2/Cargo.toml +++ b/frontend/rust-lib/flowy-database2/Cargo.toml @@ -15,7 +15,6 @@ flowy-database-pub = { workspace = true } flowy-derive.workspace = true flowy-notification = { workspace = true } -parking_lot.workspace = true protobuf.workspace = true flowy-error = { path = "../flowy-error", features = [ "impl_from_dispatch_error", @@ -29,6 +28,7 @@ tracing.workspace = true serde.workspace = true serde_json.workspace = true serde_repr.workspace = true +arc-swap.workspace = true lib-infra = { workspace = true } chrono = { workspace = true, default-features = false, features = ["clock"] } rust_decimal = "1.28.1" diff --git a/frontend/rust-lib/flowy-database2/src/event_handler.rs b/frontend/rust-lib/flowy-database2/src/event_handler.rs index a051dcf63c..5f4f7456af 100644 --- a/frontend/rust-lib/flowy-database2/src/event_handler.rs +++ b/frontend/rust-lib/flowy-database2/src/event_handler.rs @@ -3,7 +3,7 @@ use std::sync::{Arc, Weak}; use collab_database::rows::RowId; use lib_infra::box_any::BoxAny; use tokio::sync::oneshot; -use tracing::error; +use tracing::{error, trace}; use flowy_error::{FlowyError, FlowyResult}; use lib_dispatch::prelude::{af_spawn, data_result_ok, AFPluginData, AFPluginState, DataResult}; @@ -33,8 +33,17 @@ pub(crate) async fn get_database_data_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let view_id: DatabaseViewIdPB = data.into_inner(); - let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; + let database_id = manager + 
.get_database_id_with_view_id(view_id.as_ref()) + .await?; + let database_editor = manager.get_database_editor(&database_id).await?; let data = database_editor.get_database_data(view_id.as_ref()).await?; + trace!( + "layout: {:?}, rows: {}, fields: {}", + data.layout_type, + data.rows.len(), + data.fields.len() + ); data_result_ok(data) } @@ -72,7 +81,9 @@ pub(crate) async fn get_database_setting_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let view_id: DatabaseViewIdPB = data.into_inner(); - let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; + let database_editor = manager + .get_database_editor_with_view_id(view_id.as_ref()) + .await?; let data = database_editor .get_database_view_setting(view_id.as_ref()) .await?; @@ -86,7 +97,9 @@ pub(crate) async fn update_database_setting_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params = data.try_into_inner()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; if let Some(payload) = params.insert_filter { database_editor @@ -139,7 +152,9 @@ pub(crate) async fn get_all_filters_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let view_id: DatabaseViewIdPB = data.into_inner(); - let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; + let database_editor = manager + .get_database_editor_with_view_id(view_id.as_ref()) + .await?; let filters = database_editor.get_all_filters(view_id.as_ref()).await; data_result_ok(filters) } @@ -151,7 +166,9 @@ pub(crate) async fn get_all_sorts_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let view_id: DatabaseViewIdPB = data.into_inner(); - let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; + let database_editor = manager + .get_database_editor_with_view_id(view_id.as_ref()) + .await?; 
let sorts = database_editor.get_all_sorts(view_id.as_ref()).await; data_result_ok(sorts) } @@ -163,7 +180,9 @@ pub(crate) async fn delete_all_sorts_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let view_id: DatabaseViewIdPB = data.into_inner(); - let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; + let database_editor = manager + .get_database_editor_with_view_id(view_id.as_ref()) + .await?; database_editor.delete_all_sorts(view_id.as_ref()).await; Ok(()) } @@ -175,9 +194,12 @@ pub(crate) async fn get_fields_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: GetFieldParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let fields = database_editor .get_fields(¶ms.view_id, params.field_ids) + .await .into_iter() .map(FieldPB::new) .collect::>() @@ -192,9 +214,10 @@ pub(crate) async fn get_primary_field_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let view_id = data.into_inner().value; - let database_editor = manager.get_database_with_view_id(&view_id).await?; + let database_editor = manager.get_database_editor_with_view_id(&view_id).await?; let mut fields = database_editor .get_fields(&view_id, None) + .await .into_iter() .filter(|field| field.is_primary) .map(FieldPB::new) @@ -221,7 +244,9 @@ pub(crate) async fn update_field_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: FieldChangesetParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor.update_field(params).await?; Ok(()) } @@ -233,8 +258,10 @@ pub(crate) async fn update_field_type_option_handler( ) -> Result<(), FlowyError> { let 
manager = upgrade_manager(manager)?; let params: TypeOptionChangesetParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; - if let Some(old_field) = database_editor.get_field(¶ms.field_id) { + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; + if let Some(old_field) = database_editor.get_field(¶ms.field_id).await { let field_type = FieldType::from(old_field.field_type); let type_option_data = type_option_data_from_pb(params.type_option_data, &field_type)?; database_editor @@ -251,7 +278,9 @@ pub(crate) async fn delete_field_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: FieldIdParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor.delete_field(¶ms.field_id).await?; Ok(()) } @@ -263,7 +292,9 @@ pub(crate) async fn clear_field_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: FieldIdParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .clear_field(¶ms.view_id, ¶ms.field_id) .await?; @@ -277,14 +308,17 @@ pub(crate) async fn switch_to_field_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: EditFieldParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; - let old_field = database_editor.get_field(¶ms.field_id); + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; + let old_field = database_editor.get_field(¶ms.field_id).await; database_editor .switch_to_field_type(¶ms.field_id, params.field_type) 
.await?; if let Some(new_type_option) = database_editor .get_field(¶ms.field_id) + .await .map(|field| field.get_any_type_option(field.field_type)) { match (old_field, new_type_option) { @@ -308,7 +342,9 @@ pub(crate) async fn duplicate_field_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: DuplicateFieldPayloadPB = data.into_inner(); - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .duplicate_field(¶ms.view_id, ¶ms.field_id) .await?; @@ -323,7 +359,9 @@ pub(crate) async fn create_field_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: CreateFieldParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let data = database_editor .create_field_with_type_option(params) .await?; @@ -338,7 +376,9 @@ pub(crate) async fn move_field_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: MoveFieldParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor.move_field(params).await?; Ok(()) } @@ -350,21 +390,42 @@ pub(crate) async fn get_row_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: RowIdParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let row = database_editor .get_row(¶ms.view_id, ¶ms.row_id) + .await .map(RowPB::from); data_result_ok(OptionalRowPB { row }) } +pub(crate) async fn init_row_handler( + data: AFPluginData, 
+ manager: AFPluginState>, +) -> Result<(), FlowyError> { + let manager = upgrade_manager(manager)?; + let params: RowIdParams = data.into_inner().try_into()?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; + database_editor.init_database_row(¶ms.row_id).await?; + Ok(()) +} + pub(crate) async fn get_row_meta_handler( data: AFPluginData, manager: AFPluginState>, ) -> DataResult { let manager = upgrade_manager(manager)?; let params: RowIdParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; - match database_editor.get_row_meta(¶ms.view_id, ¶ms.row_id) { + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; + match database_editor + .get_row_meta(¶ms.view_id, ¶ms.row_id) + .await + { None => Err(FlowyError::record_not_found()), Some(row) => data_result_ok(row), } @@ -376,7 +437,9 @@ pub(crate) async fn update_row_meta_handler( ) -> FlowyResult<()> { let manager = upgrade_manager(manager)?; let params: UpdateRowMetaParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let row_id = RowId::from(params.id.clone()); database_editor .update_row_meta(&row_id.clone(), params) @@ -391,7 +454,9 @@ pub(crate) async fn delete_rows_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: RepeatedRowIdPB = data.into_inner(); - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let row_ids = params .row_ids .into_iter() @@ -408,7 +473,9 @@ pub(crate) async fn duplicate_row_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: RowIdParams = data.into_inner().try_into()?; - let database_editor = 
manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .duplicate_row(¶ms.view_id, ¶ms.row_id) .await?; @@ -422,7 +489,9 @@ pub(crate) async fn move_row_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: MoveRowParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .move_row(¶ms.view_id, params.from_row_id, params.to_row_id) .await?; @@ -436,7 +505,9 @@ pub(crate) async fn create_row_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params = data.try_into_inner()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; match database_editor.create_row(params).await? 
{ Some(row) => data_result_ok(RowMetaPB::from(row)), @@ -451,7 +522,9 @@ pub(crate) async fn get_cell_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: CellIdParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let cell = database_editor .get_cell_pb(¶ms.field_id, ¶ms.row_id) .await @@ -466,7 +539,9 @@ pub(crate) async fn update_cell_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: CellChangesetPB = data.into_inner(); - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .update_cell_with_changeset( ¶ms.view_id, @@ -485,7 +560,9 @@ pub(crate) async fn new_select_option_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: CreateSelectOptionParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let result = database_editor .create_select_option(¶ms.field_id, params.option_name) .await; @@ -505,7 +582,9 @@ pub(crate) async fn insert_or_update_select_option_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params = data.into_inner(); - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .insert_select_options( ¶ms.view_id, @@ -524,7 +603,9 @@ pub(crate) async fn delete_select_option_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params = data.into_inner(); - let database_editor = 
manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .delete_select_options( ¶ms.view_id, @@ -544,7 +625,7 @@ pub(crate) async fn update_select_option_cell_handler( let manager = upgrade_manager(manager)?; let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?; let database_editor = manager - .get_database_with_view_id(¶ms.cell_identifier.view_id) + .get_database_editor_with_view_id(¶ms.cell_identifier.view_id) .await?; let changeset = SelectOptionCellChangeset { insert_option_ids: params.insert_option_ids, @@ -568,7 +649,9 @@ pub(crate) async fn update_checklist_cell_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: ChecklistCellDataChangesetParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let changeset = ChecklistCellChangeset { insert_options: params .insert_options @@ -609,7 +692,9 @@ pub(crate) async fn update_date_cell_handler( reminder_id: data.reminder_id, }; - let database_editor = manager.get_database_with_view_id(&cell_id.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(&cell_id.view_id) + .await?; database_editor .update_cell_with_changeset( &cell_id.view_id, @@ -628,7 +713,9 @@ pub(crate) async fn get_groups_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: DatabaseViewIdPB = data.into_inner(); - let database_editor = manager.get_database_with_view_id(params.as_ref()).await?; + let database_editor = manager + .get_database_editor_with_view_id(params.as_ref()) + .await?; let groups = database_editor.load_groups(params.as_ref()).await?; data_result_ok(groups) } @@ -640,7 +727,9 @@ pub(crate) async fn get_group_handler( ) -> DataResult { let manager = 
upgrade_manager(manager)?; let params: DatabaseGroupIdParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let group = database_editor .get_group(¶ms.view_id, ¶ms.group_id) .await?; @@ -654,7 +743,9 @@ pub(crate) async fn set_group_by_field_handler( ) -> FlowyResult<()> { let manager = upgrade_manager(manager)?; let params: GroupByFieldParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .set_group_by_field(¶ms.view_id, ¶ms.field_id, params.setting_content) .await?; @@ -669,17 +760,11 @@ pub(crate) async fn update_group_handler( let manager = upgrade_manager(manager)?; let params: UpdateGroupParams = data.into_inner().try_into()?; let view_id = params.view_id.clone(); - let database_editor = manager.get_database_with_view_id(&view_id).await?; + let database_editor = manager.get_database_editor_with_view_id(&view_id).await?; let group_changeset = GroupChangeset::from(params); - let (tx, rx) = oneshot::channel(); - af_spawn(async move { - let result = database_editor - .update_group(&view_id, vec![group_changeset]) - .await; - let _ = tx.send(result); - }); - - let _ = rx.await?; + database_editor + .update_group(&view_id, vec![group_changeset]) + .await?; Ok(()) } @@ -690,7 +775,9 @@ pub(crate) async fn move_group_handler( ) -> FlowyResult<()> { let manager = upgrade_manager(manager)?; let params: MoveGroupParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .move_group(¶ms.view_id, ¶ms.from_group_id, ¶ms.to_group_id) .await?; @@ -704,7 +791,9 @@ pub(crate) async fn 
move_group_row_handler( ) -> FlowyResult<()> { let manager = upgrade_manager(manager)?; let params: MoveGroupRowParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .move_group_row( ¶ms.view_id, @@ -724,7 +813,9 @@ pub(crate) async fn create_group_handler( ) -> FlowyResult<()> { let manager = upgrade_manager(manager)?; let params: CreateGroupParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .create_group(¶ms.view_id, ¶ms.name) .await?; @@ -738,7 +829,9 @@ pub(crate) async fn delete_group_handler( ) -> FlowyResult<()> { let manager = upgrade_manager(manager)?; let params: DeleteGroupParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor.delete_group(params).await?; Ok(()) } @@ -792,7 +885,7 @@ pub(crate) async fn set_layout_setting_handler( let changeset = data.into_inner(); let view_id = changeset.view_id.clone(); let params: LayoutSettingChangeset = changeset.try_into()?; - let database_editor = manager.get_database_with_view_id(&view_id).await?; + let database_editor = manager.get_database_editor_with_view_id(&view_id).await?; database_editor.set_layout_setting(&view_id, params).await?; Ok(()) } @@ -803,7 +896,9 @@ pub(crate) async fn get_layout_setting_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: DatabaseLayoutMeta = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; 
let layout_setting_pb = database_editor .get_layout_setting(¶ms.view_id, params.layout) .await @@ -819,7 +914,9 @@ pub(crate) async fn get_calendar_events_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: CalendarEventRequestParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let events = database_editor .get_all_calendar_events(¶ms.view_id) .await; @@ -833,7 +930,9 @@ pub(crate) async fn get_no_date_calendar_events_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: CalendarEventRequestParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let _events = database_editor .get_all_no_date_calendar_events(¶ms.view_id) .await; @@ -847,7 +946,9 @@ pub(crate) async fn get_calendar_event_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: RowIdParams = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; let event = database_editor .get_calendar_event(¶ms.view_id, params.row_id) .await; @@ -869,7 +970,9 @@ pub(crate) async fn move_calendar_event_handler( date: Some(data.timestamp), ..Default::default() }; - let database_editor = manager.get_database_with_view_id(&cell_id.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(&cell_id.view_id) + .await?; database_editor .update_cell_with_changeset( &cell_id.view_id, @@ -897,7 +1000,7 @@ pub(crate) async fn export_csv_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let view_id = data.into_inner().value; - let database = 
manager.get_database_with_view_id(&view_id).await?; + let database = manager.get_database_editor_with_view_id(&view_id).await?; let data = database.export_csv(CSVFormat::Original).await?; data_result_ok(DatabaseExportDataPB { export_type: DatabaseExportDataType::CSV, @@ -923,7 +1026,7 @@ pub(crate) async fn get_field_settings_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let (view_id, field_ids) = data.into_inner().try_into()?; - let database_editor = manager.get_database_with_view_id(&view_id).await?; + let database_editor = manager.get_database_editor_with_view_id(&view_id).await?; let field_settings = database_editor .get_field_settings(&view_id, field_ids.clone()) @@ -944,7 +1047,9 @@ pub(crate) async fn get_all_field_settings_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let view_id = data.into_inner(); - let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; + let database_editor = manager + .get_database_editor_with_view_id(view_id.as_ref()) + .await?; let field_settings = database_editor .get_all_field_settings(view_id.as_ref()) @@ -965,7 +1070,9 @@ pub(crate) async fn update_field_settings_handler( ) -> FlowyResult<()> { let manager = upgrade_manager(manager)?; let params = data.try_into_inner()?; - let database_editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let database_editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; database_editor .update_field_settings_with_changeset(params) .await?; @@ -979,7 +1086,9 @@ pub(crate) async fn get_all_calculations_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let view_id = data.into_inner(); - let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; + let database_editor = manager + .get_database_editor_with_view_id(view_id.as_ref()) + .await?; let calculations = database_editor.get_all_calculations(view_id.as_ref()).await; @@ -993,7 +1102,9 @@ pub(crate) 
async fn update_calculation_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: UpdateCalculationChangesetPB = data.into_inner(); - let editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; editor.update_calculation(params).await?; @@ -1007,7 +1118,9 @@ pub(crate) async fn remove_calculation_handler( ) -> Result<(), FlowyError> { let manager = upgrade_manager(manager)?; let params: RemoveCalculationChangesetPB = data.into_inner(); - let editor = manager.get_database_with_view_id(¶ms.view_id).await?; + let editor = manager + .get_database_editor_with_view_id(¶ms.view_id) + .await?; editor.remove_calculation(params).await?; @@ -1041,7 +1154,7 @@ pub(crate) async fn update_relation_cell_handler( removed_row_ids: params.removed_row_ids.into_iter().map(Into::into).collect(), }; - let database_editor = manager.get_database_with_view_id(&view_id).await?; + let database_editor = manager.get_database_editor_with_view_id(&view_id).await?; // // get the related database // let related_database_id = database_editor @@ -1072,7 +1185,7 @@ pub(crate) async fn get_related_row_datas_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let params: GetRelatedRowDataPB = data.into_inner(); - let database_editor = manager.get_database(¶ms.database_id).await?; + let database_editor = manager.get_database_editor(¶ms.database_id).await?; let row_datas = database_editor .get_related_rows(Some(¶ms.row_ids)) .await?; @@ -1086,7 +1199,7 @@ pub(crate) async fn get_related_database_rows_handler( ) -> DataResult { let manager = upgrade_manager(manager)?; let database_id = data.into_inner().value; - let database_editor = manager.get_database(&database_id).await?; + let database_editor = manager.get_database_editor(&database_id).await?; let row_datas = database_editor.get_related_rows(None).await?; data_result_ok(RepeatedRelatedRowDataPB { rows: 
row_datas }) diff --git a/frontend/rust-lib/flowy-database2/src/event_map.rs b/frontend/rust-lib/flowy-database2/src/event_map.rs index 02c64da785..03f263d16d 100644 --- a/frontend/rust-lib/flowy-database2/src/event_map.rs +++ b/frontend/rust-lib/flowy-database2/src/event_map.rs @@ -13,85 +13,86 @@ pub fn init(database_manager: Weak) -> AFPlugin { .name(env!("CARGO_PKG_NAME")) .state(database_manager); plugin - .event(DatabaseEvent::GetDatabase, get_database_data_handler) - .event(DatabaseEvent::GetDatabaseData, get_database_data_handler) - .event(DatabaseEvent::GetDatabaseId, get_database_id_handler) - .event(DatabaseEvent::GetDatabaseSetting, get_database_setting_handler) - .event(DatabaseEvent::UpdateDatabaseSetting, update_database_setting_handler) - .event(DatabaseEvent::GetAllFilters, get_all_filters_handler) - .event(DatabaseEvent::GetAllSorts, get_all_sorts_handler) - .event(DatabaseEvent::DeleteAllSorts, delete_all_sorts_handler) - // Field - .event(DatabaseEvent::GetFields, get_fields_handler) - .event(DatabaseEvent::GetPrimaryField, get_primary_field_handler) - .event(DatabaseEvent::UpdateField, update_field_handler) - .event(DatabaseEvent::UpdateFieldTypeOption, update_field_type_option_handler) - .event(DatabaseEvent::DeleteField, delete_field_handler) - .event(DatabaseEvent::ClearField, clear_field_handler) - .event(DatabaseEvent::UpdateFieldType, switch_to_field_handler) - .event(DatabaseEvent::DuplicateField, duplicate_field_handler) - .event(DatabaseEvent::MoveField, move_field_handler) - .event(DatabaseEvent::CreateField, create_field_handler) - // Row - .event(DatabaseEvent::CreateRow, create_row_handler) - .event(DatabaseEvent::GetRow, get_row_handler) - .event(DatabaseEvent::GetRowMeta, get_row_meta_handler) - .event(DatabaseEvent::UpdateRowMeta, update_row_meta_handler) - .event(DatabaseEvent::DeleteRows, delete_rows_handler) - .event(DatabaseEvent::DuplicateRow, duplicate_row_handler) - .event(DatabaseEvent::MoveRow, move_row_handler) - // 
Cell - .event(DatabaseEvent::GetCell, get_cell_handler) - .event(DatabaseEvent::UpdateCell, update_cell_handler) - // SelectOption - .event(DatabaseEvent::CreateSelectOption, new_select_option_handler) - .event(DatabaseEvent::InsertOrUpdateSelectOption, insert_or_update_select_option_handler) - .event(DatabaseEvent::DeleteSelectOption, delete_select_option_handler) - .event(DatabaseEvent::UpdateSelectOptionCell, update_select_option_cell_handler) - // Checklist - .event(DatabaseEvent::UpdateChecklistCell, update_checklist_cell_handler) - // Date - .event(DatabaseEvent::UpdateDateCell, update_date_cell_handler) - // Group - .event(DatabaseEvent::SetGroupByField, set_group_by_field_handler) - .event(DatabaseEvent::MoveGroup, move_group_handler) - .event(DatabaseEvent::MoveGroupRow, move_group_row_handler) - .event(DatabaseEvent::GetGroups, get_groups_handler) - .event(DatabaseEvent::GetGroup, get_group_handler) - .event(DatabaseEvent::UpdateGroup, update_group_handler) - .event(DatabaseEvent::CreateGroup, create_group_handler) - .event(DatabaseEvent::DeleteGroup, delete_group_handler) - // Database - .event(DatabaseEvent::GetDatabaseMeta, get_database_meta_handler) - .event(DatabaseEvent::GetDatabases, get_databases_handler) - // Calendar - .event(DatabaseEvent::GetAllCalendarEvents, get_calendar_events_handler) - .event(DatabaseEvent::GetNoDateCalendarEvents, get_no_date_calendar_events_handler) - .event(DatabaseEvent::GetCalendarEvent, get_calendar_event_handler) - .event(DatabaseEvent::MoveCalendarEvent, move_calendar_event_handler) - // Layout setting - .event(DatabaseEvent::SetLayoutSetting, set_layout_setting_handler) - .event(DatabaseEvent::GetLayoutSetting, get_layout_setting_handler) - .event(DatabaseEvent::CreateDatabaseView, create_database_view) - // Export - .event(DatabaseEvent::ExportCSV, export_csv_handler) - .event(DatabaseEvent::GetDatabaseSnapshots, get_snapshots_handler) - // Field settings - .event(DatabaseEvent::GetFieldSettings, 
get_field_settings_handler) - .event(DatabaseEvent::GetAllFieldSettings, get_all_field_settings_handler) - .event(DatabaseEvent::UpdateFieldSettings, update_field_settings_handler) - // Calculations - .event(DatabaseEvent::GetAllCalculations, get_all_calculations_handler) - .event(DatabaseEvent::UpdateCalculation, update_calculation_handler) - .event(DatabaseEvent::RemoveCalculation, remove_calculation_handler) - // Relation - .event(DatabaseEvent::GetRelatedDatabaseIds, get_related_database_ids_handler) - .event(DatabaseEvent::UpdateRelationCell, update_relation_cell_handler) - .event(DatabaseEvent::GetRelatedRowDatas, get_related_row_datas_handler) - .event(DatabaseEvent::GetRelatedDatabaseRows, get_related_database_rows_handler) - // AI - .event(DatabaseEvent::SummarizeRow, summarize_row_handler) - .event(DatabaseEvent::TranslateRow, translate_row_handler) + .event(DatabaseEvent::GetDatabase, get_database_data_handler) + .event(DatabaseEvent::GetDatabaseData, get_database_data_handler) + .event(DatabaseEvent::GetDatabaseId, get_database_id_handler) + .event(DatabaseEvent::GetDatabaseSetting, get_database_setting_handler) + .event(DatabaseEvent::UpdateDatabaseSetting, update_database_setting_handler) + .event(DatabaseEvent::GetAllFilters, get_all_filters_handler) + .event(DatabaseEvent::GetAllSorts, get_all_sorts_handler) + .event(DatabaseEvent::DeleteAllSorts, delete_all_sorts_handler) + // Field + .event(DatabaseEvent::GetFields, get_fields_handler) + .event(DatabaseEvent::GetPrimaryField, get_primary_field_handler) + .event(DatabaseEvent::UpdateField, update_field_handler) + .event(DatabaseEvent::UpdateFieldTypeOption, update_field_type_option_handler) + .event(DatabaseEvent::DeleteField, delete_field_handler) + .event(DatabaseEvent::ClearField, clear_field_handler) + .event(DatabaseEvent::UpdateFieldType, switch_to_field_handler) + .event(DatabaseEvent::DuplicateField, duplicate_field_handler) + .event(DatabaseEvent::MoveField, move_field_handler) + 
.event(DatabaseEvent::CreateField, create_field_handler) + // Row + .event(DatabaseEvent::CreateRow, create_row_handler) + .event(DatabaseEvent::GetRow, get_row_handler) + .event(DatabaseEvent::InitRow, init_row_handler) + .event(DatabaseEvent::GetRowMeta, get_row_meta_handler) + .event(DatabaseEvent::UpdateRowMeta, update_row_meta_handler) + .event(DatabaseEvent::DeleteRows, delete_rows_handler) + .event(DatabaseEvent::DuplicateRow, duplicate_row_handler) + .event(DatabaseEvent::MoveRow, move_row_handler) + // Cell + .event(DatabaseEvent::GetCell, get_cell_handler) + .event(DatabaseEvent::UpdateCell, update_cell_handler) + // SelectOption + .event(DatabaseEvent::CreateSelectOption, new_select_option_handler) + .event(DatabaseEvent::InsertOrUpdateSelectOption, insert_or_update_select_option_handler) + .event(DatabaseEvent::DeleteSelectOption, delete_select_option_handler) + .event(DatabaseEvent::UpdateSelectOptionCell, update_select_option_cell_handler) + // Checklist + .event(DatabaseEvent::UpdateChecklistCell, update_checklist_cell_handler) + // Date + .event(DatabaseEvent::UpdateDateCell, update_date_cell_handler) + // Group + .event(DatabaseEvent::SetGroupByField, set_group_by_field_handler) + .event(DatabaseEvent::MoveGroup, move_group_handler) + .event(DatabaseEvent::MoveGroupRow, move_group_row_handler) + .event(DatabaseEvent::GetGroups, get_groups_handler) + .event(DatabaseEvent::GetGroup, get_group_handler) + .event(DatabaseEvent::UpdateGroup, update_group_handler) + .event(DatabaseEvent::CreateGroup, create_group_handler) + .event(DatabaseEvent::DeleteGroup, delete_group_handler) + // Database + .event(DatabaseEvent::GetDatabaseMeta, get_database_meta_handler) + .event(DatabaseEvent::GetDatabases, get_databases_handler) + // Calendar + .event(DatabaseEvent::GetAllCalendarEvents, get_calendar_events_handler) + .event(DatabaseEvent::GetNoDateCalendarEvents, get_no_date_calendar_events_handler) + .event(DatabaseEvent::GetCalendarEvent, 
get_calendar_event_handler) + .event(DatabaseEvent::MoveCalendarEvent, move_calendar_event_handler) + // Layout setting + .event(DatabaseEvent::SetLayoutSetting, set_layout_setting_handler) + .event(DatabaseEvent::GetLayoutSetting, get_layout_setting_handler) + .event(DatabaseEvent::CreateDatabaseView, create_database_view) + // Export + .event(DatabaseEvent::ExportCSV, export_csv_handler) + .event(DatabaseEvent::GetDatabaseSnapshots, get_snapshots_handler) + // Field settings + .event(DatabaseEvent::GetFieldSettings, get_field_settings_handler) + .event(DatabaseEvent::GetAllFieldSettings, get_all_field_settings_handler) + .event(DatabaseEvent::UpdateFieldSettings, update_field_settings_handler) + // Calculations + .event(DatabaseEvent::GetAllCalculations, get_all_calculations_handler) + .event(DatabaseEvent::UpdateCalculation, update_calculation_handler) + .event(DatabaseEvent::RemoveCalculation, remove_calculation_handler) + // Relation + .event(DatabaseEvent::GetRelatedDatabaseIds, get_related_database_ids_handler) + .event(DatabaseEvent::UpdateRelationCell, update_relation_cell_handler) + .event(DatabaseEvent::GetRelatedRowDatas, get_related_row_datas_handler) + .event(DatabaseEvent::GetRelatedDatabaseRows, get_related_database_rows_handler) + // AI + .event(DatabaseEvent::SummarizeRow, summarize_row_handler) + .event(DatabaseEvent::TranslateRow, translate_row_handler) } /// [DatabaseEvent] defines events that are used to interact with the Grid. 
You could check [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/backend/protobuf) @@ -377,4 +378,7 @@ pub enum DatabaseEvent { #[event(input = "TranslateRowPB")] TranslateRow = 175, + + #[event(input = "RowIdPB")] + InitRow = 176, } diff --git a/frontend/rust-lib/flowy-database2/src/manager.rs b/frontend/rust-lib/flowy-database2/src/manager.rs index 4aa3f643fc..c37321fc92 100644 --- a/frontend/rust-lib/flowy-database2/src/manager.rs +++ b/frontend/rust-lib/flowy-database2/src/manager.rs @@ -1,22 +1,28 @@ use anyhow::anyhow; +use arc_swap::ArcSwapOption; +use async_trait::async_trait; +use std::borrow::BorrowMut; use std::collections::HashMap; use std::sync::{Arc, Weak}; -use collab::core::collab::{DataSource, MutexCollab}; -use collab_database::database::{DatabaseData, MutexDatabase}; +use collab::core::collab::DataSource; +use collab::preclude::Collab; +use collab_database::database::{Database, DatabaseData}; use collab_database::error::DatabaseError; use collab_database::rows::RowId; use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout}; use collab_database::workspace_database::{ - CollabDocStateByOid, CollabFuture, DatabaseCollabService, DatabaseMeta, WorkspaceDatabase, + DatabaseCollabService, DatabaseMeta, EncodeCollabByOid, WorkspaceDatabase, }; use collab_entity::{CollabType, EncodedCollab}; use collab_plugins::local_storage::kv::KVTransactionDB; use tokio::sync::{Mutex, RwLock}; use tracing::{event, instrument, trace}; -use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig}; -use collab_integrate::{CollabKVAction, CollabKVDB, CollabPersistenceConfig}; +use collab_integrate::collab_builder::{ + AppFlowyCollabBuilder, CollabBuilderConfig, KVDBCollabPersistenceImpl, +}; +use collab_integrate::{CollabKVAction, CollabKVDB}; use flowy_database_pub::cloud::{ DatabaseAIService, DatabaseCloudService, SummaryRowContent, TranslateItem, TranslateRowContent, }; 
@@ -42,7 +48,7 @@ pub trait DatabaseUser: Send + Sync { pub struct DatabaseManager { user: Arc, - workspace_database: Arc>>>, + workspace_database: ArcSwapOption>, task_scheduler: Arc>, editors: Mutex>>, collab_builder: Arc, @@ -89,10 +95,10 @@ impl DatabaseManager { } self.editors.lock().await.clear(); // 3. Clear the workspace database - if let Some(old_workspace_database) = self.workspace_database.write().await.take() { - old_workspace_database.close(); + if let Some(old_workspace_database) = self.workspace_database.swap(None) { + let wdb = old_workspace_database.read().await; + wdb.close(); } - *self.workspace_database.write().await = None; let collab_db = self.user.collab_db(uid)?; let collab_builder = UserDatabaseCollabServiceImpl { @@ -100,30 +106,27 @@ impl DatabaseManager { collab_builder: self.collab_builder.clone(), cloud_service: self.cloud_service.clone(), }; - let config = CollabPersistenceConfig::new().snapshot_per_update(100); let workspace_id = self.user.workspace_id()?; let workspace_database_object_id = self.user.workspace_database_object_id()?; - let mut workspace_database_doc_state = DataSource::Disk; + let mut workspace_database_doc_state = + KVDBCollabPersistenceImpl::new(collab_db.clone(), uid).into_data_source(); // If the workspace database not exist in disk, try to fetch from remote. 
if !self.is_collab_exist(uid, &collab_db, &workspace_database_object_id) { trace!("workspace database not exist, try to fetch from remote"); match self .cloud_service - .get_database_object_doc_state( + .get_database_encode_collab( &workspace_database_object_id, CollabType::WorkspaceDatabase, &workspace_id, ) .await { - Ok(doc_state) => match doc_state { - Some(doc_state) => { - workspace_database_doc_state = DataSource::DocStateV1(doc_state); - }, - None => { - workspace_database_doc_state = DataSource::Disk; - }, + Ok(value) => { + if let Some(encode_collab) = value { + workspace_database_doc_state = DataSource::from(encode_collab); + } }, Err(err) => { return Err(FlowyError::record_not_found().with_context(format!( @@ -140,20 +143,64 @@ impl DatabaseManager { "open aggregate database views object: {}", &workspace_database_object_id ); - let collab = collab_builder.build_collab_with_config( + + let workspace_id = self + .user + .workspace_id() + .map_err(|err| DatabaseError::Internal(err.into()))?; + let collab_object = self.collab_builder.collab_object( + &workspace_id, uid, &workspace_database_object_id, CollabType::WorkspaceDatabase, - collab_db.clone(), - workspace_database_doc_state, - config.clone(), )?; - let workspace_database = - WorkspaceDatabase::open(uid, collab, collab_db, config, collab_builder); - *self.workspace_database.write().await = Some(Arc::new(workspace_database)); + let workspace_database = self.collab_builder.create_workspace_database( + collab_object, + workspace_database_doc_state, + collab_db, + CollabBuilderConfig::default().sync_enable(true), + collab_builder, + )?; + self.workspace_database.store(Some(workspace_database)); Ok(()) } + //FIXME: we need to initialize sync plugin for newly created collabs + #[allow(dead_code)] + fn initialize_plugins( + &self, + uid: i64, + object_id: &str, + collab_type: CollabType, + collab: Arc>, + ) -> FlowyResult>> + where + T: BorrowMut + Send + Sync + 'static, + { + //FIXME: unfortunately 
UserDatabaseCollabService::build_collab_with_config is broken by + // design as it assumes that we can split collab building process, which we cannot because: + // 1. We should not be able to run plugins ie. SyncPlugin over not-fully initialized collab, + // and that's what originally build_collab_with_config did. + // 2. We cannot fully initialize collab from UserDatabaseCollabService, because + // WorkspaceDatabase itself requires UserDatabaseCollabService as constructor parameter. + // Ideally we should never need to initialize plugins that require collab instance as part of + // that collab construction process itself - it means that we should redesign SyncPlugin to only + // be fired once a collab is fully initialized. + let workspace_id = self + .user + .workspace_id() + .map_err(|err| DatabaseError::Internal(err.into()))?; + let object = self + .collab_builder + .collab_object(&workspace_id, uid, object_id, collab_type)?; + let collab = self.collab_builder.finalize( + object, + CollabBuilderConfig::default().sync_enable(true), + collab, + )?; + Ok(collab) + } + #[instrument( name = "database_initialize_with_new_user", level = "debug", @@ -166,19 +213,24 @@ impl DatabaseManager { } pub async fn get_database_inline_view_id(&self, database_id: &str) -> FlowyResult { - let wdb = self.get_database_indexer().await?; - let database_collab = wdb.get_database(database_id).await.ok_or_else(|| { - FlowyError::record_not_found().with_context(format!("The database:{} not found", database_id)) - })?; - - let lock_guard = database_collab.lock(); + let lock = self.workspace_database()?; + let wdb = lock.read().await; + let database_collab = wdb + .get_or_create_database(database_id) + .await + .ok_or_else(|| { + FlowyError::record_not_found() + .with_context(format!("The database:{} not found", database_id)) + })?; + let lock_guard = database_collab.read().await; Ok(lock_guard.get_inline_view_id()) } pub async fn get_all_databases_meta(&self) -> Vec { let mut items = 
vec![]; - if let Ok(wdb) = self.get_database_indexer().await { + if let Some(lock) = self.workspace_database.load_full() { + let wdb = lock.read().await; items = wdb.get_all_database_meta() } items @@ -188,7 +240,8 @@ impl DatabaseManager { &self, view_ids_by_database_id: HashMap>, ) -> FlowyResult<()> { - let wdb = self.get_database_indexer().await?; + let lock = self.workspace_database()?; + let mut wdb = lock.write().await; view_ids_by_database_id .into_iter() .for_each(|(database_id, view_ids)| { @@ -197,13 +250,9 @@ impl DatabaseManager { Ok(()) } - pub async fn get_database_with_view_id(&self, view_id: &str) -> FlowyResult> { - let database_id = self.get_database_id_with_view_id(view_id).await?; - self.get_database(&database_id).await - } - pub async fn get_database_id_with_view_id(&self, view_id: &str) -> FlowyResult { - let wdb = self.get_database_indexer().await?; + let lock = self.workspace_database()?; + let wdb = lock.read().await; wdb.get_database_id_with_view_id(view_id).ok_or_else(|| { FlowyError::record_not_found() .with_context(format!("The database for view id: {} not found", view_id)) @@ -211,28 +260,44 @@ impl DatabaseManager { } pub async fn get_database_row_ids_with_view_id(&self, view_id: &str) -> FlowyResult> { - let database = self.get_database_with_view_id(view_id).await?; - Ok(database.get_row_ids()) + let database = self.get_database_editor_with_view_id(view_id).await?; + Ok(database.get_row_ids().await) } - pub async fn get_database(&self, database_id: &str) -> FlowyResult> { + pub async fn get_database_editor_with_view_id( + &self, + view_id: &str, + ) -> FlowyResult> { + let database_id = self.get_database_id_with_view_id(view_id).await?; + self.get_database_editor(&database_id).await + } + + pub async fn get_database_editor(&self, database_id: &str) -> FlowyResult> { if let Some(editor) = self.editors.lock().await.get(database_id).cloned() { return Ok(editor); } - // TODO(nathan): refactor the get_database that split the database 
creation and database opening. self.open_database(database_id).await } pub async fn open_database(&self, database_id: &str) -> FlowyResult> { trace!("open database editor:{}", database_id); - let database = self - .get_database_indexer() - .await? - .get_database(database_id) + let lock = self.workspace_database()?; + let database = lock + .read() + .await + .get_or_create_database(database_id) .await .ok_or_else(|| FlowyError::collab_not_sync().with_context("open database error"))?; - let editor = Arc::new(DatabaseEditor::new(database, self.task_scheduler.clone()).await?); + let editor = Arc::new( + DatabaseEditor::new( + self.user.clone(), + database, + self.task_scheduler.clone(), + self.collab_builder.clone(), + ) + .await?, + ); self .editors .lock() @@ -241,17 +306,14 @@ impl DatabaseManager { Ok(editor) } + /// Open the database view pub async fn open_database_view>(&self, view_id: T) -> FlowyResult<()> { let view_id = view_id.as_ref(); - let wdb = self.get_database_indexer().await?; - if let Some(database_id) = wdb.get_database_id_with_view_id(view_id) { - if let Some(database) = wdb.open_database(&database_id) { - if let Some(lock_database) = database.try_lock() { - if let Some(lock_collab) = lock_database.get_collab().try_lock() { - trace!("{} database start init sync", view_id); - lock_collab.start_init_sync(); - } - } + let lock = self.workspace_database()?; + let workspace_database = lock.read().await; + if let Some(database_id) = workspace_database.get_database_id_with_view_id(view_id) { + if self.editors.lock().await.get(&database_id).is_none() { + self.open_database(&database_id).await?; } } Ok(()) @@ -259,20 +321,23 @@ impl DatabaseManager { pub async fn close_database_view>(&self, view_id: T) -> FlowyResult<()> { let view_id = view_id.as_ref(); - let wdb = self.get_database_indexer().await?; - let database_id = wdb.get_database_id_with_view_id(view_id); + let lock = self.workspace_database()?; + let workspace_database = lock.read().await; + let 
database_id = workspace_database.get_database_id_with_view_id(view_id); if let Some(database_id) = database_id { let mut editors = self.editors.lock().await; let mut should_remove = false; + if let Some(editor) = editors.get(&database_id) { editor.close_view(view_id).await; - should_remove = editor.num_views().await == 0; + // when there is no opening views, mark the database to be removed. + should_remove = editor.num_of_opening_views().await == 0; } if should_remove { trace!("remove database editor:{}", database_id); editors.remove(&database_id); - wdb.close_database(&database_id); + workspace_database.close_database(&database_id); } } @@ -280,13 +345,14 @@ impl DatabaseManager { } pub async fn delete_database_view(&self, view_id: &str) -> FlowyResult<()> { - let database = self.get_database_with_view_id(view_id).await?; + let database = self.get_database_editor_with_view_id(view_id).await?; let _ = database.delete_database_view(view_id).await?; Ok(()) } pub async fn duplicate_database(&self, view_id: &str) -> FlowyResult> { - let wdb = self.get_database_indexer().await?; + let lock = self.workspace_database()?; + let wdb = lock.read().await; let data = wdb.get_database_data(view_id).await?; let json_bytes = data.to_json_bytes()?; Ok(json_bytes) @@ -313,12 +379,12 @@ impl DatabaseManager { create_view_params.view_id = view_id.to_string(); } - let wdb = self.get_database_indexer().await?; + let lock = self.workspace_database()?; + let mut wdb = lock.write().await; let database = wdb.create_database(create_database_params)?; let encoded_collab = database - .lock() - .get_collab() - .lock() + .read() + .await .encode_collab_v1(|collab| CollabType::Database.validate_require_data(collab))?; Ok(encoded_collab) } @@ -326,9 +392,11 @@ impl DatabaseManager { pub async fn create_database_with_params( &self, params: CreateDatabaseParams, - ) -> FlowyResult> { - let wdb = self.get_database_indexer().await?; + ) -> FlowyResult>> { + let lock = self.workspace_database()?; + 
let mut wdb = lock.write().await; let database = wdb.create_database(params)?; + Ok(database) } @@ -342,12 +410,14 @@ impl DatabaseManager { database_view_id: String, database_parent_view_id: String, ) -> FlowyResult<()> { - let wdb = self.get_database_indexer().await?; + let lock = self.workspace_database()?; + let mut wdb = lock.write().await; let mut params = CreateViewParams::new(database_id.clone(), database_view_id, name, layout); - if let Some(database) = wdb.get_database(&database_id).await { + if let Some(database) = wdb.get_or_create_database(&database_id).await { let (field, layout_setting, field_settings_map) = DatabaseLayoutDepsResolver::new(database, layout) - .resolve_deps_when_create_database_linked_view(&database_parent_view_id); + .resolve_deps_when_create_database_linked_view(&database_parent_view_id) + .await; if let Some(field) = field { params = params.with_deps_fields(vec![field], vec![default_field_settings_by_layout_map()]); } @@ -374,18 +444,12 @@ impl DatabaseManager { .await .map_err(internal_error)??; - // Currently, we only support importing up to 500 rows. We can support more rows in the future. 
- if !cfg!(debug_assertions) && params.rows.len() > 500 { - return Err(FlowyError::internal().with_context("The number of rows exceeds the limit")); - } - let view_id = params.inline_view_id.clone(); let database_id = params.database_id.clone(); let database = self.create_database_with_params(params).await?; let encoded_collab = database - .lock() - .get_collab() - .lock() + .read() + .await .encode_collab_v1(|collab| CollabType::Database.validate_require_data(collab))?; let result = ImportResult { database_id, @@ -405,7 +469,7 @@ impl DatabaseManager { } pub async fn export_csv(&self, view_id: &str, style: CSVFormat) -> FlowyResult { - let database = self.get_database_with_view_id(view_id).await?; + let database = self.get_database_editor_with_view_id(view_id).await?; database.export_csv(style).await } @@ -414,7 +478,7 @@ impl DatabaseManager { view_id: &str, layout: DatabaseLayoutPB, ) -> FlowyResult<()> { - let database = self.get_database_with_view_id(view_id).await?; + let database = self.get_database_editor_with_view_id(view_id).await?; database.update_view_layout(view_id, layout.into()).await } @@ -440,14 +504,11 @@ impl DatabaseManager { Ok(snapshots) } - /// Return the database indexer. 
- /// Each workspace has itw own Database indexer that manages all the databases and database views - async fn get_database_indexer(&self) -> FlowyResult> { - let database = self.workspace_database.read().await; - match &*database { - None => Err(FlowyError::internal().with_context("Workspace database not initialized")), - Some(user_database) => Ok(user_database.clone()), - } + fn workspace_database(&self) -> FlowyResult>> { + self + .workspace_database + .load_full() + .ok_or_else(|| FlowyError::internal().with_context("Workspace database not initialized")) } #[instrument(level = "debug", skip_all)] @@ -457,10 +518,10 @@ impl DatabaseManager { row_id: RowId, field_id: String, ) -> FlowyResult<()> { - let database = self.get_database_with_view_id(&view_id).await?; + let database = self.get_database_editor_with_view_id(&view_id).await?; let mut summary_row_content = SummaryRowContent::new(); - if let Some(row) = database.get_row(&view_id, &row_id) { - let fields = database.get_fields(&view_id, None); + if let Some(row) = database.get_row(&view_id, &row_id).await { + let fields = database.get_fields(&view_id, None).await; for field in fields { // When summarizing a row, skip the content in the "AI summary" cell; it does not need to // be summarized. @@ -501,12 +562,12 @@ impl DatabaseManager { row_id: RowId, field_id: String, ) -> FlowyResult<()> { - let database = self.get_database_with_view_id(&view_id).await?; + let database = self.get_database_editor_with_view_id(&view_id).await?; let mut translate_row_content = TranslateRowContent::new(); let mut language = "english".to_string(); - if let Some(row) = database.get_row(&view_id, &row_id) { - let fields = database.get_fields(&view_id, None); + if let Some(row) = database.get_row(&view_id, &row_id).await { + let fields = database.get_fields(&view_id, None).await; for field in fields { // When translate a row, skip the content in the "AI Translate" cell; it does not need to // be translated. 
@@ -582,79 +643,73 @@ struct UserDatabaseCollabServiceImpl { cloud_service: Arc, } +#[async_trait] impl DatabaseCollabService for UserDatabaseCollabServiceImpl { - fn get_collab_doc_state( + async fn get_encode_collab( &self, object_id: &str, object_ty: CollabType, - ) -> CollabFuture> { + ) -> Result, DatabaseError> { let workspace_id = self.user.workspace_id().unwrap(); let object_id = object_id.to_string(); let weak_cloud_service = Arc::downgrade(&self.cloud_service); - Box::pin(async move { - match weak_cloud_service.upgrade() { - None => Err(DatabaseError::Internal(anyhow!("Cloud service is dropped"))), - Some(cloud_service) => { - let doc_state = cloud_service - .get_database_object_doc_state(&object_id, object_ty, &workspace_id) - .await?; - match doc_state { - None => Ok(DataSource::Disk), - Some(doc_state) => Ok(DataSource::DocStateV1(doc_state)), - } - }, - } - }) + + match weak_cloud_service.upgrade() { + None => Err(DatabaseError::Internal(anyhow!("Cloud service is dropped"))), + Some(cloud_service) => { + let encode_collab = cloud_service + .get_database_encode_collab(&object_id, object_ty, &workspace_id) + .await?; + Ok(encode_collab) + }, + } } - fn batch_get_collab_update( + async fn batch_get_encode_collab( &self, object_ids: Vec, object_ty: CollabType, - ) -> CollabFuture> { + ) -> Result { let cloned_user = self.user.clone(); let weak_cloud_service = Arc::downgrade(&self.cloud_service); - Box::pin(async move { - let workspace_id = cloned_user - .workspace_id() - .map_err(|err| DatabaseError::Internal(err.into()))?; - match weak_cloud_service.upgrade() { - None => { - tracing::warn!("Cloud service is dropped"); - Ok(CollabDocStateByOid::default()) - }, - Some(cloud_service) => { - let updates = cloud_service - .batch_get_database_object_doc_state(object_ids, object_ty, &workspace_id) - .await?; - Ok(updates) - }, - } - }) + + let workspace_id = cloned_user + .workspace_id() + .map_err(|err| DatabaseError::Internal(err.into()))?; + match 
weak_cloud_service.upgrade() { + None => { + tracing::warn!("Cloud service is dropped"); + Ok(EncodeCollabByOid::default()) + }, + Some(cloud_service) => { + let updates = cloud_service + .batch_get_database_encode_collab(object_ids, object_ty, &workspace_id) + .await?; + Ok(updates) + }, + } } - fn build_collab_with_config( + ///NOTE: this method doesn't initialize plugins, however it is passed into WorkspaceDatabase, + /// therefore all Database/DatabaseRow creation methods must initialize plugins themselves. + fn build_collab( &self, uid: i64, object_id: &str, object_type: CollabType, collab_db: Weak, - collab_raw_data: DataSource, - _persistence_config: CollabPersistenceConfig, - ) -> Result, DatabaseError> { + data_source: DataSource, + ) -> Result { let workspace_id = self .user .workspace_id() .map_err(|err| DatabaseError::Internal(err.into()))?; - let collab = self.collab_builder.build_with_config( - &workspace_id, - uid, - object_id, - object_type.clone(), - collab_db.clone(), - collab_raw_data, - CollabBuilderConfig::default().sync_enable(true), - )?; + let object = self + .collab_builder + .collab_object(&workspace_id, uid, object_id, object_type)?; + let collab = self + .collab_builder + .build_collab(&object, &collab_db, data_source)?; Ok(collab) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/calculations/cache.rs b/frontend/rust-lib/flowy-database2/src/services/calculations/cache.rs index d406c88f04..4b6307b095 100644 --- a/frontend/rust-lib/flowy-database2/src/services/calculations/cache.rs +++ b/frontend/rust-lib/flowy-database2/src/services/calculations/cache.rs @@ -1,6 +1,5 @@ -use parking_lot::RwLock; use std::sync::Arc; use crate::utils::cache::AnyTypeCache; -pub type CalculationsByFieldIdCache = Arc>>; +pub type CalculationsByFieldIdCache = Arc>; diff --git a/frontend/rust-lib/flowy-database2/src/services/calculations/controller.rs b/frontend/rust-lib/flowy-database2/src/services/calculations/controller.rs index 
5e199b84ad..ad6cb71e6d 100644 --- a/frontend/rust-lib/flowy-database2/src/services/calculations/controller.rs +++ b/frontend/rust-lib/flowy-database2/src/services/calculations/controller.rs @@ -1,3 +1,4 @@ +use async_trait::async_trait; use std::str::FromStr; use std::sync::Arc; @@ -7,7 +8,6 @@ use flowy_error::FlowyResult; use serde::{Deserialize, Serialize}; use tokio::sync::RwLock; -use lib_infra::future::Fut; use lib_infra::priority_task::{QualityOfService, Task, TaskContent, TaskDispatcher}; use crate::entities::{ @@ -19,13 +19,14 @@ use crate::utils::cache::AnyTypeCache; use super::{Calculation, CalculationChangeset, CalculationsService}; +#[async_trait] pub trait CalculationsDelegate: Send + Sync + 'static { - fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut>>; - fn get_field(&self, field_id: &str) -> Option; - fn get_calculation(&self, view_id: &str, field_id: &str) -> Fut>>; - fn get_all_calculations(&self, view_id: &str) -> Fut>>>; - fn update_calculation(&self, view_id: &str, calculation: Calculation); - fn remove_calculation(&self, view_id: &str, calculation_id: &str); + async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec>; + async fn get_field(&self, field_id: &str) -> Option; + async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option>; + async fn get_all_calculations(&self, view_id: &str) -> Arc>>; + async fn update_calculation(&self, view_id: &str, calculation: Calculation); + async fn remove_calculation(&self, view_id: &str, calculation_id: &str); } pub struct CalculationsController { @@ -45,7 +46,7 @@ impl Drop for CalculationsController { } impl CalculationsController { - pub async fn new( + pub fn new( view_id: &str, handler_id: &str, delegate: T, @@ -65,7 +66,7 @@ impl CalculationsController { calculations_service: CalculationsService::new(), notifier, }; - this.update_cache(calculations).await; + this.update_cache(calculations); this } @@ -130,7 +131,8 @@ impl CalculationsController { if 
let Some(calculation) = calculation { self .delegate - .remove_calculation(&self.view_id, &calculation.id); + .remove_calculation(&self.view_id, &calculation.id) + .await; let notification = CalculationChangesetNotificationPB::from_delete( &self.view_id, @@ -165,7 +167,8 @@ impl CalculationsController { if !calc_type.is_allowed(new_field_type) { self .delegate - .remove_calculation(&self.view_id, &calculation.id); + .remove_calculation(&self.view_id, &calculation.id) + .await; let notification = CalculationChangesetNotificationPB::from_delete( &self.view_id, @@ -201,7 +204,8 @@ impl CalculationsController { if let Some(update) = update { self .delegate - .update_calculation(&self.view_id, update.clone()); + .update_calculation(&self.view_id, update.clone()) + .await; let notification = CalculationChangesetNotificationPB::from_update( &self.view_id, @@ -238,7 +242,10 @@ impl CalculationsController { let update = self.get_updated_calculation(calculation.clone()).await; if let Some(update) = update { updates.push(CalculationPB::from(&update)); - self.delegate.update_calculation(&self.view_id, update); + self + .delegate + .update_calculation(&self.view_id, update) + .await; } } } @@ -252,7 +259,10 @@ impl CalculationsController { if let Some(update) = update { updates.push(CalculationPB::from(&update)); - self.delegate.update_calculation(&self.view_id, update); + self + .delegate + .update_calculation(&self.view_id, update) + .await; } } } @@ -273,7 +283,7 @@ impl CalculationsController { .delegate .get_cells_for_field(&self.view_id, &calculation.field_id) .await; - let field = self.delegate.get_field(&calculation.field_id)?; + let field = self.delegate.get_field(&calculation.field_id).await?; let value = self @@ -299,7 +309,7 @@ impl CalculationsController { .get_cells_for_field(&self.view_id, &insert.field_id) .await; - let field = self.delegate.get_field(&insert.field_id)?; + let field = self.delegate.get_field(&insert.field_id).await?; let value = self 
.calculations_service @@ -331,12 +341,11 @@ impl CalculationsController { notification } - async fn update_cache(&self, calculations: Vec>) { + fn update_cache(&self, calculations: Vec>) { for calculation in calculations { let field_id = &calculation.field_id; self .calculations_by_field_cache - .write() .insert(field_id, calculation.clone()); } } diff --git a/frontend/rust-lib/flowy-database2/src/services/calculations/entities.rs b/frontend/rust-lib/flowy-database2/src/services/calculations/entities.rs index f4502020ac..2a2613230d 100644 --- a/frontend/rust-lib/flowy-database2/src/services/calculations/entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/calculations/entities.rs @@ -1,14 +1,17 @@ -use anyhow::bail; -use collab::core::any_map::AnyMapExtension; +use collab::preclude::encoding::serde::from_any; +use collab::preclude::Any; use collab_database::views::{CalculationMap, CalculationMapBuilder}; +use serde::Deserialize; use crate::entities::CalculationPB; -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Deserialize)] pub struct Calculation { pub id: String, pub field_id: String, + #[serde(default, rename = "ty")] pub calculation_type: i64, + #[serde(default, rename = "calculation_value")] pub value: String, } @@ -19,12 +22,12 @@ const CALCULATION_VALUE: &str = "calculation_value"; impl From for CalculationMap { fn from(data: Calculation) -> Self { - CalculationMapBuilder::new() - .insert_str_value(CALCULATION_ID, data.id) - .insert_str_value(FIELD_ID, data.field_id) - .insert_i64_value(CALCULATION_TYPE, data.calculation_type) - .insert_str_value(CALCULATION_VALUE, data.value) - .build() + CalculationMapBuilder::from([ + (CALCULATION_ID.into(), data.id.into()), + (FIELD_ID.into(), data.field_id.into()), + (CALCULATION_TYPE.into(), data.calculation_type.into()), + (CALCULATION_VALUE.into(), data.value.into()), + ]) } } @@ -45,29 +48,7 @@ impl TryFrom for Calculation { type Error = anyhow::Error; fn try_from(calculation: CalculationMap) -> Result 
{ - match ( - calculation.get_str_value(CALCULATION_ID), - calculation.get_str_value(FIELD_ID), - ) { - (Some(id), Some(field_id)) => { - let value = calculation - .get_str_value(CALCULATION_VALUE) - .unwrap_or_default(); - let calculation_type = calculation - .get_i64_value(CALCULATION_TYPE) - .unwrap_or_default(); - - Ok(Calculation { - id, - field_id, - calculation_type, - value, - }) - }, - _ => { - bail!("Invalid calculation data") - }, - } + from_any(&Any::from(calculation)).map_err(|e| e.into()) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/cell/cell_data_cache.rs b/frontend/rust-lib/flowy-database2/src/services/cell/cell_data_cache.rs index 07864351d4..b7606fedbd 100644 --- a/frontend/rust-lib/flowy-database2/src/services/cell/cell_data_cache.rs +++ b/frontend/rust-lib/flowy-database2/src/services/cell/cell_data_cache.rs @@ -1,6 +1,5 @@ -use parking_lot::RwLock; use std::sync::Arc; use crate::utils::cache::AnyTypeCache; -pub type CellCache = Arc>>; +pub type CellCache = Arc>; diff --git a/frontend/rust-lib/flowy-database2/src/services/database/database_editor.rs b/frontend/rust-lib/flowy-database2/src/services/database/database_editor.rs index 40ce8db243..bf913db50c 100644 --- a/frontend/rust-lib/flowy-database2/src/services/database/database_editor.rs +++ b/frontend/rust-lib/flowy-database2/src/services/database/database_editor.rs @@ -5,7 +5,7 @@ use crate::services::cell::{apply_cell_changeset, get_cell_protobuf, CellCache}; use crate::services::database::database_observe::*; use crate::services::database::util::database_view_setting_pb_from_view; use crate::services::database_view::{ - DatabaseViewChanged, DatabaseViewEditor, DatabaseViewOperation, DatabaseViews, EditorByViewId, + DatabaseViewChanged, DatabaseViewOperation, DatabaseViews, EditorByViewId, }; use crate::services::field::{ default_type_option_data_from_type, select_type_option_from_field, transform_type_option, @@ -19,42 +19,48 @@ use 
crate::services::group::{default_group_setting, GroupChangeset, GroupSetting use crate::services::share::csv::{CSVExport, CSVFormat}; use crate::services::sort::Sort; use crate::utils::cache::AnyTypeCache; -use collab_database::database::MutexDatabase; +use crate::DatabaseUser; +use async_trait::async_trait; +use collab_database::database::Database; use collab_database::fields::{Field, TypeOptionData}; use collab_database::rows::{Cell, Cells, Row, RowCell, RowDetail, RowId}; use collab_database::views::{ DatabaseLayout, DatabaseView, FilterMap, LayoutSetting, OrderObjectPosition, }; +use collab_entity::CollabType; +use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig}; use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult}; use flowy_notification::DebounceNotificationSender; use lib_infra::box_any::BoxAny; -use lib_infra::future::{to_fut, Fut, FutureResult}; use lib_infra::priority_task::TaskDispatcher; use lib_infra::util::timestamp; use std::collections::HashMap; use std::sync::Arc; use tokio::sync::{broadcast, RwLock}; -use tracing::{event, instrument, warn}; +use tracing::{error, event, instrument, trace, warn}; #[derive(Clone)] pub struct DatabaseEditor { - database: Arc, + pub(crate) database: Arc>, pub cell_cache: CellCache, database_views: Arc, #[allow(dead_code)] /// Used to send notification to the frontend. 
notification_sender: Arc, + user: Arc, + collab_builder: Arc, } impl DatabaseEditor { pub async fn new( - database: Arc, + user: Arc, + database: Arc>, task_scheduler: Arc>, + collab_builder: Arc, ) -> FlowyResult { let notification_sender = Arc::new(DebounceNotificationSender::new(200)); let cell_cache = AnyTypeCache::::new(); - let database_id = database.lock().get_database_id(); - + let database_id = database.read().await.get_database_id(); // Receive database sync state and send to frontend via the notification observe_sync_state(&database_id, &database).await; // observe_view_change(&database_id, &database).await; @@ -81,11 +87,26 @@ impl DatabaseEditor { .await?, ); + let collab_object = collab_builder.collab_object( + &user.workspace_id()?, + user.user_id()?, + &database_id, + CollabType::Database, + )?; + + let database = collab_builder.finalize( + collab_object, + CollabBuilderConfig::default(), + database.clone(), + )?; + Ok(Self { + user, database, cell_cache, database_views, notification_sender, + collab_builder, }) } @@ -93,18 +114,19 @@ impl DatabaseEditor { self.database_views.close_view(view_id).await; } - pub fn get_row_ids(&self) -> Vec { + pub async fn get_row_ids(&self) -> Vec { self .database - .lock() - .block - .rows - .iter() - .map(|entry| entry.key().clone()) + .read() + .await + .get_database_rows() + .await + .into_iter() + .map(|entry| entry.id) .collect() } - pub async fn num_views(&self) -> usize { + pub async fn num_of_opening_views(&self) -> usize { self.database_views.num_editors().await } @@ -143,8 +165,8 @@ impl DatabaseEditor { Ok(view_editor.notifier.subscribe()) } - pub fn get_field(&self, field_id: &str) -> Option { - self.database.lock().fields.get_field(field_id) + pub async fn get_field(&self, field_id: &str) -> Option { + self.database.read().await.get_field(field_id) } pub async fn set_group_by_field( @@ -156,15 +178,15 @@ impl DatabaseEditor { let old_group_settings: Vec; let mut setting_content = "".to_string(); { - 
let database = self.database.lock(); - let field = database.fields.get_field(field_id); + let mut database = self.database.write().await; + let field = database.get_field(field_id); old_group_settings = database.get_all_group_setting(view_id); if let Some(field) = field { let field_type = FieldType::from(field.field_type); setting_content = group_config_pb_to_json_str(data, &field_type)?; let mut group_setting = default_group_setting(&field); group_setting.content = setting_content.clone(); - database.views.update_database_view(view_id, |view| { + database.update_database_view(view_id, |view| { view.set_groups(vec![group_setting.into()]); }); } @@ -201,7 +223,7 @@ impl DatabaseEditor { /// will be the reference view ids and the inline view id. Otherwise, the return value will /// be the view id. pub async fn delete_database_view(&self, view_id: &str) -> FlowyResult> { - Ok(self.database.lock().delete_view(view_id)) + Ok(self.database.write().await.delete_view(view_id)) } pub async fn update_group( @@ -295,11 +317,10 @@ impl DatabaseEditor { /// Returns a list of fields of the view. /// If `field_ids` is not provided, all the fields will be returned in the order of the field that /// defined in the view. Otherwise, the fields will be returned in the order of the `field_ids`. 
- pub fn get_fields(&self, view_id: &str, field_ids: Option>) -> Vec { - let database = self.database.lock(); + pub async fn get_fields(&self, view_id: &str, field_ids: Option>) -> Vec { + let database = self.database.read().await; let field_ids = field_ids.unwrap_or_else(|| { database - .fields .get_all_field_orders() .into_iter() .map(|field| field.id) @@ -309,22 +330,19 @@ impl DatabaseEditor { } pub async fn update_field(&self, params: FieldChangesetParams) -> FlowyResult<()> { - self - .database - .lock() - .fields - .update_field(¶ms.field_id, |update| { - update.set_name_if_not_none(params.name); - }); - notify_did_update_database_field(&self.database, ¶ms.field_id)?; + let mut database = self.database.write().await; + database.update_field(¶ms.field_id, |update| { + update.set_name_if_not_none(params.name); + }); + notify_did_update_database_field(&database, ¶ms.field_id)?; Ok(()) } pub async fn delete_field(&self, field_id: &str) -> FlowyResult<()> { let is_primary = self .database - .lock() - .fields + .write() + .await .get_field(field_id) .map(|field| field.is_primary) .unwrap_or(false); @@ -337,7 +355,7 @@ impl DatabaseEditor { } let database_id = { - let database = self.database.lock(); + let mut database = self.database.write().await; database.delete_field(field_id); database.get_database_id() }; @@ -355,6 +373,7 @@ impl DatabaseEditor { pub async fn clear_field(&self, view_id: &str, field_id: &str) -> FlowyResult<()> { let field_type: FieldType = self .get_field(field_id) + .await .map(|field| field.field_type.into()) .unwrap_or_default(); @@ -385,8 +404,17 @@ impl DatabaseEditor { old_field: Field, ) -> FlowyResult<()> { let view_editors = self.database_views.editors().await; - update_field_type_option_fn(&self.database, &view_editors, type_option_data, old_field).await?; + { + let mut database = self.database.write().await; + update_field_type_option_fn(&mut database, type_option_data, &old_field).await?; + drop(database); + } + for view_editor in 
view_editors { + view_editor + .v_did_update_field_type_option(&old_field) + .await?; + } Ok(()) } @@ -395,7 +423,8 @@ impl DatabaseEditor { field_id: &str, new_field_type: FieldType, ) -> FlowyResult<()> { - let field = self.database.lock().fields.get_field(field_id); + let mut database = self.database.write().await; + let field = database.get_field(field_id); match field { None => {}, Some(field) => { @@ -418,15 +447,11 @@ impl DatabaseEditor { old_type_option_data, new_type_option_data, ); - self - .database - .lock() - .fields - .update_field(field_id, |update| { - update - .set_field_type(new_field_type.into()) - .set_type_option(new_field_type.into(), Some(transformed_type_option)); - }); + database.update_field(field_id, |update| { + update + .set_field_type(new_field_type.into()) + .set_type_option(new_field_type.into(), Some(transformed_type_option)); + }); for view in self.database_views.editors().await { view.v_did_update_field_type(field_id, new_field_type).await; @@ -434,15 +459,13 @@ impl DatabaseEditor { }, } - notify_did_update_database_field(&self.database, field_id)?; + notify_did_update_database_field(&database, field_id)?; Ok(()) } pub async fn duplicate_field(&self, view_id: &str, field_id: &str) -> FlowyResult<()> { - let is_primary = self - .database - .lock() - .fields + let mut database = self.database.write().await; + let is_primary = database .get_field(field_id) .map(|field| field.is_primary) .unwrap_or(false); @@ -454,10 +477,10 @@ impl DatabaseEditor { )); } - let value = self - .database - .lock() - .duplicate_field(view_id, field_id, |field| format!("{} (copy)", field.name)); + let value = + database.duplicate_field(view_id, field_id, |field| format!("{} (copy)", field.name)); + drop(database); + if let Some((index, duplicated_field)) = value { let _ = self .notify_did_insert_database_field(duplicated_field.clone(), index) @@ -478,20 +501,16 @@ impl DatabaseEditor { pub async fn duplicate_row(&self, view_id: &str, row_id: &RowId) -> 
FlowyResult<()> { let (row_detail, index) = { - let database = self.database.lock(); + let mut database = self.database.write().await; let params = database .duplicate_row(row_id) + .await .ok_or_else(|| FlowyError::internal().with_context("error while copying row"))?; - let (index, row_order) = database - .create_row_in_view(view_id, params) - .ok_or_else(|| { - FlowyError::internal().with_context("error while inserting duplicated row") - })?; - + let (index, row_order) = database.create_row_in_view(view_id, params); tracing::trace!("duplicated row: {:?} at {}", row_order, index); - let row_detail = database.get_row_detail(&row_order.id); + let row_detail = database.get_row_detail(&row_order.id).await; (row_detail, index) }; @@ -511,14 +530,14 @@ impl DatabaseEditor { from_row_id: RowId, to_row_id: RowId, ) -> FlowyResult<()> { - let database = self.database.lock(); + let mut database = self.database.write().await; - let row_detail = database.get_row_detail(&from_row_id).ok_or_else(|| { + let row_detail = database.get_row_detail(&from_row_id).await.ok_or_else(|| { let msg = format!("Cannot find row {}", from_row_id); FlowyError::internal().with_context(msg) })?; - database.views.update_database_view(view_id, |view| { + database.update_database_view(view_id, |view| { view.move_row_order(&from_row_id, &to_row_id); }); @@ -546,20 +565,17 @@ impl DatabaseEditor { open_after_create: _, } = view_editor.v_will_create_row(params).await?; - let result = self - .database - .lock() - .create_row_in_view(&view_editor.view_id, collab_params); + let mut database = self.database.write().await; + let (index, order_id) = database.create_row_in_view(&view_editor.view_id, collab_params); + let row_detail = database.get_row_detail(&order_id.id).await; + drop(database); // Explicitly release the lock here - if let Some((index, row_order)) = result { - tracing::trace!("created row: {:?} at {}", row_order, index); - let row_detail = self.database.lock().get_row_detail(&row_order.id); - 
if let Some(row_detail) = row_detail { - for view in self.database_views.editors().await { - view.v_did_create_row(&row_detail, index).await; - } - return Ok(Some(row_detail)); + if let Some(row_detail) = row_detail { + trace!("created row: {:?} at {}", row_detail, index); + for view in self.database_views.editors().await { + view.v_did_create_row(&row_detail, index).await; } + return Ok(Some(row_detail)); } Ok(None) @@ -579,7 +595,7 @@ impl DatabaseEditor { .and_then(|data| type_option_data_from_pb(data, ¶ms.field_type).ok()) .unwrap_or(default_type_option_data_from_type(params.field_type)); - let (index, field) = self.database.lock().create_field_with_mut( + let (index, field) = self.database.write().await.create_field_with_mut( ¶ms.view_id, name, params.field_type.into(), @@ -601,21 +617,16 @@ impl DatabaseEditor { pub async fn move_field(&self, params: MoveFieldParams) -> FlowyResult<()> { let (field, new_index) = { - let database = self.database.lock(); + let mut database = self.database.write().await; - let field = database - .fields - .get_field(¶ms.from_field_id) - .ok_or_else(|| { - let msg = format!("Field with id: {} not found", ¶ms.from_field_id); - FlowyError::internal().with_context(msg) - })?; + let field = database.get_field(¶ms.from_field_id).ok_or_else(|| { + let msg = format!("Field with id: {} not found", ¶ms.from_field_id); + FlowyError::internal().with_context(msg) + })?; - database - .views - .update_database_view(¶ms.view_id, |view_update| { - view_update.move_field_order(¶ms.from_field_id, ¶ms.to_field_id); - }); + database.update_database_view(¶ms.view_id, |view_update| { + view_update.move_field_order(¶ms.from_field_id, ¶ms.to_field_id); + }); let new_index = database.index_of_field(¶ms.view_id, ¶ms.from_field_id); @@ -648,18 +659,49 @@ impl DatabaseEditor { Ok(view_editor.v_get_rows().await) } - pub fn get_row(&self, view_id: &str, row_id: &RowId) -> Option { - if self.database.lock().views.is_row_exist(view_id, row_id) { - 
Some(self.database.lock().get_row(row_id)) + pub async fn get_row(&self, view_id: &str, row_id: &RowId) -> Option { + let database = self.database.read().await; + if database.contains_row(view_id, row_id) { + Some(database.get_row(row_id).await) } else { None } } - pub fn get_row_meta(&self, view_id: &str, row_id: &RowId) -> Option { - if self.database.lock().views.is_row_exist(view_id, row_id) { - let row_meta = self.database.lock().get_row_meta(row_id)?; - let row_document_id = self.database.lock().get_row_document_id(row_id)?; + pub async fn init_database_row(&self, row_id: &RowId) -> FlowyResult<()> { + let database_row = self + .database + .read() + .await + .get_row_collab(row_id) + .ok_or_else(|| { + FlowyError::record_not_found() + .with_context(format!("The row:{} in database not found", row_id)) + })?; + + let collab_object = self.collab_builder.collab_object( + &self.user.workspace_id()?, + self.user.user_id()?, + row_id, + CollabType::DatabaseRow, + )?; + + if let Err(err) = + self + .collab_builder + .finalize(collab_object, CollabBuilderConfig::default(), database_row) + { + error!("Failed to init database row: {}", err); + } + + Ok(()) + } + + pub async fn get_row_meta(&self, view_id: &str, row_id: &RowId) -> Option { + let database = self.database.read().await; + if database.contains_row(view_id, row_id) { + let row_meta = database.get_row_meta(row_id).await?; + let row_document_id = database.get_row_document_id(row_id)?; Some(RowMetaPB { id: row_id.clone().into_inner(), document_id: row_document_id, @@ -673,9 +715,10 @@ impl DatabaseEditor { } } - pub fn get_row_detail(&self, view_id: &str, row_id: &RowId) -> Option { - if self.database.lock().views.is_row_exist(view_id, row_id) { - self.database.lock().get_row_detail(row_id) + pub async fn get_row_detail(&self, view_id: &str, row_id: &RowId) -> Option { + let database = self.database.read().await; + if database.contains_row(view_id, row_id) { + database.get_row_detail(row_id).await } else { 
warn!("the row:{} is exist in view:{}", row_id.as_str(), view_id); None @@ -683,7 +726,7 @@ impl DatabaseEditor { } pub async fn delete_rows(&self, row_ids: &[RowId]) { - let rows = self.database.lock().remove_rows(row_ids); + let rows = self.database.write().await.remove_rows(row_ids).await; for row in rows { tracing::trace!("Did delete row:{:?}", row); @@ -695,15 +738,20 @@ impl DatabaseEditor { #[tracing::instrument(level = "trace", skip_all)] pub async fn update_row_meta(&self, row_id: &RowId, changeset: UpdateRowMetaParams) { - self.database.lock().update_row_meta(row_id, |meta_update| { - meta_update - .insert_cover_if_not_none(changeset.cover_url) - .insert_icon_if_not_none(changeset.icon_url) - .update_is_document_empty_if_not_none(changeset.is_document_empty); - }); + let mut database = self.database.write().await; + database + .update_row_meta(row_id, |meta_update| { + meta_update + .insert_cover_if_not_none(changeset.cover_url) + .insert_icon_if_not_none(changeset.icon_url) + .update_is_document_empty_if_not_none(changeset.is_document_empty); + }) + .await; // Use the temporary row meta to get rid of the lock that not implement the `Send` or 'Sync' trait. - let row_detail = self.database.lock().get_row_detail(row_id); + let row_detail = database.get_row_detail(row_id).await; + drop(database); + if let Some(row_detail) = row_detail { for view in self.database_views.editors().await { view.v_did_update_row_meta(row_id, &row_detail).await; @@ -722,13 +770,13 @@ impl DatabaseEditor { } pub async fn get_cell(&self, field_id: &str, row_id: &RowId) -> Option { - let database = self.database.lock(); - let field = database.fields.get_field(field_id)?; + let database = self.database.read().await; + let field = database.get_field(field_id)?; let field_type = FieldType::from(field.field_type); // If the cell data is referenced, return the reference data. Otherwise, return an empty cell. 
match field_type { FieldType::LastEditedTime | FieldType::CreatedTime => { - let row = database.get_row(row_id); + let row = database.get_row(row_id).await; let wrapped_cell_data = if field_type.is_created_time() { TimestampCellDataWrapper::from((field_type, TimestampCellData::new(row.created_at))) } else { @@ -736,14 +784,14 @@ impl DatabaseEditor { }; Some(Cell::from(wrapped_cell_data)) }, - _ => database.get_cell(field_id, row_id).cell, + _ => database.get_cell(field_id, row_id).await.cell, } } pub async fn get_cell_pb(&self, field_id: &str, row_id: &RowId) -> Option { let (field, cell) = { let cell = self.get_cell(field_id, row_id).await?; - let field = self.database.lock().fields.get_field(field_id)?; + let field = self.database.read().await.get_field(field_id)?; (field, cell) }; @@ -758,12 +806,13 @@ impl DatabaseEditor { } pub async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec { - let database = self.database.lock(); - if let Some(field) = database.fields.get_field(field_id) { + let database = self.database.read().await; + if let Some(field) = database.get_field(field_id) { let field_type = FieldType::from(field.field_type); match field_type { FieldType::LastEditedTime | FieldType::CreatedTime => database .get_rows_for_view(view_id) + .await .into_iter() .map(|row| { let data = if field_type.is_created_time() { @@ -777,7 +826,7 @@ impl DatabaseEditor { } }) .collect(), - _ => database.get_cells_for_field(view_id, field_id), + _ => database.get_cells_for_field(view_id, field_id).await, } } else { vec![] @@ -793,15 +842,15 @@ impl DatabaseEditor { cell_changeset: BoxAny, ) -> FlowyResult<()> { let (field, cell) = { - let database = self.database.lock(); - let field = match database.fields.get_field(field_id) { + let database = self.database.read().await; + let field = match database.get_field(field_id) { Some(field) => Ok(field), None => { let msg = format!("Field with id:{} not found", &field_id); 
Err(FlowyError::internal().with_context(msg)) }, }?; - (field, database.get_cell(field_id, row_id).cell) + (field, database.get_cell(field_id, row_id).await.cell) }; let new_cell = @@ -812,10 +861,12 @@ impl DatabaseEditor { async fn update_last_modified_time(&self, row_detail: RowDetail, view_id: &str) { self .database - .lock() - .update_row(&row_detail.row.id, |row_update| { + .write() + .await + .update_row(row_detail.row.id.clone(), |row_update| { row_update.set_last_modified(timestamp()); - }); + }) + .await; let editor = self.database_views.get_view_editor(view_id).await; if let Ok(editor) = editor { @@ -835,12 +886,17 @@ impl DatabaseEditor { new_cell: Cell, ) -> FlowyResult<()> { // Get the old row before updating the cell. It would be better to get the old cell - let old_row = { self.get_row_detail(view_id, row_id) }; - self.database.lock().update_row(row_id, |row_update| { - row_update.update_cells(|cell_update| { - cell_update.insert(field_id, new_cell); - }); - }); + let old_row = self.get_row_detail(view_id, row_id).await; + self + .database + .write() + .await + .update_row(row_id.clone(), |row_update| { + row_update.update_cells(|cell_update| { + cell_update.insert(field_id, new_cell); + }); + }) + .await; self .did_update_row(view_id, row_id, field_id, old_row) @@ -851,13 +907,18 @@ impl DatabaseEditor { pub async fn clear_cell(&self, view_id: &str, row_id: RowId, field_id: &str) -> FlowyResult<()> { // Get the old row before updating the cell. 
It would be better to get the old cell - let old_row = { self.get_row_detail(view_id, &row_id) }; + let old_row = self.get_row_detail(view_id, &row_id).await; - self.database.lock().update_row(&row_id, |row_update| { - row_update.update_cells(|cell_update| { - cell_update.clear(field_id); - }); - }); + self + .database + .write() + .await + .update_row(row_id.clone(), |row_update| { + row_update.update_cells(|cell_update| { + cell_update.clear(field_id); + }); + }) + .await; self .did_update_row(view_id, &row_id, field_id, old_row) @@ -873,7 +934,7 @@ impl DatabaseEditor { field_id: &str, old_row: Option, ) { - let option_row = self.get_row_detail(view_id, row_id); + let option_row = self.get_row_detail(view_id, row_id).await; if let Some(new_row_detail) = option_row { for view in self.database_views.editors().await { view @@ -883,14 +944,14 @@ impl DatabaseEditor { } } - pub fn get_auto_updated_fields_changesets( + pub async fn get_auto_updated_fields_changesets( &self, view_id: &str, row_id: RowId, ) -> Vec { // Get all auto updated fields. It will be used to notify the frontend // that the fields have been updated. - let auto_updated_fields = self.get_auto_updated_fields(view_id); + let auto_updated_fields = self.get_auto_updated_fields(view_id).await; // Collect all the updated field's id. Notify the frontend that all of them have been updated. 
let auto_updated_field_ids = auto_updated_fields @@ -913,7 +974,7 @@ impl DatabaseEditor { field_id: &str, option_name: String, ) -> Option { - let field = self.database.lock().fields.get_field(field_id)?; + let field = self.database.read().await.get_field(field_id)?; let type_option = select_type_option_from_field(&field).ok()?; let select_option = type_option.create_option(&option_name); Some(SelectOptionPB::from(select_option)) @@ -928,15 +989,10 @@ impl DatabaseEditor { row_id: RowId, options: Vec, ) -> FlowyResult<()> { - let field = self - .database - .lock() - .fields - .get_field(field_id) - .ok_or_else(|| { - FlowyError::record_not_found() - .with_context(format!("Field with id:{} not found", &field_id)) - })?; + let mut database = self.database.write().await; + let field = database.get_field(field_id).ok_or_else(|| { + FlowyError::record_not_found().with_context(format!("Field with id:{} not found", &field_id)) + })?; debug_assert!(FieldType::from(field.field_type).is_select_option()); let mut type_option = select_type_option_from_field(&field)?; @@ -950,13 +1006,12 @@ impl DatabaseEditor { // Update the field's type option let view_editors = self.database_views.editors().await; - update_field_type_option_fn( - &self.database, - &view_editors, - type_option.to_type_option_data(), - field.clone(), - ) - .await?; + update_field_type_option_fn(&mut database, type_option.to_type_option_data(), &field).await?; + drop(database); + + for view_editor in view_editors { + view_editor.v_did_update_field_type_option(&field).await?; + } // Insert the options into the cell self @@ -972,7 +1027,8 @@ impl DatabaseEditor { row_id: RowId, options: Vec, ) -> FlowyResult<()> { - let field = match self.database.lock().fields.get_field(field_id) { + let mut database = self.database.write().await; + let field = match database.get_field(field_id) { Some(field) => Ok(field), None => { let msg = format!("Field with id:{} not found", &field_id); @@ -990,13 +1046,14 @@ impl 
DatabaseEditor { } let view_editors = self.database_views.editors().await; - update_field_type_option_fn( - &self.database, - &view_editors, - type_option.to_type_option_data(), - field.clone(), - ) - .await?; + update_field_type_option_fn(&mut database, type_option.to_type_option_data(), &field).await?; + + // Drop the database write lock ASAP + drop(database); + + for view_editor in view_editors { + view_editor.v_did_update_field_type_option(&field).await?; + } self .update_cell_with_changeset(view_id, &row_id, field_id, BoxAny::new(cell_changeset)) @@ -1013,8 +1070,8 @@ impl DatabaseEditor { ) -> FlowyResult<()> { let field = self .database - .lock() - .fields + .read() + .await .get_field(field_id) .ok_or_else(|| { FlowyError::record_not_found() @@ -1068,7 +1125,7 @@ impl DatabaseEditor { from_row: RowId, to_row: Option, ) -> FlowyResult<()> { - let row_detail = self.get_row_detail(view_id, &from_row); + let row_detail = self.get_row_detail(view_id, &from_row).await; match row_detail { None => { warn!( @@ -1100,9 +1157,14 @@ impl DatabaseEditor { } tracing::trace!("Row data changed: {:?}", row_changeset); - self.database.lock().update_row(&row_detail.row.id, |row| { - row.set_cells(Cells::from(row_changeset.cell_by_field_id.clone())); - }); + self + .database + .write() + .await + .update_row(row_detail.row.id, |row| { + row.set_cells(Cells::from(row_changeset.cell_by_field_id.clone())); + }) + .await; }, } @@ -1170,7 +1232,7 @@ impl DatabaseEditor { #[tracing::instrument(level = "trace", skip_all, err)] async fn notify_did_insert_database_field(&self, field: Field, index: usize) -> FlowyResult<()> { - let database_id = self.database.lock().get_database_id(); + let database_id = self.database.read().await.get_database_id(); let index_field = IndexFieldPB { field: FieldPB::new(field), index: index as i32, @@ -1184,7 +1246,7 @@ impl DatabaseEditor { &self, changeset: DatabaseFieldChangesetPB, ) -> FlowyResult<()> { - let views = 
self.database.lock().get_all_database_views_meta(); + let views = self.database.read().await.get_all_database_views_meta(); for view in views { send_notification(&view.id, DatabaseNotification::DidUpdateFields) .payload(changeset.clone()) @@ -1198,10 +1260,12 @@ impl DatabaseEditor { &self, view_id: &str, ) -> FlowyResult { - let view = - self.database.lock().get_view(view_id).ok_or_else(|| { - FlowyError::record_not_found().with_context("Can't find the database view") - })?; + let view = self + .database + .read() + .await + .get_view(view_id) + .ok_or_else(|| FlowyError::record_not_found().with_context("Can't find the database view"))?; Ok(database_view_setting_pb_from_view(view)) } @@ -1213,10 +1277,9 @@ impl DatabaseEditor { .ok_or_else(FlowyError::record_not_found)?; let rows = database_view.v_get_rows().await; let (database_id, fields, is_linked) = { - let database = self.database.lock(); + let database = self.database.read().await; let database_id = database.get_database_id(); let fields = database - .fields .get_all_field_orders() .into_iter() .map(FieldIdPB::from) @@ -1240,13 +1303,11 @@ impl DatabaseEditor { pub async fn export_csv(&self, style: CSVFormat) -> FlowyResult { let database = self.database.clone(); - let csv = tokio::task::spawn_blocking(move || { - let database_guard = database.lock(); - let csv = CSVExport.export_database(&database_guard, style)?; - Ok::(csv) - }) - .await - .map_err(internal_error)??; + let database_guard = database.read().await; + let csv = CSVExport + .export_database(&database_guard, style) + .await + .map_err(internal_error)?; Ok(csv) } @@ -1269,6 +1330,7 @@ impl DatabaseEditor { pub async fn get_all_field_settings(&self, view_id: &str) -> FlowyResult> { let field_ids = self .get_fields(view_id, None) + .await .iter() .map(|field| field.id.clone()) .collect(); @@ -1289,7 +1351,8 @@ impl DatabaseEditor { pub async fn get_related_database_id(&self, field_id: &str) -> FlowyResult { let mut field = self .database - .lock() 
+ .read() + .await .get_fields(Some(vec![field_id.to_string()])); let field = field.pop().ok_or(FlowyError::internal())?; @@ -1304,42 +1367,43 @@ impl DatabaseEditor { &self, row_ids: Option<&Vec>, ) -> FlowyResult> { - let primary_field = self.database.lock().fields.get_primary_field().unwrap(); + let database = self.database.read().await; + let primary_field = database.get_primary_field().unwrap(); let handler = TypeOptionCellExt::new(&primary_field, Some(self.cell_cache.clone())) .get_type_option_cell_data_handler_with_field_type(FieldType::RichText) .ok_or(FlowyError::internal())?; let row_data = { - let database = self.database.lock(); - let mut rows = database.get_database_rows(); + let mut rows = database.get_database_rows().await; if let Some(row_ids) = row_ids { rows.retain(|row| row_ids.contains(&row.id)); } - rows - .iter() - .map(|row| { - let title = database - .get_cell(&primary_field.id, &row.id) - .cell - .and_then(|cell| handler.handle_get_boxed_cell_data(&cell, &primary_field)) - .and_then(|cell_data| cell_data.unbox_or_none()) - .unwrap_or_else(|| StringCellData("".to_string())); + let mut row_data = vec![]; + for row in rows { + let title = database + .get_cell(&primary_field.id, &row.id) + .await + .cell + .and_then(|cell| handler.handle_get_boxed_cell_data(&cell, &primary_field)) + .and_then(|cell_data| cell_data.unbox_or_none()) + .unwrap_or_else(|| StringCellData("".to_string())); - RelatedRowDataPB { - row_id: row.id.to_string(), - name: title.0, - } + row_data.push(RelatedRowDataPB { + row_id: row.id.to_string(), + name: title.0, }) - .collect::>() + } + row_data }; Ok(row_data) } - fn get_auto_updated_fields(&self, view_id: &str) -> Vec { + async fn get_auto_updated_fields(&self, view_id: &str) -> Vec { self .database - .lock() + .read() + .await .get_fields_in_view(view_id, None) .into_iter() .filter(|f| FieldType::from(f.field_type).is_auto_update()) @@ -1348,45 +1412,48 @@ impl DatabaseEditor { /// Only expose this method for testing 
#[cfg(debug_assertions)] - pub fn get_mutex_database(&self) -> &MutexDatabase { + pub fn get_mutex_database(&self) -> &RwLock { &self.database } } struct DatabaseViewOperationImpl { - database: Arc, + database: Arc>, task_scheduler: Arc>, cell_cache: CellCache, editor_by_view_id: Arc>, } +#[async_trait] impl DatabaseViewOperation for DatabaseViewOperationImpl { - fn get_database(&self) -> Arc { + fn get_database(&self) -> Arc> { self.database.clone() } - fn get_view(&self, view_id: &str) -> Fut> { - let view = self.database.lock().get_view(view_id); - to_fut(async move { view }) + async fn get_view(&self, view_id: &str) -> Option { + self.database.read().await.get_view(view_id) } - fn get_fields(&self, view_id: &str, field_ids: Option>) -> Fut> { - let fields = self.database.lock().get_fields_in_view(view_id, field_ids); - to_fut(async move { fields }) + async fn get_fields(&self, view_id: &str, field_ids: Option>) -> Vec { + self + .database + .read() + .await + .get_fields_in_view(view_id, field_ids) } - fn get_field(&self, field_id: &str) -> Option { - self.database.lock().fields.get_field(field_id) + async fn get_field(&self, field_id: &str) -> Option { + self.database.read().await.get_field(field_id) } - fn create_field( + async fn create_field( &self, view_id: &str, name: &str, field_type: FieldType, type_option_data: TypeOptionData, - ) -> Fut { - let (_, field) = self.database.lock().create_field_with_mut( + ) -> Field { + let (_, field) = self.database.write().await.create_field_with_mut( view_id, name.to_string(), field_type.into(), @@ -1398,199 +1465,219 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl { }, default_field_settings_by_layout_map(), ); - to_fut(async move { field }) + field } - fn update_field( + async fn update_field( &self, type_option_data: TypeOptionData, old_field: Field, - ) -> FutureResult<(), FlowyError> { - let weak_editor_by_view_id = Arc::downgrade(&self.editor_by_view_id); - let weak_database = 
Arc::downgrade(&self.database); - FutureResult::new(async move { - if let (Some(database), Some(editor_by_view_id)) = - (weak_database.upgrade(), weak_editor_by_view_id.upgrade()) - { - let view_editors = editor_by_view_id.read().await.values().cloned().collect(); - let _ = - update_field_type_option_fn(&database, &view_editors, type_option_data, old_field).await; - } - Ok(()) - }) - } - - fn get_primary_field(&self) -> Fut>> { - let field = self - .database - .lock() - .fields - .get_primary_field() - .map(Arc::new); - to_fut(async move { field }) - } - - fn index_of_row(&self, view_id: &str, row_id: &RowId) -> Fut> { - let index = self.database.lock().index_of_row(view_id, row_id); - to_fut(async move { index }) - } - - fn get_row(&self, view_id: &str, row_id: &RowId) -> Fut)>> { - let index = self.database.lock().index_of_row(view_id, row_id); - let row_detail = self.database.lock().get_row_detail(row_id); - to_fut(async move { - match (index, row_detail) { - (Some(index), Some(row_detail)) => Some((index, Arc::new(row_detail))), - _ => None, - } - }) - } - - fn get_rows(&self, view_id: &str) -> Fut>> { - let database = self.database.clone(); - let view_id = view_id.to_string(); - to_fut(async move { - let cloned_database = database.clone(); - // offloads the blocking operation to a thread where blocking is acceptable. 
This prevents - // blocking the main asynchronous runtime - let row_orders = tokio::task::spawn_blocking(move || { - cloned_database.lock().get_row_orders_for_view(&view_id) - }) + ) -> Result<(), FlowyError> { + let view_editors = self + .editor_by_view_id + .read() .await - .unwrap_or_default(); - tokio::task::yield_now().await; + .values() + .cloned() + .collect::>(); - let mut all_rows = vec![]; + // + { + let mut database = self.database.write().await; + let _ = update_field_type_option_fn(&mut database, type_option_data, &old_field).await; + drop(database); + } - // Loading the rows in chunks of 10 rows in order to prevent blocking the main asynchronous runtime - for chunk in row_orders.chunks(10) { - let cloned_database = database.clone(); - let chunk = chunk.to_vec(); - let rows = tokio::task::spawn_blocking(move || { - let orders = cloned_database.lock().get_rows_from_row_orders(&chunk); - let lock_guard = cloned_database.lock(); - orders - .into_iter() - .flat_map(|row| lock_guard.get_row_detail(&row.id)) - .collect::>() - }) - .await - .unwrap_or_default(); + for view_editor in view_editors { + view_editor + .v_did_update_field_type_option(&old_field) + .await?; + } + Ok(()) + } - all_rows.extend(rows); - tokio::task::yield_now().await; + async fn get_primary_field(&self) -> Option> { + self.database.read().await.get_primary_field().map(Arc::new) + } + + async fn index_of_row(&self, view_id: &str, row_id: &RowId) -> Option { + self.database.read().await.index_of_row(view_id, row_id) + } + + async fn get_row(&self, view_id: &str, row_id: &RowId) -> Option<(usize, Arc)> { + let database = self.database.read().await; + let index = database.index_of_row(view_id, row_id); + let row_detail = database.get_row_detail(row_id).await; + match (index, row_detail) { + (Some(index), Some(row_detail)) => Some((index, Arc::new(row_detail))), + _ => None, + } + } + + async fn get_rows(&self, view_id: &str) -> Vec> { + let view_id = view_id.to_string(); + let row_orders = 
self.database.read().await.get_row_orders_for_view(&view_id); + trace!("total row orders: {}", row_orders.len()); + + let mut row_details_list = vec![]; + // Loading the rows in chunks of 10 rows in order to prevent blocking the main asynchronous runtime + const CHUNK_SIZE: usize = 10; + for chunk in row_orders.chunks(CHUNK_SIZE) { + let database_read_guard = self.database.read().await; + let chunk = chunk.to_vec(); + let rows = database_read_guard.get_rows_from_row_orders(&chunk).await; + for row in rows { + match database_read_guard.get_row_detail(&row.id).await { + None => warn!("Failed to get row detail for row: {}", row.id.as_str()), + Some(row_details) => { + row_details_list.push(row_details); + }, + } } - - all_rows.into_iter().map(Arc::new).collect() - }) + drop(database_read_guard); + tokio::task::yield_now().await; + } + trace!("total row details: {}", row_details_list.len()); + row_details_list.into_iter().map(Arc::new).collect() } - fn remove_row(&self, row_id: &RowId) -> Option { - self.database.lock().remove_row(row_id) + async fn remove_row(&self, row_id: &RowId) -> Option { + self.database.write().await.remove_row(row_id).await } - fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut>> { - let cells = self.database.lock().get_cells_for_field(view_id, field_id); - to_fut(async move { cells.into_iter().map(Arc::new).collect() }) + async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec> { + let cells = self + .database + .read() + .await + .get_cells_for_field(view_id, field_id) + .await; + cells.into_iter().map(Arc::new).collect() } - fn get_cell_in_row(&self, field_id: &str, row_id: &RowId) -> Fut> { - let cell = self.database.lock().get_cell(field_id, row_id); - to_fut(async move { Arc::new(cell) }) + async fn get_cell_in_row(&self, field_id: &str, row_id: &RowId) -> Arc { + let cell = self.database.read().await.get_cell(field_id, row_id).await; + cell.into() } - fn get_layout_for_view(&self, view_id: &str) -> 
DatabaseLayout { - self.database.lock().views.get_database_view_layout(view_id) + async fn get_layout_for_view(&self, view_id: &str) -> DatabaseLayout { + self.database.read().await.get_database_view_layout(view_id) } - fn get_group_setting(&self, view_id: &str) -> Vec { - self.database.lock().get_all_group_setting(view_id) + async fn get_group_setting(&self, view_id: &str) -> Vec { + self.database.read().await.get_all_group_setting(view_id) } - fn insert_group_setting(&self, view_id: &str, setting: GroupSetting) { - self.database.lock().insert_group_setting(view_id, setting); - } - - fn get_sort(&self, view_id: &str, sort_id: &str) -> Option { - self.database.lock().get_sort::(view_id, sort_id) - } - - fn insert_sort(&self, view_id: &str, sort: Sort) { - self.database.lock().insert_sort(view_id, sort); - } - - fn move_sort(&self, view_id: &str, from_sort_id: &str, to_sort_id: &str) { + async fn insert_group_setting(&self, view_id: &str, setting: GroupSetting) { self .database - .lock() + .write() + .await + .insert_group_setting(view_id, setting); + } + + async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option { + self + .database + .read() + .await + .get_sort::(view_id, sort_id) + } + + async fn insert_sort(&self, view_id: &str, sort: Sort) { + self.database.write().await.insert_sort(view_id, sort); + } + + async fn move_sort(&self, view_id: &str, from_sort_id: &str, to_sort_id: &str) { + self + .database + .write() + .await .move_sort(view_id, from_sort_id, to_sort_id); } - fn remove_sort(&self, view_id: &str, sort_id: &str) { - self.database.lock().remove_sort(view_id, sort_id); + async fn remove_sort(&self, view_id: &str, sort_id: &str) { + self.database.write().await.remove_sort(view_id, sort_id); } - fn get_all_sorts(&self, view_id: &str) -> Vec { - self.database.lock().get_all_sorts::(view_id) + async fn get_all_sorts(&self, view_id: &str) -> Vec { + self.database.read().await.get_all_sorts::(view_id) } - fn remove_all_sorts(&self, view_id: &str) 
{ - self.database.lock().remove_all_sorts(view_id); + async fn remove_all_sorts(&self, view_id: &str) { + self.database.write().await.remove_all_sorts(view_id); } - fn get_all_calculations(&self, view_id: &str) -> Vec> { + async fn get_all_calculations(&self, view_id: &str) -> Vec> { self .database - .lock() + .read() + .await .get_all_calculations(view_id) .into_iter() .map(Arc::new) .collect() } - fn get_calculation(&self, view_id: &str, field_id: &str) -> Option { + async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option { self .database - .lock() + .read() + .await .get_calculation::(view_id, field_id) } - fn get_all_filters(&self, view_id: &str) -> Vec { + async fn get_all_filters(&self, view_id: &str) -> Vec { self .database - .lock() + .read() + .await .get_all_filters(view_id) .into_iter() .collect() } - fn delete_filter(&self, view_id: &str, filter_id: &str) { - self.database.lock().remove_filter(view_id, filter_id); - } - - fn insert_filter(&self, view_id: &str, filter: Filter) { - self.database.lock().insert_filter(view_id, &filter); - } - - fn save_filters(&self, view_id: &str, filters: &[Filter]) { + async fn delete_filter(&self, view_id: &str, filter_id: &str) { self .database - .lock() + .write() + .await + .remove_filter(view_id, filter_id); + } + + async fn insert_filter(&self, view_id: &str, filter: Filter) { + self.database.write().await.insert_filter(view_id, &filter); + } + + async fn save_filters(&self, view_id: &str, filters: &[Filter]) { + self + .database + .write() + .await .save_filters::(view_id, filters); } - fn get_filter(&self, view_id: &str, filter_id: &str) -> Option { + async fn get_filter(&self, view_id: &str, filter_id: &str) -> Option { self .database - .lock() + .read() + .await .get_filter::(view_id, filter_id) } - fn get_layout_setting(&self, view_id: &str, layout_ty: &DatabaseLayout) -> Option { - self.database.lock().get_layout_setting(view_id, layout_ty) + async fn get_layout_setting( + &self, + view_id: 
&str, + layout_ty: &DatabaseLayout, + ) -> Option { + self + .database + .read() + .await + .get_layout_setting(view_id, layout_ty) } - fn insert_layout_setting( + async fn insert_layout_setting( &self, view_id: &str, layout_ty: &DatabaseLayout, @@ -1598,14 +1685,16 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl { ) { self .database - .lock() + .write() + .await .insert_layout_setting(view_id, layout_ty, layout_setting); } - fn update_layout_type(&self, view_id: &str, layout_type: &DatabaseLayout) { + async fn update_layout_type(&self, view_id: &str, layout_type: &DatabaseLayout) { self .database - .lock() + .write() + .await .update_layout_type(view_id, layout_type); } @@ -1620,14 +1709,14 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl { TypeOptionCellExt::new(field, Some(self.cell_cache.clone())).get_type_option_cell_data_handler() } - fn get_field_settings( + async fn get_field_settings( &self, view_id: &str, field_ids: &[String], ) -> HashMap { let (layout_type, field_settings_map) = { - let database = self.database.lock(); - let layout_type = database.views.get_database_view_layout(view_id); + let database = self.database.read().await; + let layout_type = database.get_database_view_layout(view_id); let field_settings_map = database.get_field_settings(view_id, Some(field_ids)); (layout_type, field_settings_map) }; @@ -1658,19 +1747,20 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl { field_settings } - fn update_field_settings(&self, params: FieldSettingsChangesetPB) { - let field_settings_map = self.get_field_settings(¶ms.view_id, &[params.field_id.clone()]); + async fn update_field_settings(&self, params: FieldSettingsChangesetPB) { + let field_settings_map = self + .get_field_settings(¶ms.view_id, &[params.field_id.clone()]) + .await; - let field_settings = field_settings_map - .get(¶ms.field_id) - .cloned() - .unwrap_or_else(|| { - let layout_type = self.get_layout_for_view(¶ms.view_id); + let field_settings = match 
field_settings_map.get(¶ms.field_id).cloned() { + Some(field_settings) => field_settings, + None => { + let layout_type = self.get_layout_for_view(¶ms.view_id).await; let default_field_settings = default_field_settings_by_layout_map(); let default_field_settings = default_field_settings.get(&layout_type).unwrap(); - FieldSettings::from_any_map(¶ms.field_id, layout_type, default_field_settings) - }); + }, + }; let new_field_settings = FieldSettings { visibility: params @@ -1683,7 +1773,7 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl { ..field_settings }; - self.database.lock().update_field_settings( + self.database.write().await.update_field_settings( ¶ms.view_id, Some(vec![params.field_id]), new_field_settings.clone(), @@ -1697,70 +1787,59 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl { .send() } - fn update_calculation(&self, view_id: &str, calculation: Calculation) { + async fn update_calculation(&self, view_id: &str, calculation: Calculation) { self .database - .lock() + .write() + .await .update_calculation(view_id, calculation) } - fn remove_calculation(&self, view_id: &str, field_id: &str) { - self.database.lock().remove_calculation(view_id, field_id) + async fn remove_calculation(&self, view_id: &str, field_id: &str) { + self + .database + .write() + .await + .remove_calculation(view_id, field_id) } } #[tracing::instrument(level = "trace", skip_all, err)] pub async fn update_field_type_option_fn( - database: &Arc, - view_editors: &Vec>, + database: &mut Database, type_option_data: TypeOptionData, - old_field: Field, + old_field: &Field, ) -> FlowyResult<()> { if type_option_data.is_empty() { warn!("Update type option with empty data"); return Ok(()); } let field_type = FieldType::from(old_field.field_type); - database - .lock() - .fields - .update_field(&old_field.id, |update| { - if old_field.is_primary { - warn!("Cannot update primary field type"); - } else { - update.update_type_options(|type_options_update| { - event!( - 
tracing::Level::TRACE, - "insert type option to field type: {:?}, {:?}", - field_type, - type_option_data - ); - type_options_update.insert(&field_type.to_string(), type_option_data); - }); - } - }); + database.update_field(&old_field.id, |update| { + if old_field.is_primary { + warn!("Cannot update primary field type"); + } else { + update.update_type_options(|type_options_update| { + event!( + tracing::Level::TRACE, + "insert type option to field type: {:?}, {:?}", + field_type, + type_option_data + ); + type_options_update.insert(&field_type.to_string(), type_option_data); + }); + } + }); let _ = notify_did_update_database_field(database, &old_field.id); - for view_editor in view_editors { - view_editor - .v_did_update_field_type_option(&old_field) - .await?; - } - Ok(()) } #[tracing::instrument(level = "trace", skip_all, err)] -fn notify_did_update_database_field( - database: &Arc, - field_id: &str, -) -> FlowyResult<()> { +fn notify_did_update_database_field(database: &Database, field_id: &str) -> FlowyResult<()> { let (database_id, field, views) = { - let database = database - .try_lock() - .ok_or(FlowyError::internal().with_context("fail to acquire the lock of database"))?; let database_id = database.get_database_id(); - let field = database.fields.get_field(field_id); + let field = database.get_field(field_id); let views = database.get_all_database_views_meta(); (database_id, field, views) }; diff --git a/frontend/rust-lib/flowy-database2/src/services/database/database_observe.rs b/frontend/rust-lib/flowy-database2/src/services/database/database_observe.rs index 682001948d..b25d365ab0 100644 --- a/frontend/rust-lib/flowy-database2/src/services/database/database_observe.rs +++ b/frontend/rust-lib/flowy-database2/src/services/database/database_observe.rs @@ -2,7 +2,7 @@ use crate::entities::{DatabaseSyncStatePB, DidFetchRowPB, RowsChangePB}; use crate::notification::{send_notification, DatabaseNotification, DATABASE_OBSERVABLE_SOURCE}; use 
crate::services::database::UpdatedRow; use collab_database::blocks::BlockEvent; -use collab_database::database::MutexDatabase; +use collab_database::database::Database; use collab_database::fields::FieldChange; use collab_database::rows::{RowChange, RowId}; use collab_database::views::DatabaseViewChange; @@ -10,11 +10,12 @@ use flowy_notification::{DebounceNotificationSender, NotificationBuilder}; use futures::StreamExt; use lib_dispatch::prelude::af_spawn; use std::sync::Arc; +use tokio::sync::RwLock; use tracing::{trace, warn}; -pub(crate) async fn observe_sync_state(database_id: &str, database: &Arc) { +pub(crate) async fn observe_sync_state(database_id: &str, database: &Arc>) { let weak_database = Arc::downgrade(database); - let mut sync_state = database.lock().subscribe_sync_state(); + let mut sync_state = database.read().await.subscribe_sync_state(); let database_id = database_id.to_string(); af_spawn(async move { while let Some(sync_state) = sync_state.next().await { @@ -35,13 +36,13 @@ pub(crate) async fn observe_sync_state(database_id: &str, database: &Arc, + database: &Arc>, notification_sender: &Arc, ) { let notification_sender = notification_sender.clone(); let database_id = database_id.to_string(); let weak_database = Arc::downgrade(database); - let mut row_change = database.lock().subscribe_row_change(); + let mut row_change = database.read().await.subscribe_row_change(); af_spawn(async move { while let Ok(row_change) = row_change.recv().await { if let Some(database) = weak_database.upgrade() { @@ -59,7 +60,7 @@ pub(crate) async fn observe_rows_change( let cell_id = format!("{}:{}", row_id, field_id); notify_cell(¬ification_sender, &cell_id); - let views = database.lock().get_all_database_views_meta(); + let views = database.read().await.get_all_database_views_meta(); for view in views { notify_row(¬ification_sender, &view.id, &field_id, &row_id); } @@ -75,10 +76,10 @@ pub(crate) async fn observe_rows_change( }); } #[allow(dead_code)] -pub(crate) 
async fn observe_field_change(database_id: &str, database: &Arc) { +pub(crate) async fn observe_field_change(database_id: &str, database: &Arc>) { let database_id = database_id.to_string(); let weak_database = Arc::downgrade(database); - let mut field_change = database.lock().subscribe_field_change(); + let mut field_change = database.read().await.subscribe_field_change(); af_spawn(async move { while let Ok(field_change) = field_change.recv().await { if weak_database.upgrade().is_none() { @@ -100,10 +101,10 @@ pub(crate) async fn observe_field_change(database_id: &str, database: &Arc) { +pub(crate) async fn observe_view_change(database_id: &str, database: &Arc>) { let database_id = database_id.to_string(); let weak_database = Arc::downgrade(database); - let mut view_change = database.lock().subscribe_view_change(); + let mut view_change = database.read().await.subscribe_view_change(); af_spawn(async move { while let Ok(view_change) = view_change.recv().await { if weak_database.upgrade().is_none() { @@ -136,10 +137,10 @@ pub(crate) async fn observe_view_change(database_id: &str, database: &Arc) { +pub(crate) async fn observe_block_event(database_id: &str, database: &Arc>) { let database_id = database_id.to_string(); let weak_database = Arc::downgrade(database); - let mut block_event_rx = database.lock().subscribe_block_event(); + let mut block_event_rx = database.read().await.subscribe_block_event(); af_spawn(async move { while let Ok(event) = block_event_rx.recv().await { if weak_database.upgrade().is_none() { diff --git a/frontend/rust-lib/flowy-database2/src/services/database_view/layout_deps.rs b/frontend/rust-lib/flowy-database2/src/services/database_view/layout_deps.rs index 33a4dd8a4e..d337c5002e 100644 --- a/frontend/rust-lib/flowy-database2/src/services/database_view/layout_deps.rs +++ b/frontend/rust-lib/flowy-database2/src/services/database_view/layout_deps.rs @@ -1,9 +1,10 @@ -use collab_database::database::{gen_field_id, MutexDatabase}; +use 
collab_database::database::{gen_field_id, Database}; use collab_database::fields::Field; use collab_database::views::{ DatabaseLayout, FieldSettingsByFieldIdMap, LayoutSetting, OrderObjectPosition, }; use std::sync::Arc; +use tokio::sync::RwLock; use crate::entities::FieldType; use crate::services::field::{DateTypeOption, SingleSelectTypeOption}; @@ -15,20 +16,20 @@ use crate::services::setting::{BoardLayoutSetting, CalendarLayoutSetting}; /// view depends on a field that can be used to group rows while a calendar view /// depends on a date field. pub struct DatabaseLayoutDepsResolver { - pub database: Arc, + pub database: Arc>, /// The new database layout. pub database_layout: DatabaseLayout, } impl DatabaseLayoutDepsResolver { - pub fn new(database: Arc, database_layout: DatabaseLayout) -> Self { + pub fn new(database: Arc>, database_layout: DatabaseLayout) -> Self { Self { database, database_layout, } } - pub fn resolve_deps_when_create_database_linked_view( + pub async fn resolve_deps_when_create_database_linked_view( &self, view_id: &str, ) -> ( @@ -41,9 +42,8 @@ impl DatabaseLayoutDepsResolver { DatabaseLayout::Board => { let layout_settings = BoardLayoutSetting::new().into(); - let field = if !self - .database - .lock() + let database = self.database.read().await; + let field = if !database .get_fields(None) .into_iter() .any(|field| FieldType::from(field.field_type).can_be_group()) @@ -53,7 +53,7 @@ impl DatabaseLayoutDepsResolver { None }; - let field_settings_map = self.database.lock().get_field_settings(view_id, None); + let field_settings_map = database.get_field_settings(view_id, None); tracing::info!( "resolve_deps_when_create_database_linked_view {:?}", field_settings_map @@ -68,7 +68,8 @@ impl DatabaseLayoutDepsResolver { DatabaseLayout::Calendar => { match self .database - .lock() + .read() + .await .get_fields(None) .into_iter() .find(|field| FieldType::from(field.field_type) == FieldType::DateTime) @@ -89,13 +90,20 @@ impl 
DatabaseLayoutDepsResolver { /// If the new layout type is a calendar and there is not date field in the database, it will add /// a new date field to the database and create the corresponding layout setting. - pub fn resolve_deps_when_update_layout_type(&self, view_id: &str) { - let fields = self.database.lock().get_fields(None); + pub async fn resolve_deps_when_update_layout_type(&self, view_id: &str) { + let mut database = self.database.write().await; + let fields = database.get_fields(None); // Insert the layout setting if it's not exist match &self.database_layout { DatabaseLayout::Grid => {}, DatabaseLayout::Board => { - self.create_board_layout_setting_if_need(view_id); + if database + .get_layout_setting::(view_id, &self.database_layout) + .is_none() + { + let layout_setting = BoardLayoutSetting::new(); + database.insert_layout_setting(view_id, &self.database_layout, layout_setting); + } }, DatabaseLayout::Calendar => { let date_field_id = match fields @@ -106,7 +114,7 @@ impl DatabaseLayoutDepsResolver { tracing::trace!("Create a new date field after layout type change"); let field = self.create_date_field(); let field_id = field.id.clone(); - self.database.lock().create_field( + database.create_field( None, field, &OrderObjectPosition::End, @@ -116,41 +124,17 @@ impl DatabaseLayoutDepsResolver { }, Some(date_field) => date_field.id, }; - self.create_calendar_layout_setting_if_need(view_id, &date_field_id); + if database + .get_layout_setting::(view_id, &self.database_layout) + .is_none() + { + let layout_setting = CalendarLayoutSetting::new(date_field_id); + database.insert_layout_setting(view_id, &self.database_layout, layout_setting); + } }, } } - fn create_board_layout_setting_if_need(&self, view_id: &str) { - if self - .database - .lock() - .get_layout_setting::(view_id, &self.database_layout) - .is_none() - { - let layout_setting = BoardLayoutSetting::new(); - self - .database - .lock() - .insert_layout_setting(view_id, &self.database_layout, 
layout_setting); - } - } - - fn create_calendar_layout_setting_if_need(&self, view_id: &str, field_id: &str) { - if self - .database - .lock() - .get_layout_setting::(view_id, &self.database_layout) - .is_none() - { - let layout_setting = CalendarLayoutSetting::new(field_id.to_string()); - self - .database - .lock() - .insert_layout_setting(view_id, &self.database_layout, layout_setting); - } - } - fn create_date_field(&self) -> Field { let field_type = FieldType::DateTime; let default_date_type_option = DateTypeOption::default(); diff --git a/frontend/rust-lib/flowy-database2/src/services/database_view/view_calculations.rs b/frontend/rust-lib/flowy-database2/src/services/database_view/view_calculations.rs index 32ddecc667..e6f5da1134 100644 --- a/frontend/rust-lib/flowy-database2/src/services/database_view/view_calculations.rs +++ b/frontend/rust-lib/flowy-database2/src/services/database_view/view_calculations.rs @@ -1,8 +1,8 @@ +use async_trait::async_trait; use collab_database::fields::Field; use std::sync::Arc; use collab_database::rows::RowCell; -use lib_infra::future::{to_fut, Fut}; use crate::services::calculations::{ Calculation, CalculationsController, CalculationsDelegate, CalculationsTaskHandler, @@ -17,7 +17,7 @@ pub async fn make_calculations_controller( delegate: Arc, notifier: DatabaseViewChangedNotifier, ) -> Arc { - let calculations = delegate.get_all_calculations(view_id); + let calculations = delegate.get_all_calculations(view_id).await; let task_scheduler = delegate.get_task_scheduler(); let calculations_delegate = DatabaseViewCalculationsDelegateImpl(delegate.clone()); let handler_id = gen_handler_id(); @@ -29,8 +29,7 @@ pub async fn make_calculations_controller( calculations, task_scheduler.clone(), notifier, - ) - .await; + ); let calculations_controller = Arc::new(calculations_controller); task_scheduler @@ -45,30 +44,33 @@ pub async fn make_calculations_controller( struct DatabaseViewCalculationsDelegateImpl(Arc); +#[async_trait] impl 
CalculationsDelegate for DatabaseViewCalculationsDelegateImpl { - fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut>> { - self.0.get_cells_for_field(view_id, field_id) + async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec> { + self.0.get_cells_for_field(view_id, field_id).await } - fn get_field(&self, field_id: &str) -> Option { - self.0.get_field(field_id) + async fn get_field(&self, field_id: &str) -> Option { + self.0.get_field(field_id).await } - fn get_calculation(&self, view_id: &str, field_id: &str) -> Fut>> { - let calculation = self.0.get_calculation(view_id, field_id).map(Arc::new); - to_fut(async move { calculation }) + async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option> { + self + .0 + .get_calculation(view_id, field_id) + .await + .map(Arc::new) } - fn update_calculation(&self, view_id: &str, calculation: Calculation) { - self.0.update_calculation(view_id, calculation) + async fn update_calculation(&self, view_id: &str, calculation: Calculation) { + self.0.update_calculation(view_id, calculation).await } - fn remove_calculation(&self, view_id: &str, calculation_id: &str) { - self.0.remove_calculation(view_id, calculation_id) + async fn remove_calculation(&self, view_id: &str, calculation_id: &str) { + self.0.remove_calculation(view_id, calculation_id).await } - fn get_all_calculations(&self, view_id: &str) -> Fut>>> { - let calculations = Arc::new(self.0.get_all_calculations(view_id)); - to_fut(async move { calculations }) + async fn get_all_calculations(&self, view_id: &str) -> Arc>> { + self.0.get_all_calculations(view_id).await.into() } } diff --git a/frontend/rust-lib/flowy-database2/src/services/database_view/view_editor.rs b/frontend/rust-lib/flowy-database2/src/services/database_view/view_editor.rs index 1d5d8cf1b4..aafc78e10e 100644 --- a/frontend/rust-lib/flowy-database2/src/services/database_view/view_editor.rs +++ 
b/frontend/rust-lib/flowy-database2/src/services/database_view/view_editor.rs @@ -156,6 +156,7 @@ impl DatabaseViewEditor { let field = self .delegate .get_field(controller.get_grouping_field_id()) + .await .ok_or_else(|| FlowyError::internal().with_context("Failed to get grouping field"))?; controller.will_create_row(&mut cells, &field, &group_id); } @@ -249,7 +250,10 @@ impl DatabaseViewEditor { field_id: Option, ) { if let Some(controller) = self.group_controller.write().await.as_mut() { - let field = self.delegate.get_field(controller.get_grouping_field_id()); + let field = self + .delegate + .get_field(controller.get_grouping_field_id()) + .await; if let Some(field) = field { let mut row_details = vec![Arc::new(row_detail.clone())]; @@ -413,8 +417,11 @@ impl DatabaseViewEditor { pub async fn v_create_group(&self, name: &str) -> FlowyResult<()> { let mut old_field: Option = None; let result = if let Some(controller) = self.group_controller.write().await.as_mut() { - let create_group_results = controller.create_group(name.to_string())?; - old_field = self.delegate.get_field(controller.get_grouping_field_id()); + let create_group_results = controller.create_group(name.to_string()).await?; + old_field = self + .delegate + .get_field(controller.get_grouping_field_id()) + .await; create_group_results } else { (None, None) @@ -447,20 +454,22 @@ impl DatabaseViewEditor { None => return Ok(RowsChangePB::default()), }; - let old_field = self.delegate.get_field(controller.get_grouping_field_id()); - let (row_ids, type_option_data) = controller.delete_group(group_id)?; + let old_field = self + .delegate + .get_field(controller.get_grouping_field_id()) + .await; + let (row_ids, type_option_data) = controller.delete_group(group_id).await?; drop(group_controller); let mut changes = RowsChangePB::default(); if let Some(field) = old_field { - let deleted_rows = row_ids - .iter() - .filter_map(|row_id| self.delegate.remove_row(row_id)) - .map(|row| row.id.into_inner()); - - 
changes.deleted_rows.extend(deleted_rows); + for row_id in row_ids { + if let Some(row) = self.delegate.remove_row(&row_id).await { + changes.deleted_rows.push(row.id.into_inner()); + } + } if let Some(type_option) = type_option_data { self.delegate.update_field(type_option, field).await?; @@ -478,19 +487,23 @@ impl DatabaseViewEditor { pub async fn v_update_group(&self, changeset: Vec) -> FlowyResult<()> { let mut type_option_data = None; - let (old_field, updated_groups) = - if let Some(controller) = self.group_controller.write().await.as_mut() { - let old_field = self.delegate.get_field(controller.get_grouping_field_id()); - let (updated_groups, new_type_option) = controller.apply_group_changeset(&changeset)?; + let (old_field, updated_groups) = if let Some(controller) = + self.group_controller.write().await.as_mut() + { + let old_field = self + .delegate + .get_field(controller.get_grouping_field_id()) + .await; + let (updated_groups, new_type_option) = controller.apply_group_changeset(&changeset).await?; - if new_type_option.is_some() { - type_option_data = new_type_option; - } + if new_type_option.is_some() { + type_option_data = new_type_option; + } - (old_field, updated_groups) - } else { - (None, vec![]) - }; + (old_field, updated_groups) + } else { + (None, vec![]) + }; if let Some(old_field) = old_field { if let Some(type_option_data) = type_option_data { @@ -511,7 +524,7 @@ impl DatabaseViewEditor { } pub async fn v_get_all_sorts(&self) -> Vec { - self.delegate.get_all_sorts(&self.view_id) + self.delegate.get_all_sorts(&self.view_id).await } #[tracing::instrument(level = "trace", skip(self), err)] @@ -528,7 +541,7 @@ impl DatabaseViewEditor { condition: params.condition.into(), }; - self.delegate.insert_sort(&self.view_id, sort.clone()); + self.delegate.insert_sort(&self.view_id, sort.clone()).await; let mut sort_controller = self.sort_controller.write().await; @@ -549,7 +562,8 @@ impl DatabaseViewEditor { pub async fn v_reorder_sort(&self, params: 
ReorderSortPayloadPB) -> FlowyResult<()> { self .delegate - .move_sort(&self.view_id, ¶ms.from_sort_id, ¶ms.to_sort_id); + .move_sort(&self.view_id, ¶ms.from_sort_id, ¶ms.to_sort_id) + .await; let notification = self .sort_controller @@ -573,7 +587,10 @@ impl DatabaseViewEditor { .apply_changeset(SortChangeset::from_delete(params.sort_id.clone())) .await; - self.delegate.remove_sort(&self.view_id, ¶ms.sort_id); + self + .delegate + .remove_sort(&self.view_id, ¶ms.sort_id) + .await; notify_did_update_sort(notification).await; Ok(()) @@ -583,7 +600,7 @@ impl DatabaseViewEditor { let all_sorts = self.v_get_all_sorts().await; self.sort_controller.write().await.delete_all_sorts().await; - self.delegate.remove_all_sorts(&self.view_id); + self.delegate.remove_all_sorts(&self.view_id).await; let mut notification = SortChangesetNotificationPB::new(self.view_id.clone()); notification.delete_sorts = all_sorts.into_iter().map(SortPB::from).collect(); notify_did_update_sort(notification).await; @@ -591,7 +608,7 @@ impl DatabaseViewEditor { } pub async fn v_get_all_calculations(&self) -> Vec> { - self.delegate.get_all_calculations(&self.view_id) + self.delegate.get_all_calculations(&self.view_id).await } pub async fn v_update_calculations( @@ -620,7 +637,8 @@ impl DatabaseViewEditor { let calculation: Calculation = Calculation::from(&insert); self .delegate - .update_calculation(¶ms.view_id, calculation); + .update_calculation(¶ms.view_id, calculation) + .await; } } @@ -636,7 +654,8 @@ impl DatabaseViewEditor { ) -> FlowyResult<()> { self .delegate - .remove_calculation(¶ms.view_id, ¶ms.calculation_id); + .remove_calculation(¶ms.view_id, ¶ms.calculation_id) + .await; let calculation = Calculation::none(params.calculation_id, params.field_id, None); @@ -653,11 +672,11 @@ impl DatabaseViewEditor { } pub async fn v_get_all_filters(&self) -> Vec { - self.delegate.get_all_filters(&self.view_id) + self.delegate.get_all_filters(&self.view_id).await } pub async fn v_get_filter(&self, 
filter_id: &str) -> Option { - self.delegate.get_filter(&self.view_id, filter_id) + self.delegate.get_filter(&self.view_id, filter_id).await } #[tracing::instrument(level = "trace", skip(self), err)] @@ -686,15 +705,23 @@ impl DatabaseViewEditor { match layout_ty { DatabaseLayout::Grid => {}, DatabaseLayout::Board => { - if let Some(value) = self.delegate.get_layout_setting(&self.view_id, layout_ty) { + if let Some(value) = self + .delegate + .get_layout_setting(&self.view_id, layout_ty) + .await + { layout_setting.board = Some(value.into()); } }, DatabaseLayout::Calendar => { - if let Some(value) = self.delegate.get_layout_setting(&self.view_id, layout_ty) { + if let Some(value) = self + .delegate + .get_layout_setting(&self.view_id, layout_ty) + .await + { let calendar_setting = CalendarLayoutSetting::from(value); // Check the field exist or not - if let Some(field) = self.delegate.get_field(&calendar_setting.field_id) { + if let Some(field) = self.delegate.get_field(&calendar_setting.field_id).await { let field_type = FieldType::from(field.field_type); // Check the type of field is Datetime or not @@ -723,27 +750,33 @@ impl DatabaseViewEditor { DatabaseLayout::Board => { let layout_setting = params.board.unwrap(); - self.delegate.insert_layout_setting( - &self.view_id, - ¶ms.layout_type, - layout_setting.clone().into(), - ); + self + .delegate + .insert_layout_setting( + &self.view_id, + ¶ms.layout_type, + layout_setting.clone().into(), + ) + .await; Some(DatabaseLayoutSettingPB::from_board(layout_setting)) }, DatabaseLayout::Calendar => { let layout_setting = params.calendar.unwrap(); - if let Some(field) = self.delegate.get_field(&layout_setting.field_id) { + if let Some(field) = self.delegate.get_field(&layout_setting.field_id).await { if FieldType::from(field.field_type) != FieldType::DateTime { return Err(FlowyError::unexpect_calendar_field_type()); } - self.delegate.insert_layout_setting( - &self.view_id, - ¶ms.layout_type, - layout_setting.clone().into(), 
- ); + self + .delegate + .insert_layout_setting( + &self.view_id, + ¶ms.layout_type, + layout_setting.clone().into(), + ) + .await; Some(DatabaseLayoutSettingPB::from_calendar(layout_setting)) } else { @@ -769,10 +802,10 @@ impl DatabaseViewEditor { let notification = self.filter_controller.apply_changeset(changeset).await; notify_did_update_filter(notification).await; - let sorts = self.delegate.get_all_sorts(&self.view_id); + let sorts = self.delegate.get_all_sorts(&self.view_id).await; if let Some(sort) = sorts.iter().find(|sort| sort.field_id == deleted_field_id) { - self.delegate.remove_sort(&self.view_id, &sort.id); + self.delegate.remove_sort(&self.view_id, &sort.id).await; let notification = self .sort_controller .write() @@ -810,7 +843,7 @@ impl DatabaseViewEditor { pub async fn v_did_update_field_type_option(&self, old_field: &Field) -> FlowyResult<()> { let field_id = &old_field.id; - if let Some(field) = self.delegate.get_field(field_id) { + if let Some(field) = self.delegate.get_field(field_id).await { self .sort_controller .read() @@ -839,7 +872,7 @@ impl DatabaseViewEditor { /// Called when a grouping field is updated. 
#[tracing::instrument(level = "debug", skip_all, err)] pub async fn v_group_by_field(&self, field_id: &str) -> FlowyResult<()> { - if let Some(field) = self.delegate.get_field(field_id) { + if let Some(field) = self.delegate.get_field(field_id).await { tracing::trace!("create new group controller"); let new_group_controller = new_group_controller( @@ -890,7 +923,7 @@ impl DatabaseViewEditor { let text_cell = get_cell_for_row(self.delegate.clone(), &primary_field.id, &row_id).await?; // Date - let date_field = self.delegate.get_field(&calendar_setting.field_id)?; + let date_field = self.delegate.get_field(&calendar_setting.field_id).await?; let date_cell = get_cell_for_row(self.delegate.clone(), &date_field.id, &row_id).await?; let title = text_cell @@ -981,20 +1014,23 @@ impl DatabaseViewEditor { } pub async fn v_get_layout_type(&self) -> DatabaseLayout { - self.delegate.get_layout_for_view(&self.view_id) + self.delegate.get_layout_for_view(&self.view_id).await } #[tracing::instrument(level = "trace", skip_all)] pub async fn v_update_layout_type(&self, new_layout_type: DatabaseLayout) -> FlowyResult<()> { self .delegate - .update_layout_type(&self.view_id, &new_layout_type); + .update_layout_type(&self.view_id, &new_layout_type) + .await; // using the {} brackets to denote the lifetime of the resolver. Because the DatabaseLayoutDepsResolver // is not sync and send, so we can't pass it to the async block. 
{ let resolver = DatabaseLayoutDepsResolver::new(self.delegate.get_database(), new_layout_type); - resolver.resolve_deps_when_update_layout_type(&self.view_id); + resolver + .resolve_deps_when_update_layout_type(&self.view_id) + .await; } // initialize the group controller if the current layout support grouping @@ -1034,12 +1070,14 @@ impl DatabaseViewEditor { } pub async fn v_get_field_settings(&self, field_ids: &[String]) -> HashMap { - self.delegate.get_field_settings(&self.view_id, field_ids) + self + .delegate + .get_field_settings(&self.view_id, field_ids) + .await } pub async fn v_update_field_settings(&self, params: FieldSettingsChangesetPB) -> FlowyResult<()> { - self.delegate.update_field_settings(params); - + self.delegate.update_field_settings(params).await; Ok(()) } @@ -1053,7 +1091,7 @@ impl DatabaseViewEditor { .await .as_ref() .map(|controller| controller.get_grouping_field_id().to_owned())?; - let field = self.delegate.get_field(&group_field_id)?; + let field = self.delegate.get_field(&group_field_id).await?; let mut write_guard = self.group_controller.write().await; if let Some(group_controller) = &mut *write_guard { f(group_controller, field).ok() diff --git a/frontend/rust-lib/flowy-database2/src/services/database_view/view_filter.rs b/frontend/rust-lib/flowy-database2/src/services/database_view/view_filter.rs index f710144e60..994041aea9 100644 --- a/frontend/rust-lib/flowy-database2/src/services/database_view/view_filter.rs +++ b/frontend/rust-lib/flowy-database2/src/services/database_view/view_filter.rs @@ -1,10 +1,9 @@ +use async_trait::async_trait; use std::sync::Arc; use collab_database::fields::Field; use collab_database::rows::{RowDetail, RowId}; -use lib_infra::future::Fut; - use crate::services::cell::CellCache; use crate::services::database_view::{ gen_handler_id, DatabaseViewChangedNotifier, DatabaseViewOperation, @@ -43,28 +42,29 @@ pub async fn make_filter_controller( struct DatabaseViewFilterDelegateImpl(Arc); +#[async_trait] impl 
FilterDelegate for DatabaseViewFilterDelegateImpl { - fn get_field(&self, field_id: &str) -> Option { - self.0.get_field(field_id) + async fn get_field(&self, field_id: &str) -> Option { + self.0.get_field(field_id).await } - fn get_fields(&self, view_id: &str, field_ids: Option>) -> Fut> { - self.0.get_fields(view_id, field_ids) + async fn get_fields(&self, view_id: &str, field_ids: Option>) -> Vec { + self.0.get_fields(view_id, field_ids).await } - fn get_rows(&self, view_id: &str) -> Fut>> { - self.0.get_rows(view_id) + async fn get_rows(&self, view_id: &str) -> Vec> { + self.0.get_rows(view_id).await } - fn get_row(&self, view_id: &str, rows_id: &RowId) -> Fut)>> { - self.0.get_row(view_id, rows_id) + async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc)> { + self.0.get_row(view_id, rows_id).await } - fn get_all_filters(&self, view_id: &str) -> Vec { - self.0.get_all_filters(view_id) + async fn get_all_filters(&self, view_id: &str) -> Vec { + self.0.get_all_filters(view_id).await } - fn save_filters(&self, view_id: &str, filters: &[Filter]) { - self.0.save_filters(view_id, filters) + async fn save_filters(&self, view_id: &str, filters: &[Filter]) { + self.0.save_filters(view_id, filters).await } } diff --git a/frontend/rust-lib/flowy-database2/src/services/database_view/view_group.rs b/frontend/rust-lib/flowy-database2/src/services/database_view/view_group.rs index 504511608a..b180904d6e 100644 --- a/frontend/rust-lib/flowy-database2/src/services/database_view/view_group.rs +++ b/frontend/rust-lib/flowy-database2/src/services/database_view/view_group.rs @@ -1,10 +1,10 @@ +use async_trait::async_trait; use std::sync::Arc; use collab_database::fields::Field; use collab_database::rows::{RowDetail, RowId}; use flowy_error::FlowyResult; -use lib_infra::future::{to_fut, Fut}; use crate::entities::FieldType; use crate::services::database_view::DatabaseViewOperation; @@ -21,7 +21,7 @@ pub async fn new_group_controller( filter_controller: Arc, 
grouping_field: Option, ) -> FlowyResult>> { - if !delegate.get_layout_for_view(&view_id).is_board() { + if !delegate.get_layout_for_view(&view_id).await.is_board() { return Ok(None); } @@ -61,45 +61,45 @@ pub(crate) struct GroupControllerDelegateImpl { filter_controller: Arc, } +#[async_trait] impl GroupContextDelegate for GroupControllerDelegateImpl { - fn get_group_setting(&self, view_id: &str) -> Fut>> { - let mut settings = self.delegate.get_group_setting(view_id); - to_fut(async move { - if settings.is_empty() { - None - } else { - Some(Arc::new(settings.remove(0))) - } - }) + async fn get_group_setting(&self, view_id: &str) -> Option> { + let mut settings = self.delegate.get_group_setting(view_id).await; + if settings.is_empty() { + None + } else { + Some(Arc::new(settings.remove(0))) + } } - fn get_configuration_cells(&self, view_id: &str, field_id: &str) -> Fut> { - let field_id = field_id.to_owned(); - let view_id = view_id.to_owned(); + async fn get_configuration_cells(&self, view_id: &str, field_id: &str) -> Vec { let delegate = self.delegate.clone(); - to_fut(async move { get_cells_for_field(delegate, &view_id, &field_id).await }) + get_cells_for_field(delegate, view_id, field_id).await } - fn save_configuration(&self, view_id: &str, group_setting: GroupSetting) -> Fut> { - self.delegate.insert_group_setting(view_id, group_setting); - to_fut(async move { Ok(()) }) + async fn save_configuration( + &self, + view_id: &str, + group_setting: GroupSetting, + ) -> FlowyResult<()> { + self + .delegate + .insert_group_setting(view_id, group_setting) + .await; + Ok(()) } } +#[async_trait] impl GroupControllerDelegate for GroupControllerDelegateImpl { - fn get_field(&self, field_id: &str) -> Option { - self.delegate.get_field(field_id) + async fn get_field(&self, field_id: &str) -> Option { + self.delegate.get_field(field_id).await } - fn get_all_rows(&self, view_id: &str) -> Fut>> { - let view_id = view_id.to_string(); - let delegate = self.delegate.clone(); - 
let filter_controller = self.filter_controller.clone(); - to_fut(async move { - let mut row_details = delegate.get_rows(&view_id).await; - filter_controller.filter_rows(&mut row_details).await; - row_details - }) + async fn get_all_rows(&self, view_id: &str) -> Vec> { + let mut row_details = self.delegate.get_rows(view_id).await; + self.filter_controller.filter_rows(&mut row_details).await; + row_details } } @@ -108,7 +108,7 @@ pub(crate) async fn get_cell_for_row( field_id: &str, row_id: &RowId, ) -> Option { - let field = delegate.get_field(field_id)?; + let field = delegate.get_field(field_id).await?; let row_cell = delegate.get_cell_in_row(field_id, row_id).await; let field_type = FieldType::from(field.field_type); let handler = delegate.get_type_option_cell_handler(&field)?; @@ -131,7 +131,7 @@ pub(crate) async fn get_cells_for_field( view_id: &str, field_id: &str, ) -> Vec { - if let Some(field) = delegate.get_field(field_id) { + if let Some(field) = delegate.get_field(field_id).await { let field_type = FieldType::from(field.field_type); if let Some(handler) = delegate.get_type_option_cell_handler(&field) { let cells = delegate.get_cells_for_field(view_id, field_id).await; diff --git a/frontend/rust-lib/flowy-database2/src/services/database_view/view_operation.rs b/frontend/rust-lib/flowy-database2/src/services/database_view/view_operation.rs index 3a912646cd..4681566ebd 100644 --- a/frontend/rust-lib/flowy-database2/src/services/database_view/view_operation.rs +++ b/frontend/rust-lib/flowy-database2/src/services/database_view/view_operation.rs @@ -1,14 +1,14 @@ +use async_trait::async_trait; +use collab_database::database::Database; use std::collections::HashMap; use std::sync::Arc; -use collab_database::database::MutexDatabase; use collab_database::fields::{Field, TypeOptionData}; use collab_database::rows::{Row, RowCell, RowDetail, RowId}; use collab_database::views::{DatabaseLayout, DatabaseView, LayoutSetting}; use tokio::sync::RwLock; use 
flowy_error::FlowyError; -use lib_infra::future::{Fut, FutureResult}; use lib_infra::priority_task::TaskDispatcher; use crate::entities::{FieldSettingsChangesetPB, FieldType}; @@ -20,97 +20,102 @@ use crate::services::group::GroupSetting; use crate::services::sort::Sort; /// Defines the operation that can be performed on a database view +#[async_trait] pub trait DatabaseViewOperation: Send + Sync + 'static { /// Get the database that the view belongs to - fn get_database(&self) -> Arc; + fn get_database(&self) -> Arc>; /// Get the view of the database with the view_id - fn get_view(&self, view_id: &str) -> Fut>; + async fn get_view(&self, view_id: &str) -> Option; /// If the field_ids is None, then it will return all the field revisions - fn get_fields(&self, view_id: &str, field_ids: Option>) -> Fut>; + async fn get_fields(&self, view_id: &str, field_ids: Option>) -> Vec; /// Returns the field with the field_id - fn get_field(&self, field_id: &str) -> Option; + async fn get_field(&self, field_id: &str) -> Option; - fn create_field( + async fn create_field( &self, view_id: &str, name: &str, field_type: FieldType, type_option_data: TypeOptionData, - ) -> Fut; + ) -> Field; - fn update_field( + async fn update_field( &self, type_option_data: TypeOptionData, old_field: Field, - ) -> FutureResult<(), FlowyError>; + ) -> Result<(), FlowyError>; - fn get_primary_field(&self) -> Fut>>; + async fn get_primary_field(&self) -> Option>; /// Returns the index of the row with row_id - fn index_of_row(&self, view_id: &str, row_id: &RowId) -> Fut>; + async fn index_of_row(&self, view_id: &str, row_id: &RowId) -> Option; /// Returns the `index` and `RowRevision` with row_id - fn get_row(&self, view_id: &str, row_id: &RowId) -> Fut)>>; + async fn get_row(&self, view_id: &str, row_id: &RowId) -> Option<(usize, Arc)>; /// Returns all the rows in the view - fn get_rows(&self, view_id: &str) -> Fut>>; + async fn get_rows(&self, view_id: &str) -> Vec>; - fn remove_row(&self, row_id: 
&RowId) -> Option; + async fn remove_row(&self, row_id: &RowId) -> Option; - fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut>>; + async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec>; - fn get_cell_in_row(&self, field_id: &str, row_id: &RowId) -> Fut>; + async fn get_cell_in_row(&self, field_id: &str, row_id: &RowId) -> Arc; /// Return the database layout type for the view with given view_id /// The default layout type is [DatabaseLayout::Grid] - fn get_layout_for_view(&self, view_id: &str) -> DatabaseLayout; + async fn get_layout_for_view(&self, view_id: &str) -> DatabaseLayout; - fn get_group_setting(&self, view_id: &str) -> Vec; + async fn get_group_setting(&self, view_id: &str) -> Vec; - fn insert_group_setting(&self, view_id: &str, setting: GroupSetting); + async fn insert_group_setting(&self, view_id: &str, setting: GroupSetting); - fn get_sort(&self, view_id: &str, sort_id: &str) -> Option; + async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option; - fn insert_sort(&self, view_id: &str, sort: Sort); + async fn insert_sort(&self, view_id: &str, sort: Sort); - fn move_sort(&self, view_id: &str, from_sort_id: &str, to_sort_id: &str); + async fn move_sort(&self, view_id: &str, from_sort_id: &str, to_sort_id: &str); - fn remove_sort(&self, view_id: &str, sort_id: &str); + async fn remove_sort(&self, view_id: &str, sort_id: &str); - fn get_all_sorts(&self, view_id: &str) -> Vec; + async fn get_all_sorts(&self, view_id: &str) -> Vec; - fn remove_all_sorts(&self, view_id: &str); + async fn remove_all_sorts(&self, view_id: &str); - fn get_all_calculations(&self, view_id: &str) -> Vec>; + async fn get_all_calculations(&self, view_id: &str) -> Vec>; - fn get_calculation(&self, view_id: &str, field_id: &str) -> Option; + async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option; - fn update_calculation(&self, view_id: &str, calculation: Calculation); + async fn update_calculation(&self, view_id: &str, 
calculation: Calculation); - fn remove_calculation(&self, view_id: &str, calculation_id: &str); + async fn remove_calculation(&self, view_id: &str, calculation_id: &str); - fn get_all_filters(&self, view_id: &str) -> Vec; + async fn get_all_filters(&self, view_id: &str) -> Vec; - fn get_filter(&self, view_id: &str, filter_id: &str) -> Option; + async fn get_filter(&self, view_id: &str, filter_id: &str) -> Option; - fn delete_filter(&self, view_id: &str, filter_id: &str); + async fn delete_filter(&self, view_id: &str, filter_id: &str); - fn insert_filter(&self, view_id: &str, filter: Filter); + async fn insert_filter(&self, view_id: &str, filter: Filter); - fn save_filters(&self, view_id: &str, filters: &[Filter]); + async fn save_filters(&self, view_id: &str, filters: &[Filter]); - fn get_layout_setting(&self, view_id: &str, layout_ty: &DatabaseLayout) -> Option; + async fn get_layout_setting( + &self, + view_id: &str, + layout_ty: &DatabaseLayout, + ) -> Option; - fn insert_layout_setting( + async fn insert_layout_setting( &self, view_id: &str, layout_ty: &DatabaseLayout, layout_setting: LayoutSetting, ); - fn update_layout_type(&self, view_id: &str, layout_type: &DatabaseLayout); + async fn update_layout_type(&self, view_id: &str, layout_type: &DatabaseLayout); /// Returns a `TaskDispatcher` used to poll a `Task` fn get_task_scheduler(&self) -> Arc>; @@ -120,11 +125,11 @@ pub trait DatabaseViewOperation: Send + Sync + 'static { field: &Field, ) -> Option>; - fn get_field_settings( + async fn get_field_settings( &self, view_id: &str, field_ids: &[String], ) -> HashMap; - fn update_field_settings(&self, params: FieldSettingsChangesetPB); + async fn update_field_settings(&self, params: FieldSettingsChangesetPB); } diff --git a/frontend/rust-lib/flowy-database2/src/services/database_view/view_sort.rs b/frontend/rust-lib/flowy-database2/src/services/database_view/view_sort.rs index 0397526b66..a719590e09 100644 --- 
a/frontend/rust-lib/flowy-database2/src/services/database_view/view_sort.rs +++ b/frontend/rust-lib/flowy-database2/src/services/database_view/view_sort.rs @@ -1,11 +1,10 @@ +use async_trait::async_trait; use std::sync::Arc; use collab_database::fields::Field; use collab_database::rows::RowDetail; use tokio::sync::RwLock; -use lib_infra::future::{to_fut, Fut}; - use crate::services::cell::CellCache; use crate::services::database_view::{ gen_handler_id, DatabaseViewChangedNotifier, DatabaseViewOperation, @@ -23,6 +22,7 @@ pub(crate) async fn make_sort_controller( let handler_id = gen_handler_id(); let sorts = delegate .get_all_sorts(view_id) + .await .into_iter() .map(Arc::new) .collect(); @@ -53,38 +53,31 @@ struct DatabaseViewSortDelegateImpl { filter_controller: Arc, } +#[async_trait] impl SortDelegate for DatabaseViewSortDelegateImpl { - fn get_sort(&self, view_id: &str, sort_id: &str) -> Fut>> { - let sort = self.delegate.get_sort(view_id, sort_id).map(Arc::new); - to_fut(async move { sort }) + async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option> { + self.delegate.get_sort(view_id, sort_id).await.map(Arc::new) } - fn get_rows(&self, view_id: &str) -> Fut>> { + async fn get_rows(&self, view_id: &str) -> Vec> { let view_id = view_id.to_string(); - let delegate = self.delegate.clone(); - let filter_controller = self.filter_controller.clone(); - to_fut(async move { - let mut row_details = delegate.get_rows(&view_id).await; - filter_controller.filter_rows(&mut row_details).await; - row_details - }) + let mut row_details = self.delegate.get_rows(&view_id).await; + self.filter_controller.filter_rows(&mut row_details).await; + row_details } - fn filter_row(&self, row_detail: &RowDetail) -> Fut { - let filter_controller = self.filter_controller.clone(); + async fn filter_row(&self, row_detail: &RowDetail) -> bool { let row_detail = row_detail.clone(); - to_fut(async move { - let mut row_details = vec![Arc::new(row_detail)]; - 
filter_controller.filter_rows(&mut row_details).await; - !row_details.is_empty() - }) + let mut row_details = vec![Arc::new(row_detail)]; + self.filter_controller.filter_rows(&mut row_details).await; + !row_details.is_empty() } - fn get_field(&self, field_id: &str) -> Option { - self.delegate.get_field(field_id) + async fn get_field(&self, field_id: &str) -> Option { + self.delegate.get_field(field_id).await } - fn get_fields(&self, view_id: &str, field_ids: Option>) -> Fut> { - self.delegate.get_fields(view_id, field_ids) + async fn get_fields(&self, view_id: &str, field_ids: Option>) -> Vec { + self.delegate.get_fields(view_id, field_ids).await } } diff --git a/frontend/rust-lib/flowy-database2/src/services/database_view/views.rs b/frontend/rust-lib/flowy-database2/src/services/database_view/views.rs index 132b480123..445257a8b9 100644 --- a/frontend/rust-lib/flowy-database2/src/services/database_view/views.rs +++ b/frontend/rust-lib/flowy-database2/src/services/database_view/views.rs @@ -1,11 +1,11 @@ +use collab_database::database::Database; use std::collections::HashMap; use std::sync::Arc; -use collab_database::database::MutexDatabase; use nanoid::nanoid; use tokio::sync::{broadcast, RwLock}; -use flowy_error::{FlowyError, FlowyResult}; +use flowy_error::FlowyResult; use crate::services::cell::CellCache; use crate::services::database::DatabaseRowEvent; @@ -17,7 +17,7 @@ pub type EditorByViewId = HashMap>; pub struct DatabaseViews { #[allow(dead_code)] - database: Arc, + database: Arc>, cell_cache: CellCache, view_operation: Arc, view_editors: Arc>, @@ -25,7 +25,7 @@ pub struct DatabaseViews { impl DatabaseViews { pub async fn new( - database: Arc, + database: Arc>, cell_cache: CellCache, view_operation: Arc, view_editors: Arc>, @@ -59,13 +59,10 @@ impl DatabaseViews { return Ok(editor.clone()); } - let mut editor_map = self.view_editors.try_write().map_err(|err| { - FlowyError::internal().with_context(format!( - "fail to acquire the lock of editor_by_view_id: 
{}", - err - )) - })?; - let database_id = self.database.lock().get_database_id(); + //FIXME: not thread-safe + let mut editor_map = self.view_editors.write().await; + let database_id = self.database.read().await.get_database_id(); + //FIXME: that method below is not Send+Sync let editor = Arc::new( DatabaseViewEditor::new( database_id, diff --git a/frontend/rust-lib/flowy-database2/src/services/field/field_operation.rs b/frontend/rust-lib/flowy-database2/src/services/field/field_operation.rs index e9db74358f..758b32dfa5 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/field_operation.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/field_operation.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use flowy_error::FlowyResult; +use flowy_error::{FlowyError, FlowyResult}; use crate::entities::FieldType; use crate::services::database::DatabaseEditor; @@ -11,14 +11,15 @@ pub async fn edit_field_type_option( editor: Arc, action: impl FnOnce(&mut T), ) -> FlowyResult<()> { - let get_type_option = async { - let field = editor.get_field(field_id)?; - let field_type = FieldType::from(field.field_type); - field.get_type_option::(field_type) - }; + let field = editor + .get_field(field_id) + .await + .ok_or_else(FlowyError::field_record_not_found)?; + let field_type = FieldType::from(field.field_type); + let get_type_option = field.get_type_option::(field_type); - if let Some(mut type_option) = get_type_option.await { - if let Some(old_field) = editor.get_field(field_id) { + if let Some(mut type_option) = get_type_option { + if let Some(old_field) = editor.get_field(field_id).await { action(&mut type_option); let type_option_data = type_option.into(); editor diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/checkbox_type_option/checkbox_type_option.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/checkbox_type_option/checkbox_type_option.rs index de95ba058c..ea448d76aa 100644 --- 
a/frontend/rust-lib/flowy-database2/src/services/field/type_options/checkbox_type_option/checkbox_type_option.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/checkbox_type_option/checkbox_type_option.rs @@ -35,7 +35,7 @@ impl From for CheckboxTypeOption { impl From for TypeOptionData { fn from(_data: CheckboxTypeOption) -> Self { - TypeOptionDataBuilder::new().build() + TypeOptionDataBuilder::new() } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/checkbox_type_option/checkbox_type_option_entities.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/checkbox_type_option/checkbox_type_option_entities.rs index 35de68136b..8b93382ac3 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/checkbox_type_option/checkbox_type_option_entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/checkbox_type_option/checkbox_type_option_entities.rs @@ -1,7 +1,7 @@ use std::str::FromStr; use bytes::Bytes; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::rows::{new_cell_builder, Cell}; use flowy_error::{FlowyError, FlowyResult}; @@ -21,16 +21,16 @@ impl TypeOptionCellData for CheckboxCellDataPB { impl From<&Cell> for CheckboxCellDataPB { fn from(cell: &Cell) -> Self { - let value = cell.get_str_value(CELL_DATA).unwrap_or_default(); + let value: String = cell.get_as(CELL_DATA).unwrap_or_default(); CheckboxCellDataPB::from_str(&value).unwrap_or_default() } } impl From for Cell { fn from(data: CheckboxCellDataPB) -> Self { - new_cell_builder(FieldType::Checkbox) - .insert_str_value(CELL_DATA, data.to_string()) - .build() + let mut cell = new_cell_builder(FieldType::Checkbox); + cell.insert(CELL_DATA.into(), data.to_string().into()); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/checklist_type_option/checklist.rs 
b/frontend/rust-lib/flowy-database2/src/services/field/type_options/checklist_type_option/checklist.rs index ceddeadce6..c800bf1104 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/checklist_type_option/checklist.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/checklist_type_option/checklist.rs @@ -31,7 +31,7 @@ impl From for ChecklistTypeOption { impl From for TypeOptionData { fn from(_data: ChecklistTypeOption) -> Self { - TypeOptionDataBuilder::new().build() + TypeOptionDataBuilder::new() } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/checklist_type_option/checklist_entities.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/checklist_type_option/checklist_entities.rs index 12b3e07527..ef8a5720e1 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/checklist_type_option/checklist_entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/checklist_type_option/checklist_entities.rs @@ -1,6 +1,6 @@ use crate::entities::FieldType; use crate::services::field::{SelectOption, TypeOptionCellData, CELL_DATA}; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::rows::{new_cell_builder, Cell}; use serde::{Deserialize, Serialize}; use std::fmt::Debug; @@ -64,7 +64,7 @@ impl ChecklistCellData { impl From<&Cell> for ChecklistCellData { fn from(cell: &Cell) -> Self { cell - .get_str_value(CELL_DATA) + .get_as::(CELL_DATA) .map(|data| serde_json::from_str::(&data).unwrap_or_default()) .unwrap_or_default() } @@ -73,9 +73,9 @@ impl From<&Cell> for ChecklistCellData { impl From for Cell { fn from(cell_data: ChecklistCellData) -> Self { let data = serde_json::to_string(&cell_data).unwrap_or_default(); - new_cell_builder(FieldType::Checklist) - .insert_str_value(CELL_DATA, data) - .build() + let mut cell = new_cell_builder(FieldType::Checklist); + cell.insert(CELL_DATA.into(), 
data.into()); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/date_type_option/date_type_option.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/date_type_option/date_type_option.rs index 6214dc3f24..2a7a713b61 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/date_type_option/date_type_option.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/date_type_option/date_type_option.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, NaiveTime, Offset, TimeZone}; use chrono_tz::Tz; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::rows::Cell; use serde::{Deserialize, Serialize}; @@ -36,14 +36,14 @@ impl TypeOption for DateTypeOption { impl From for DateTypeOption { fn from(data: TypeOptionData) -> Self { let date_format = data - .get_i64_value("date_format") + .get_as::("date_format") .map(DateFormat::from) .unwrap_or_default(); let time_format = data - .get_i64_value("time_format") + .get_as::("time_format") .map(TimeFormat::from) .unwrap_or_default(); - let timezone_id = data.get_str_value("timezone_id").unwrap_or_default(); + let timezone_id: String = data.get_as("timezone_id").unwrap_or_default(); Self { date_format, time_format, @@ -54,11 +54,11 @@ impl From for DateTypeOption { impl From for TypeOptionData { fn from(data: DateTypeOption) -> Self { - TypeOptionDataBuilder::new() - .insert_i64_value("date_format", data.date_format.value()) - .insert_i64_value("time_format", data.time_format.value()) - .insert_str_value("timezone_id", data.timezone_id) - .build() + TypeOptionDataBuilder::from([ + ("date_format".into(), data.date_format.value().into()), + ("time_format".into(), data.time_format.value().into()), + ("timezone_id".into(), data.timezone_id.into()), + ]) } } diff --git 
a/frontend/rust-lib/flowy-database2/src/services/field/type_options/date_type_option/date_type_option_entities.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/date_type_option/date_type_option_entities.rs index c2b0259aff..b57185ce23 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/date_type_option/date_type_option_entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/date_type_option/date_type_option_entities.rs @@ -1,7 +1,7 @@ #![allow(clippy::upper_case_acronyms)] use bytes::Bytes; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::rows::{new_cell_builder, Cell}; use serde::de::Visitor; use serde::{Deserialize, Serialize}; @@ -58,14 +58,14 @@ impl TypeOptionCellData for DateCellData { impl From<&Cell> for DateCellData { fn from(cell: &Cell) -> Self { let timestamp = cell - .get_str_value(CELL_DATA) + .get_as::(CELL_DATA) .and_then(|data| data.parse::().ok()); let end_timestamp = cell - .get_str_value("end_timestamp") + .get_as::("end_timestamp") .and_then(|data| data.parse::().ok()); - let include_time = cell.get_bool_value("include_time").unwrap_or_default(); - let is_range = cell.get_bool_value("is_range").unwrap_or_default(); - let reminder_id = cell.get_str_value("reminder_id").unwrap_or_default(); + let include_time: bool = cell.get_as("include_time").unwrap_or_default(); + let is_range: bool = cell.get_as("is_range").unwrap_or_default(); + let reminder_id: String = cell.get_as("reminder_id").unwrap_or_default(); Self { timestamp, @@ -101,13 +101,16 @@ impl From<&DateCellData> for Cell { }; // Most of the case, don't use these keys in other places. Otherwise, we should define // constants for them. 
- new_cell_builder(FieldType::DateTime) - .insert_str_value(CELL_DATA, timestamp_string) - .insert_str_value("end_timestamp", end_timestamp_string) - .insert_bool_value("include_time", cell_data.include_time) - .insert_bool_value("is_range", cell_data.is_range) - .insert_str_value("reminder_id", cell_data.reminder_id.to_owned()) - .build() + let mut cell = new_cell_builder(FieldType::DateTime); + cell.insert(CELL_DATA.into(), timestamp_string.into()); + cell.insert("end_timestamp".into(), end_timestamp_string.into()); + cell.insert("include_time".into(), cell_data.include_time.into()); + cell.insert("is_range".into(), cell_data.is_range.into()); + cell.insert( + "reminder_id".into(), + cell_data.reminder_id.to_owned().into(), + ); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/number_type_option/number_type_option.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/number_type_option/number_type_option.rs index 0fc7cd5920..eee660af86 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/number_type_option/number_type_option.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/number_type_option/number_type_option.rs @@ -1,14 +1,16 @@ +use collab::preclude::encoding::serde::from_any; +use collab::preclude::Any; +use collab::util::AnyMapExt; use std::cmp::Ordering; use std::default::Default; use std::str::FromStr; -use collab::core::any_map::AnyMapExtension; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::rows::{new_cell_builder, Cell}; use fancy_regex::Regex; use lazy_static::lazy_static; use rust_decimal::Decimal; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize}; use flowy_error::FlowyResult; @@ -25,12 +27,24 @@ use crate::services::sort::SortCondition; // Number #[derive(Clone, Debug, Serialize, Deserialize)] pub struct NumberTypeOption { + #[serde(default, deserialize_with = 
"number_format_from_i64")] pub format: NumberFormat, + #[serde(default)] pub scale: u32, + #[serde(default)] pub symbol: String, + #[serde(default)] pub name: String, } +fn number_format_from_i64<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + let value = i64::deserialize(deserializer)?; + Ok(NumberFormat::from(value)) +} + #[derive(Clone, Debug, Default)] pub struct NumberCellData(pub String); @@ -42,15 +56,15 @@ impl TypeOptionCellData for NumberCellData { impl From<&Cell> for NumberCellData { fn from(cell: &Cell) -> Self { - Self(cell.get_str_value(CELL_DATA).unwrap_or_default()) + Self(cell.get_as(CELL_DATA).unwrap_or_default()) } } impl From for Cell { fn from(data: NumberCellData) -> Self { - new_cell_builder(FieldType::Number) - .insert_str_value(CELL_DATA, data.0) - .build() + let mut cell = new_cell_builder(FieldType::Number); + cell.insert(CELL_DATA.into(), data.0.into()); + cell } } @@ -75,30 +89,18 @@ impl TypeOption for NumberTypeOption { impl From for NumberTypeOption { fn from(data: TypeOptionData) -> Self { - let format = data - .get_i64_value("format") - .map(NumberFormat::from) - .unwrap_or_default(); - let scale = data.get_i64_value("scale").unwrap_or_default() as u32; - let symbol = data.get_str_value("symbol").unwrap_or_default(); - let name = data.get_str_value("name").unwrap_or_default(); - Self { - format, - scale, - symbol, - name, - } + from_any(&Any::from(data)).unwrap() } } impl From for TypeOptionData { fn from(data: NumberTypeOption) -> Self { - TypeOptionDataBuilder::new() - .insert_i64_value("format", data.format.value()) - .insert_i64_value("scale", data.scale as i64) - .insert_str_value("name", data.name) - .insert_str_value("symbol", data.symbol) - .build() + TypeOptionDataBuilder::from([ + ("format".into(), data.format.value().into()), + ("scale".into(), data.scale.into()), + ("name".into(), data.name.into()), + ("symbol".into(), data.symbol.into()), + ]) } } diff --git 
a/frontend/rust-lib/flowy-database2/src/services/field/type_options/relation_type_option/relation.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/relation_type_option/relation.rs index ac2548b89d..9806471cb2 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/relation_type_option/relation.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/relation_type_option/relation.rs @@ -1,6 +1,6 @@ +use collab::util::AnyMapExt; use std::cmp::Ordering; -use collab::core::any_map::AnyMapExtension; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::rows::Cell; use flowy_error::FlowyResult; @@ -23,16 +23,14 @@ pub struct RelationTypeOption { impl From for RelationTypeOption { fn from(value: TypeOptionData) -> Self { - let database_id = value.get_str_value("database_id").unwrap_or_default(); + let database_id: String = value.get_as("database_id").unwrap_or_default(); Self { database_id } } } impl From for TypeOptionData { fn from(value: RelationTypeOption) -> Self { - TypeOptionDataBuilder::new() - .insert_str_value("database_id", value.database_id) - .build() + TypeOptionDataBuilder::from([("database_id".into(), value.database_id.into())]) } } @@ -57,7 +55,7 @@ impl CellDataChangeset for RelationTypeOption { return Ok(((&cell_data).into(), cell_data)); } - let cell_data: RelationCellData = cell.unwrap().as_ref().into(); + let cell_data: RelationCellData = cell.as_ref().unwrap().into(); let mut row_ids = cell_data.row_ids.clone(); for inserted in changeset.inserted_row_ids.iter() { if !row_ids.iter().any(|row_id| row_id == inserted) { diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/relation_type_option/relation_entities.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/relation_type_option/relation_entities.rs index 97b18590af..c8911a2ffe 100644 --- 
a/frontend/rust-lib/flowy-database2/src/services/field/type_options/relation_type_option/relation_entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/relation_type_option/relation_entities.rs @@ -40,9 +40,9 @@ impl From<&RelationCellData> for Cell { .map(|id| Any::String(Arc::from(id.to_string()))) .collect::>(), )); - new_cell_builder(FieldType::Relation) - .insert_any(CELL_DATA, data) - .build() + let mut cell = new_cell_builder(FieldType::Relation); + cell.insert(CELL_DATA.into(), data); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/multi_select_type_option.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/multi_select_type_option.rs index 8ebd0d1db4..850b383a45 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/multi_select_type_option.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/multi_select_type_option.rs @@ -1,6 +1,6 @@ +use collab::util::AnyMapExt; use std::cmp::Ordering; -use collab::core::any_map::AnyMapExtension; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::rows::Cell; use serde::{Deserialize, Serialize}; @@ -33,8 +33,8 @@ impl TypeOption for MultiSelectTypeOption { impl From for MultiSelectTypeOption { fn from(data: TypeOptionData) -> Self { data - .get_str_value("content") - .map(|s| serde_json::from_str::(&s).unwrap_or_default()) + .get_as::("content") + .map(|json| serde_json::from_str::(&json).unwrap_or_default()) .unwrap_or_default() } } @@ -42,9 +42,7 @@ impl From for MultiSelectTypeOption { impl From for TypeOptionData { fn from(data: MultiSelectTypeOption) -> Self { let content = serde_json::to_string(&data).unwrap_or_default(); - TypeOptionDataBuilder::new() - .insert_str_value("content", content) - .build() + TypeOptionDataBuilder::from([("content".into(), 
content.into())]) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/select_ids.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/select_ids.rs index c47738b788..c75730dfac 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/select_ids.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/select_ids.rs @@ -1,6 +1,6 @@ +use collab::util::AnyMapExt; use std::str::FromStr; -use collab::core::any_map::AnyMapExtension; use collab_database::rows::{new_cell_builder, Cell}; use flowy_error::FlowyError; @@ -26,9 +26,9 @@ impl SelectOptionIds { self.0 } pub fn to_cell_data(&self, field_type: FieldType) -> Cell { - new_cell_builder(field_type) - .insert_str_value(CELL_DATA, self.to_string()) - .build() + let mut cell = new_cell_builder(field_type); + cell.insert(CELL_DATA.into(), self.to_string().into()); + cell } } @@ -40,7 +40,7 @@ impl TypeOptionCellData for SelectOptionIds { impl From<&Cell> for SelectOptionIds { fn from(cell: &Cell) -> Self { - let value = cell.get_str_value(CELL_DATA).unwrap_or_default(); + let value: String = cell.get_as(CELL_DATA).unwrap_or_default(); Self::from_str(&value).unwrap_or_default() } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/single_select_type_option.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/single_select_type_option.rs index fa0745133b..bbf131b64f 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/single_select_type_option.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/selection_type_option/single_select_type_option.rs @@ -8,7 +8,7 @@ use crate::services::field::{ SelectOptionCellChangeset, SelectOptionIds, SelectTypeOptionSharedAction, }; use 
crate::services::sort::SortCondition; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::rows::Cell; use flowy_error::FlowyResult; @@ -32,7 +32,7 @@ impl TypeOption for SingleSelectTypeOption { impl From for SingleSelectTypeOption { fn from(data: TypeOptionData) -> Self { data - .get_str_value("content") + .get_as::("content") .map(|s| serde_json::from_str::(&s).unwrap_or_default()) .unwrap_or_default() } @@ -41,9 +41,7 @@ impl From for SingleSelectTypeOption { impl From for TypeOptionData { fn from(data: SingleSelectTypeOption) -> Self { let content = serde_json::to_string(&data).unwrap_or_default(); - TypeOptionDataBuilder::new() - .insert_str_value("content", content) - .build() + TypeOptionDataBuilder::from([("content".into(), content.into())]) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/summary_type_option/summary.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/summary_type_option/summary.rs index 920f76de8e..4d99e67dd3 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/summary_type_option/summary.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/summary_type_option/summary.rs @@ -7,7 +7,7 @@ use crate::services::field::{ TypeOptionCellDataSerde, TypeOptionTransform, }; use crate::services::sort::SortCondition; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::rows::Cell; use flowy_error::FlowyResult; @@ -20,16 +20,14 @@ pub struct SummarizationTypeOption { impl From for SummarizationTypeOption { fn from(value: TypeOptionData) -> Self { - let auto_fill = value.get_bool_value("auto_fill").unwrap_or_default(); + let auto_fill: bool = value.get_as("auto_fill").unwrap_or_default(); Self { auto_fill } } } impl From for 
TypeOptionData { fn from(value: SummarizationTypeOption) -> Self { - TypeOptionDataBuilder::new() - .insert_bool_value("auto_fill", value.auto_fill) - .build() + TypeOptionDataBuilder::from([("auto_fill".into(), value.auto_fill.into())]) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/summary_type_option/summary_entities.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/summary_type_option/summary_entities.rs index 8d45578e38..ef41e2d0f5 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/summary_type_option/summary_entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/summary_type_option/summary_entities.rs @@ -1,6 +1,6 @@ use crate::entities::FieldType; use crate::services::field::{TypeOptionCellData, CELL_DATA}; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::rows::{new_cell_builder, Cell}; #[derive(Default, Debug, Clone)] @@ -21,15 +21,15 @@ impl TypeOptionCellData for SummaryCellData { impl From<&Cell> for SummaryCellData { fn from(cell: &Cell) -> Self { - Self(cell.get_str_value(CELL_DATA).unwrap_or_default()) + Self(cell.get_as::(CELL_DATA).unwrap_or_default()) } } impl From for Cell { fn from(data: SummaryCellData) -> Self { - new_cell_builder(FieldType::Summary) - .insert_str_value(CELL_DATA, data.0) - .build() + let mut cell = new_cell_builder(FieldType::Summary); + cell.insert(CELL_DATA.into(), data.0.into()); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/text_type_option/text_type_option.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/text_type_option/text_type_option.rs index 5cb2875de5..d32c9f0e44 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/text_type_option/text_type_option.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/text_type_option/text_type_option.rs @@ -1,6 
+1,6 @@ +use collab::util::AnyMapExt; use std::cmp::Ordering; -use collab::core::any_map::AnyMapExtension; use collab_database::fields::{Field, TypeOptionData, TypeOptionDataBuilder}; use collab_database::rows::{new_cell_builder, Cell}; use serde::{Deserialize, Serialize}; @@ -33,16 +33,15 @@ impl TypeOption for RichTextTypeOption { impl From for RichTextTypeOption { fn from(data: TypeOptionData) -> Self { - let s = data.get_str_value(CELL_DATA).unwrap_or_default(); - Self { inner: s } + Self { + inner: data.get_as(CELL_DATA).unwrap_or_default(), + } } } impl From for TypeOptionData { fn from(data: RichTextTypeOption) -> Self { - TypeOptionDataBuilder::new() - .insert_str_value(CELL_DATA, data.inner) - .build() + TypeOptionDataBuilder::from([(CELL_DATA.into(), data.inner.into())]) } } @@ -164,15 +163,15 @@ impl TypeOptionCellData for StringCellData { impl From<&Cell> for StringCellData { fn from(cell: &Cell) -> Self { - Self(cell.get_str_value(CELL_DATA).unwrap_or_default()) + Self(cell.get_as(CELL_DATA).unwrap_or_default()) } } impl From for Cell { fn from(data: StringCellData) -> Self { - new_cell_builder(FieldType::RichText) - .insert_str_value(CELL_DATA, data.0) - .build() + let mut cell = new_cell_builder(FieldType::RichText); + cell.insert(CELL_DATA.into(), data.0.into()); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/time_type_option/time.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/time_type_option/time.rs index 0b7c141cb8..2125eb8f88 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/time_type_option/time.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/time_type_option/time.rs @@ -29,7 +29,7 @@ impl From for TimeTypeOption { impl From for TypeOptionData { fn from(_data: TimeTypeOption) -> Self { - TypeOptionDataBuilder::new().build() + TypeOptionDataBuilder::new() } } diff --git 
a/frontend/rust-lib/flowy-database2/src/services/field/type_options/time_type_option/time_entities.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/time_type_option/time_entities.rs index 6084c80b5f..f07babeda0 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/time_type_option/time_entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/time_type_option/time_entities.rs @@ -1,6 +1,6 @@ use crate::entities::FieldType; use crate::services::field::{TypeOptionCellData, CELL_DATA}; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::rows::{new_cell_builder, Cell}; #[derive(Clone, Debug, Default)] @@ -16,7 +16,7 @@ impl From<&Cell> for TimeCellData { fn from(cell: &Cell) -> Self { Self( cell - .get_str_value(CELL_DATA) + .get_as::(CELL_DATA) .and_then(|data| data.parse::().ok()), ) } @@ -40,8 +40,8 @@ impl ToString for TimeCellData { impl From<&TimeCellData> for Cell { fn from(data: &TimeCellData) -> Self { - new_cell_builder(FieldType::Time) - .insert_str_value(CELL_DATA, data.to_string()) - .build() + let mut cell = new_cell_builder(FieldType::Time); + cell.insert(CELL_DATA.into(), data.to_string().into()); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/timestamp_type_option/timestamp_type_option.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/timestamp_type_option/timestamp_type_option.rs index 17b9f54dd3..116bd1a0a4 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/timestamp_type_option/timestamp_type_option.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/timestamp_type_option/timestamp_type_option.rs @@ -1,7 +1,7 @@ use std::cmp::Ordering; use chrono::{DateTime, Local, Offset}; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use 
collab_database::rows::Cell; use flowy_error::{ErrorCode, FlowyError, FlowyResult}; @@ -44,16 +44,16 @@ impl TypeOption for TimestampTypeOption { impl From for TimestampTypeOption { fn from(data: TypeOptionData) -> Self { let date_format = data - .get_i64_value("date_format") + .get_as::("date_format") .map(DateFormat::from) .unwrap_or_default(); let time_format = data - .get_i64_value("time_format") + .get_as::("time_format") .map(TimeFormat::from) .unwrap_or_default(); - let include_time = data.get_bool_value("include_time").unwrap_or_default(); + let include_time = data.get_as::("include_time").unwrap_or_default(); let field_type = data - .get_i64_value("field_type") + .get_as::("field_type") .map(FieldType::from) .unwrap_or(FieldType::LastEditedTime); Self { @@ -67,12 +67,12 @@ impl From for TimestampTypeOption { impl From for TypeOptionData { fn from(option: TimestampTypeOption) -> Self { - TypeOptionDataBuilder::new() - .insert_i64_value("date_format", option.date_format.value()) - .insert_i64_value("time_format", option.time_format.value()) - .insert_bool_value("include_time", option.include_time) - .insert_i64_value("field_type", option.field_type.value()) - .build() + TypeOptionDataBuilder::from([ + ("date_format".into(), option.date_format.value().into()), + ("time_format".into(), option.time_format.value().into()), + ("include_time".into(), option.include_time.into()), + ("field_type".into(), option.field_type.value().into()), + ]) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/timestamp_type_option/timestamp_type_option_entities.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/timestamp_type_option/timestamp_type_option_entities.rs index 307b7637b8..a1e416d688 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/timestamp_type_option/timestamp_type_option_entities.rs +++ 
b/frontend/rust-lib/flowy-database2/src/services/field/type_options/timestamp_type_option/timestamp_type_option_entities.rs @@ -1,4 +1,4 @@ -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::rows::{new_cell_builder, Cell}; use serde::Serialize; @@ -23,7 +23,7 @@ impl TimestampCellData { impl From<&Cell> for TimestampCellData { fn from(cell: &Cell) -> Self { let timestamp = cell - .get_str_value(CELL_DATA) + .get_as::(CELL_DATA) .and_then(|data| data.parse::().ok()); Self { timestamp } } @@ -45,11 +45,11 @@ impl From<(FieldType, TimestampCellData)> for TimestampCellDataWrapper { impl From for Cell { fn from(wrapper: TimestampCellDataWrapper) -> Self { let (field_type, data) = (wrapper.field_type, wrapper.data); - let timestamp_string = data.timestamp.unwrap_or_default(); + let timestamp_string = data.timestamp.unwrap_or_default().to_string(); - new_cell_builder(field_type) - .insert_str_value(CELL_DATA, timestamp_string) - .build() + let mut cell = new_cell_builder(field_type); + cell.insert(CELL_DATA.into(), timestamp_string.into()); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/translate_type_option/translate.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/translate_type_option/translate.rs index 5403782387..ff84213a15 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/translate_type_option/translate.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/translate_type_option/translate.rs @@ -7,16 +7,20 @@ use crate::services::field::{ TypeOptionCellDataSerde, TypeOptionTransform, }; use crate::services::sort::SortCondition; -use collab::core::any_map::AnyMapExtension; +use collab::preclude::encoding::serde::from_any; +use collab::preclude::Any; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::rows::Cell; use flowy_error::FlowyResult; +use serde::Deserialize; use 
std::cmp::Ordering; -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Deserialize)] pub struct TranslateTypeOption { + #[serde(default)] pub auto_fill: bool, /// Use [TranslateTypeOption::language_from_type] to get the language name + #[serde(default, rename = "language")] pub language_type: i64, } @@ -48,21 +52,16 @@ impl Default for TranslateTypeOption { impl From for TranslateTypeOption { fn from(value: TypeOptionData) -> Self { - let auto_fill = value.get_bool_value("auto_fill").unwrap_or_default(); - let language = value.get_i64_value("language").unwrap_or_default(); - Self { - auto_fill, - language_type: language, - } + from_any(&Any::from(value)).unwrap() } } impl From for TypeOptionData { fn from(value: TranslateTypeOption) -> Self { - TypeOptionDataBuilder::new() - .insert_bool_value("auto_fill", value.auto_fill) - .insert_i64_value("language", value.language_type) - .build() + TypeOptionDataBuilder::from([ + ("auto_fill".into(), value.auto_fill.into()), + ("language".into(), value.language_type.into()), + ]) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/translate_type_option/translate_entities.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/translate_type_option/translate_entities.rs index b52b746ab5..eefbf873da 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/translate_type_option/translate_entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/translate_type_option/translate_entities.rs @@ -1,6 +1,6 @@ use crate::entities::FieldType; use crate::services::field::{TypeOptionCellData, CELL_DATA}; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::rows::{new_cell_builder, Cell}; #[derive(Default, Debug, Clone)] @@ -21,15 +21,15 @@ impl TypeOptionCellData for TranslateCellData { impl From<&Cell> for TranslateCellData { fn from(cell: &Cell) -> Self { - 
Self(cell.get_str_value(CELL_DATA).unwrap_or_default()) + Self(cell.get_as(CELL_DATA).unwrap_or_default()) } } impl From for Cell { fn from(data: TranslateCellData) -> Self { - new_cell_builder(FieldType::Translate) - .insert_str_value(CELL_DATA, data.0) - .build() + let mut cell = new_cell_builder(FieldType::Translate); + cell.insert(CELL_DATA.into(), data.0.into()); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/type_option_cell.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/type_option_cell.rs index 415f694164..9e33d874be 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/type_option_cell.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/type_option_cell.rs @@ -1,5 +1,7 @@ +use collab::preclude::Any; use std::cmp::Ordering; use std::collections::hash_map::DefaultHasher; +use std::collections::HashMap; use std::hash::{Hash, Hasher}; use collab_database::fields::{Field, TypeOptionData}; @@ -96,15 +98,40 @@ impl CellDataCacheKey { pub fn new(field_rev: &Field, decoded_field_type: FieldType, cell: &Cell) -> Self { let mut hasher = DefaultHasher::new(); if let Some(type_option_data) = field_rev.get_any_type_option(decoded_field_type) { - type_option_data.hash(&mut hasher); + map_hash(&type_option_data, &mut hasher); } hasher.write(field_rev.id.as_bytes()); hasher.write_u8(decoded_field_type as u8); - cell.hash(&mut hasher); + map_hash(cell, &mut hasher); Self(hasher.finish()) } } +fn any_hash(any: &Any, hasher: &mut H) { + //FIXME: this is very bad idea for hash calculation + match any { + Any::Null | Any::Undefined => hasher.write_u8(0), + Any::Bool(v) => v.hash(hasher), + Any::Number(v) => v.to_be_bytes().hash(hasher), + Any::BigInt(v) => v.hash(hasher), + Any::String(v) => v.hash(hasher), + Any::Buffer(v) => v.hash(hasher), + Any::Array(v) => { + for v in v.iter() { + any_hash(v, hasher); + } + }, + Any::Map(v) => map_hash(v, hasher), + } +} + 
+fn map_hash(map: &HashMap, hasher: &mut H) { + for (k, v) in map.iter() { + k.hash(hasher); + any_hash(v, hasher); + } +} + impl AsRef for CellDataCacheKey { fn as_ref(&self) -> &u64 { &self.0 @@ -159,9 +186,10 @@ where fn get_cell_data_from_cache(&self, cell: &Cell, field: &Field) -> Option { let key = self.get_cell_data_cache_key(cell, field); - let cell_data_cache = self.cell_data_cache.as_ref()?.read(); + let cell_data_cache = self.cell_data_cache.as_ref()?; - cell_data_cache.get(key.as_ref()).cloned() + let cell = cell_data_cache.get::(key.as_ref())?; + Some(cell.value().clone()) } fn set_cell_data_in_cache(&self, cell: &Cell, cell_data: T::CellData, field: &Field) { @@ -174,7 +202,7 @@ where cell, cell_data ); - cell_data_cache.write().insert(key.as_ref(), cell_data); + cell_data_cache.insert(key.as_ref(), cell_data); } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/url_type_option/url_type_option.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/url_type_option/url_type_option.rs index 3a95c6bae0..f167284014 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/url_type_option/url_type_option.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/url_type_option/url_type_option.rs @@ -1,6 +1,7 @@ +use collab::preclude::encoding::serde::from_any; +use collab::preclude::Any; use std::cmp::Ordering; -use collab::core::any_map::AnyMapExtension; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::rows::Cell; use flowy_error::FlowyResult; @@ -16,7 +17,9 @@ use crate::services::sort::SortCondition; #[derive(Debug, Clone, Serialize, Deserialize, Default)] pub struct URLTypeOption { + #[serde(default)] pub url: String, + #[serde(default)] pub content: String, } @@ -29,18 +32,16 @@ impl TypeOption for URLTypeOption { impl From for URLTypeOption { fn from(data: TypeOptionData) -> Self { - let url = 
data.get_str_value("url").unwrap_or_default(); - let content = data.get_str_value("content").unwrap_or_default(); - Self { url, content } + from_any(&Any::from(data)).unwrap() } } impl From for TypeOptionData { fn from(data: URLTypeOption) -> Self { - TypeOptionDataBuilder::new() - .insert_str_value("url", data.url) - .insert_str_value("content", data.content) - .build() + TypeOptionDataBuilder::from([ + ("url".into(), data.url.into()), + ("content".into(), data.content.into()), + ]) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field/type_options/url_type_option/url_type_option_entities.rs b/frontend/rust-lib/flowy-database2/src/services/field/type_options/url_type_option/url_type_option_entities.rs index 2b286e0604..dd351fcc2a 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field/type_options/url_type_option/url_type_option_entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field/type_options/url_type_option/url_type_option_entities.rs @@ -1,5 +1,5 @@ use bytes::Bytes; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::rows::{new_cell_builder, Cell}; use serde::{Deserialize, Serialize}; @@ -34,16 +34,17 @@ impl TypeOptionCellData for URLCellData { impl From<&Cell> for URLCellData { fn from(cell: &Cell) -> Self { - let data = cell.get_str_value(CELL_DATA).unwrap_or_default(); - Self { data } + Self { + data: cell.get_as(CELL_DATA).unwrap_or_default(), + } } } impl From for Cell { fn from(data: URLCellData) -> Self { - new_cell_builder(FieldType::URL) - .insert_str_value(CELL_DATA, data.data) - .build() + let mut cell = new_cell_builder(FieldType::URL); + cell.insert(CELL_DATA.into(), data.data.into()); + cell } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field_settings/entities.rs b/frontend/rust-lib/flowy-database2/src/services/field_settings/entities.rs index 9f9e82311f..1fb7cde207 100644 --- 
a/frontend/rust-lib/flowy-database2/src/services/field_settings/entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/field_settings/entities.rs @@ -1,4 +1,4 @@ -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::views::{DatabaseLayout, FieldSettingsMap, FieldSettingsMapBuilder}; use crate::entities::FieldVisibility; @@ -25,16 +25,11 @@ impl FieldSettings { field_settings: &FieldSettingsMap, ) -> Self { let visibility = field_settings - .get_i64_value(VISIBILITY) + .get_as::(VISIBILITY) .map(Into::into) .unwrap_or_else(|| default_field_visibility(layout_type)); - let width = field_settings - .get_i64_value(WIDTH) - .map(|value| value as i32) - .unwrap_or(DEFAULT_WIDTH); - let wrap_cell_content = field_settings - .get_bool_value(WRAP_CELL_CONTENT) - .unwrap_or(true); + let width = field_settings.get_as::(WIDTH).unwrap_or(DEFAULT_WIDTH); + let wrap_cell_content: bool = field_settings.get_as(WRAP_CELL_CONTENT).unwrap_or(true); Self { field_id: field_id.to_string(), @@ -47,10 +42,16 @@ impl FieldSettings { impl From for FieldSettingsMap { fn from(field_settings: FieldSettings) -> Self { - FieldSettingsMapBuilder::new() - .insert_i64_value(VISIBILITY, field_settings.visibility.into()) - .insert_i64_value(WIDTH, field_settings.width as i64) - .insert_bool_value(WRAP_CELL_CONTENT, field_settings.wrap_cell_content) - .build() + FieldSettingsMapBuilder::from([ + ( + VISIBILITY.into(), + i64::from(field_settings.visibility).into(), + ), + (WIDTH.into(), field_settings.width.into()), + ( + WRAP_CELL_CONTENT.into(), + field_settings.wrap_cell_content.into(), + ), + ]) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/field_settings/field_settings_builder.rs b/frontend/rust-lib/flowy-database2/src/services/field_settings/field_settings_builder.rs index 7602224acd..7f9ed6b2c0 100644 --- a/frontend/rust-lib/flowy-database2/src/services/field_settings/field_settings_builder.rs +++ 
b/frontend/rust-lib/flowy-database2/src/services/field_settings/field_settings_builder.rs @@ -86,9 +86,8 @@ pub fn default_field_settings_by_layout_map() -> HashMap Option; - fn get_fields(&self, view_id: &str, field_ids: Option>) -> Fut>; - fn get_rows(&self, view_id: &str) -> Fut>>; - fn get_row(&self, view_id: &str, rows_id: &RowId) -> Fut)>>; - fn get_all_filters(&self, view_id: &str) -> Vec; - fn save_filters(&self, view_id: &str, filters: &[Filter]); + async fn get_field(&self, field_id: &str) -> Option; + async fn get_fields(&self, view_id: &str, field_ids: Option>) -> Vec; + async fn get_rows(&self, view_id: &str) -> Vec>; + async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc)>; + async fn get_all_filters(&self, view_id: &str) -> Vec; + async fn save_filters(&self, view_id: &str, filters: &[Filter]); } pub trait PreFillCellsWithFilter { @@ -72,7 +73,7 @@ impl FilterController { let mut need_save = false; - let mut filters = delegate.get_all_filters(view_id); + let mut filters = delegate.get_all_filters(view_id).await; let mut filtering_field_ids: HashMap> = HashMap::new(); for filter in filters.iter() { @@ -93,7 +94,7 @@ impl FilterController { } if need_save { - delegate.save_filters(view_id, &filters); + delegate.save_filters(view_id, &filters).await; } Self { @@ -231,7 +232,7 @@ impl FilterController { }, } - self.delegate.save_filters(&self.view_id, &filters); + self.delegate.save_filters(&self.view_id, &filters).await; self .gen_task(FilterEvent::FilterDidChanged, QualityOfService::Background) diff --git a/frontend/rust-lib/flowy-database2/src/services/filter/entities.rs b/frontend/rust-lib/flowy-database2/src/services/filter/entities.rs index 718d062fbb..adb5ef8c5b 100644 --- a/frontend/rust-lib/flowy-database2/src/services/filter/entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/filter/entities.rs @@ -1,8 +1,10 @@ use std::collections::HashMap; use std::mem; +use std::ops::Deref; use anyhow::bail; -use 
collab::core::any_map::AnyMapExtension; +use collab::preclude::Any; +use collab::util::AnyMapExt; use collab_database::database::gen_database_filter_id; use collab_database::rows::RowId; use collab_database::views::{FilterMap, FilterMapBuilder}; @@ -316,13 +318,20 @@ const FILTER_DATA_INDEX: i64 = 2; impl<'a> From<&'a Filter> for FilterMap { fn from(filter: &'a Filter) -> Self { - let mut builder = FilterMapBuilder::new() - .insert_str_value(FILTER_ID, &filter.id) - .insert_i64_value(FILTER_TYPE, filter.inner.get_int_repr()); + let mut builder = FilterMapBuilder::from([ + (FILTER_ID.into(), filter.id.as_str().into()), + (FILTER_TYPE.into(), filter.inner.get_int_repr().into()), + ]); builder = match &filter.inner { FilterInner::And { children } | FilterInner::Or { children } => { - builder.insert_maps(FILTER_CHILDREN, children.iter().collect::>()) + let mut vec = Vec::with_capacity(children.len()); + for child in children.iter() { + let any: Any = FilterMap::from(child).into(); + vec.push(any); + } + builder.insert(FILTER_CHILDREN.into(), Any::from(vec)); + builder }, FilterInner::Data { field_id, @@ -387,15 +396,15 @@ impl<'a> From<&'a Filter> for FilterMap { Default::default() }); + builder.insert(FIELD_ID.into(), field_id.as_str().into()); + builder.insert(FIELD_TYPE.into(), i64::from(field_type).into()); + builder.insert(FILTER_CONDITION.into(), (condition as i64).into()); + builder.insert(FILTER_CONTENT.into(), content.into()); builder - .insert_str_value(FIELD_ID, field_id) - .insert_i64_value(FIELD_TYPE, field_type.into()) - .insert_i64_value(FILTER_CONDITION, condition as i64) - .insert_str_value(FILTER_CONTENT, content) }, }; - builder.build() + builder } } @@ -403,32 +412,30 @@ impl TryFrom for Filter { type Error = anyhow::Error; fn try_from(filter_map: FilterMap) -> Result { - let filter_id = filter_map - .get_str_value(FILTER_ID) + let filter_id: String = filter_map + .get_as(FILTER_ID) .ok_or_else(|| anyhow::anyhow!("invalid filter data"))?; - let 
filter_type = filter_map - .get_i64_value(FILTER_TYPE) - .unwrap_or(FILTER_DATA_INDEX); + let filter_type: i64 = filter_map.get_as(FILTER_TYPE).unwrap_or(FILTER_DATA_INDEX); let filter = Filter { id: filter_id, inner: match filter_type { FILTER_AND_INDEX => FilterInner::And { - children: filter_map.try_get_array(FILTER_CHILDREN), + children: get_children(filter_map), }, FILTER_OR_INDEX => FilterInner::Or { - children: filter_map.try_get_array(FILTER_CHILDREN), + children: get_children(filter_map), }, FILTER_DATA_INDEX => { - let field_id = filter_map - .get_str_value(FIELD_ID) + let field_id: String = filter_map + .get_as(FIELD_ID) .ok_or_else(|| anyhow::anyhow!("invalid filter data"))?; let field_type = filter_map - .get_i64_value(FIELD_TYPE) + .get_as::(FIELD_TYPE) .map(FieldType::from) .unwrap_or_default(); - let condition = filter_map.get_i64_value(FILTER_CONDITION).unwrap_or(0); - let content = filter_map.get_str_value(FILTER_CONTENT).unwrap_or_default(); + let condition: i64 = filter_map.get_as(FILTER_CONDITION).unwrap_or_default(); + let content: String = filter_map.get_as(FILTER_CONTENT).unwrap_or_default(); FilterInner::new_data(field_id, field_type, condition, content) }, @@ -440,6 +447,22 @@ impl TryFrom for Filter { } } +fn get_children(filter_map: FilterMap) -> Vec { + //TODO: this method wouldn't be necessary if we could make Filters serializable in backward + // compatible way + let mut result = Vec::new(); + if let Some(Any::Array(children)) = filter_map.get(FILTER_CHILDREN) { + for child in children.iter() { + if let Any::Map(child_map) = child { + if let Ok(filter) = Filter::try_from(child_map.deref().clone()) { + result.push(filter); + } + } + } + } + result +} + #[derive(Debug)] pub enum FilterChangeset { Insert { diff --git a/frontend/rust-lib/flowy-database2/src/services/group/action.rs b/frontend/rust-lib/flowy-database2/src/services/group/action.rs index b540fb5fa3..cf4b8ae5eb 100644 --- 
a/frontend/rust-lib/flowy-database2/src/services/group/action.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/action.rs @@ -1,3 +1,4 @@ +use async_trait::async_trait; use collab_database::fields::{Field, TypeOptionData}; use collab_database::rows::{Cell, Cells, Row, RowDetail, RowId}; @@ -10,7 +11,7 @@ use crate::services::group::{GroupChangeset, GroupData, MoveGroupRowContext}; /// [GroupCustomize] is implemented by parameterized `BaseGroupController`s to provide different /// behaviors. This allows the BaseGroupController to call these actions indescriminantly using /// polymorphism. -/// +#[async_trait] pub trait GroupCustomize: Send + Sync { type GroupTypeOption: TypeOption; /// Returns the a value of the cell if the cell data is not exist. @@ -67,14 +68,14 @@ pub trait GroupCustomize: Send + Sync { None } - fn create_group( + async fn create_group( &mut self, _name: String, ) -> FlowyResult<(Option, Option)> { Ok((None, None)) } - fn delete_group(&mut self, group_id: &str) -> FlowyResult>; + async fn delete_group(&mut self, group_id: &str) -> FlowyResult>; fn update_type_option_when_update_group( &mut self, @@ -95,7 +96,7 @@ pub trait GroupCustomize: Send + Sync { /// or a `DefaultGroupController` may be the actual object that provides the functionality of /// this trait. For example, a `Single-Select` group controller will be a `BaseGroupController`, /// while a `URL` group controller will be a `DefaultGroupController`. -/// +#[async_trait] pub trait GroupController: Send + Sync { /// Returns the id of field that is being used to group the rows fn get_grouping_field_id(&self) -> &str; @@ -119,7 +120,7 @@ pub trait GroupController: Send + Sync { /// Returns a new type option data for the grouping field if it's altered. /// /// * `name`: name of the new group - fn create_group( + async fn create_group( &mut self, name: String, ) -> FlowyResult<(Option, Option)>; @@ -179,7 +180,10 @@ pub trait GroupController: Send + Sync { /// successful. 
/// /// * `group_id`: the id of the group to be deleted - fn delete_group(&mut self, group_id: &str) -> FlowyResult<(Vec, Option)>; + async fn delete_group( + &mut self, + group_id: &str, + ) -> FlowyResult<(Vec, Option)>; /// Updates the name and/or visibility of groups. /// @@ -187,7 +191,7 @@ pub trait GroupController: Send + Sync { /// in the field type option data. /// /// * `changesets`: list of changesets to be made to one or more groups - fn apply_group_changeset( + async fn apply_group_changeset( &mut self, changesets: &[GroupChangeset], ) -> FlowyResult<(Vec, Option)>; diff --git a/frontend/rust-lib/flowy-database2/src/services/group/configuration.rs b/frontend/rust-lib/flowy-database2/src/services/group/configuration.rs index 980fee21b2..ba949de745 100644 --- a/frontend/rust-lib/flowy-database2/src/services/group/configuration.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/configuration.rs @@ -1,3 +1,4 @@ +use async_trait::async_trait; use std::fmt::Formatter; use std::marker::PhantomData; use std::sync::Arc; @@ -10,7 +11,6 @@ use tracing::event; use flowy_error::{FlowyError, FlowyResult}; use lib_dispatch::prelude::af_spawn; -use lib_infra::future::Fut; use crate::entities::{GroupChangesPB, GroupPB, InsertedGroupPB}; use crate::services::field::RowSingleCellData; @@ -18,12 +18,14 @@ use crate::services::group::{ default_group_setting, GeneratedGroups, Group, GroupChangeset, GroupData, GroupSetting, }; +#[async_trait] pub trait GroupContextDelegate: Send + Sync + 'static { - fn get_group_setting(&self, view_id: &str) -> Fut>>; + async fn get_group_setting(&self, view_id: &str) -> Option>; - fn get_configuration_cells(&self, view_id: &str, field_id: &str) -> Fut>; + async fn get_configuration_cells(&self, view_id: &str, field_id: &str) -> Vec; - fn save_configuration(&self, view_id: &str, group_setting: GroupSetting) -> Fut>; + async fn save_configuration(&self, view_id: &str, group_setting: GroupSetting) + -> FlowyResult<()>; } impl 
std::fmt::Display for GroupControllerContext { diff --git a/frontend/rust-lib/flowy-database2/src/services/group/controller.rs b/frontend/rust-lib/flowy-database2/src/services/group/controller.rs index a918e7f7c2..bad911bcde 100644 --- a/frontend/rust-lib/flowy-database2/src/services/group/controller.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/controller.rs @@ -1,10 +1,10 @@ +use async_trait::async_trait; use std::marker::PhantomData; use std::sync::Arc; use collab_database::fields::{Field, TypeOptionData}; use collab_database::rows::{Cells, Row, RowDetail, RowId}; use futures::executor::block_on; -use lib_infra::future::Fut; use serde::de::DeserializeOwned; use serde::Serialize; @@ -23,10 +23,11 @@ use crate::services::group::configuration::GroupControllerContext; use crate::services::group::entities::GroupData; use crate::services::group::{GroupChangeset, GroupsBuilder, MoveGroupRowContext}; +#[async_trait] pub trait GroupControllerDelegate: Send + Sync + 'static { - fn get_field(&self, field_id: &str) -> Option; + async fn get_field(&self, field_id: &str) -> Option; - fn get_all_rows(&self, view_id: &str) -> Fut>>; + async fn get_all_rows(&self, view_id: &str) -> Vec>; } /// [BaseGroupController] is a generic group controller that provides customized implementations @@ -75,10 +76,11 @@ where }) } - pub fn get_grouping_field_type_option(&self) -> Option { + pub async fn get_grouping_field_type_option(&self) -> Option { self .delegate .get_field(&self.grouping_field_id) + .await .and_then(|field| field.get_type_option::(FieldType::from(field.field_type))) } @@ -154,6 +156,7 @@ where } } +#[async_trait] impl GroupController for BaseGroupController where P: CellProtobufBlobParser::CellProtobufType>, @@ -215,11 +218,11 @@ where Ok(()) } - fn create_group( + async fn create_group( &mut self, name: String, ) -> FlowyResult<(Option, Option)> { - ::create_group(self, name) + ::create_group(self, name).await } fn move_group(&mut self, from_group_id: 
&str, to_group_id: &str) -> FlowyResult<()> { @@ -373,7 +376,10 @@ where Ok(None) } - fn delete_group(&mut self, group_id: &str) -> FlowyResult<(Vec, Option)> { + async fn delete_group( + &mut self, + group_id: &str, + ) -> FlowyResult<(Vec, Option)> { let group = if group_id != self.get_grouping_field_id() { self.get_group(group_id) } else { @@ -387,14 +393,14 @@ where .iter() .map(|row| row.row.id.clone()) .collect(); - let type_option_data = ::delete_group(self, group_id)?; + let type_option_data = ::delete_group(self, group_id).await?; Ok((row_ids, type_option_data)) }, None => Ok((vec![], None)), } } - fn apply_group_changeset( + async fn apply_group_changeset( &mut self, changeset: &[GroupChangeset], ) -> FlowyResult<(Vec, Option)> { @@ -404,7 +410,7 @@ where } // update group name - let type_option = self.get_grouping_field_type_option().ok_or_else(|| { + let type_option = self.get_grouping_field_type_option().await.ok_or_else(|| { FlowyError::internal().with_context("Failed to get grouping field type option") })?; diff --git a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/checkbox_controller.rs b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/checkbox_controller.rs index a3057b24a0..62896e6a29 100644 --- a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/checkbox_controller.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/checkbox_controller.rs @@ -25,14 +25,14 @@ pub type CheckboxGroupController = BaseGroupController; pub type CheckboxGroupControllerContext = GroupControllerContext; + +#[async_trait] impl GroupCustomize for CheckboxGroupController { type GroupTypeOption = CheckboxTypeOption; fn placeholder_cell(&self) -> Option { - Some( - new_cell_builder(FieldType::Checkbox) - .insert_str_value("data", UNCHECK) - .build(), - ) + let mut cell = new_cell_builder(FieldType::Checkbox); + cell.insert("data".into(), UNCHECK.into()); + Some(cell) } fn can_group( @@ 
-129,7 +129,7 @@ impl GroupCustomize for CheckboxGroupController { group_changeset } - fn delete_group(&mut self, _group_id: &str) -> FlowyResult> { + async fn delete_group(&mut self, _group_id: &str) -> FlowyResult> { Ok(None) } diff --git a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/date_controller.rs b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/date_controller.rs index 1402793264..9644f918a3 100644 --- a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/date_controller.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/date_controller.rs @@ -53,15 +53,14 @@ pub type DateGroupController = pub type DateGroupControllerContext = GroupControllerContext; +#[async_trait] impl GroupCustomize for DateGroupController { type GroupTypeOption = DateTypeOption; fn placeholder_cell(&self) -> Option { - Some( - new_cell_builder(FieldType::DateTime) - .insert_str_value("data", "") - .build(), - ) + let mut cell = new_cell_builder(FieldType::DateTime); + cell.insert("data".into(), "".into()); + Some(cell) } fn can_group( @@ -214,7 +213,7 @@ impl GroupCustomize for DateGroupController { deleted_group } - fn delete_group(&mut self, group_id: &str) -> FlowyResult> { + async fn delete_group(&mut self, group_id: &str) -> FlowyResult> { self.context.delete_group(group_id)?; Ok(None) } diff --git a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/default_controller.rs b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/default_controller.rs index bcfd48bc09..a652e7e24c 100644 --- a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/default_controller.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/default_controller.rs @@ -1,3 +1,4 @@ +use async_trait::async_trait; use std::sync::Arc; use collab_database::fields::{Field, TypeOptionData}; @@ -38,6 +39,7 @@ impl DefaultGroupController { } } 
+#[async_trait] impl GroupController for DefaultGroupController { fn get_grouping_field_id(&self) -> &str { &self.field_id @@ -58,7 +60,7 @@ impl GroupController for DefaultGroupController { Ok(()) } - fn create_group( + async fn create_group( &mut self, _name: String, ) -> FlowyResult<(Option, Option)> { @@ -125,11 +127,14 @@ impl GroupController for DefaultGroupController { Ok(None) } - fn delete_group(&mut self, _group_id: &str) -> FlowyResult<(Vec, Option)> { + async fn delete_group( + &mut self, + _group_id: &str, + ) -> FlowyResult<(Vec, Option)> { Ok((vec![], None)) } - fn apply_group_changeset( + async fn apply_group_changeset( &mut self, _changeset: &[GroupChangeset], ) -> FlowyResult<(Vec, Option)> { diff --git a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/multi_select_controller.rs b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/multi_select_controller.rs index cae19109f6..752679ae50 100644 --- a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/multi_select_controller.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/multi_select_controller.rs @@ -31,6 +31,7 @@ pub type MultiSelectGroupController = BaseGroupController< SelectOptionCellDataParser, >; +#[async_trait] impl GroupCustomize for MultiSelectGroupController { type GroupTypeOption = MultiSelectTypeOption; @@ -43,11 +44,9 @@ impl GroupCustomize for MultiSelectGroupController { } fn placeholder_cell(&self) -> Option { - Some( - new_cell_builder(FieldType::MultiSelect) - .insert_str_value("data", "") - .build(), - ) + let mut cell = new_cell_builder(FieldType::MultiSelect); + cell.insert("data".into(), "".into()); + Some(cell) } fn add_or_remove_row_when_cell_changed( @@ -88,11 +87,11 @@ impl GroupCustomize for MultiSelectGroupController { group_changeset } - fn create_group( + async fn create_group( 
&mut self, name: String, ) -> FlowyResult<(Option, Option)> { - let mut new_type_option = self.get_grouping_field_type_option().ok_or_else(|| { + let mut new_type_option = self.get_grouping_field_type_option().await.ok_or_else(|| { FlowyError::internal().with_context("Failed to get grouping field type option") })?; let new_select_option = new_type_option.create_option(&name); @@ -104,8 +103,8 @@ impl GroupCustomize for MultiSelectGroupController { Ok((Some(new_type_option.into()), Some(inserted_group_pb))) } - fn delete_group(&mut self, group_id: &str) -> FlowyResult> { - let mut new_type_option = self.get_grouping_field_type_option().ok_or_else(|| { + async fn delete_group(&mut self, group_id: &str) -> FlowyResult> { + let mut new_type_option = self.get_grouping_field_type_option().await.ok_or_else(|| { FlowyError::internal().with_context("Failed to get grouping field type option") })?; if let Some(option_index) = new_type_option diff --git a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/single_select_controller.rs b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/single_select_controller.rs index d26ef50b70..b73606cbc4 100644 --- a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/single_select_controller.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/select_option_controller/single_select_controller.rs @@ -33,6 +33,7 @@ pub type SingleSelectGroupController = BaseGroupController< SelectOptionCellDataParser, >; +#[async_trait] impl GroupCustomize for SingleSelectGroupController { type GroupTypeOption = SingleSelectTypeOption; @@ -45,11 +46,9 @@ impl GroupCustomize for SingleSelectGroupController { } fn placeholder_cell(&self) -> Option { - Some( - new_cell_builder(FieldType::SingleSelect) - .insert_str_value("data", "") - .build(), - ) + let mut cell = new_cell_builder(FieldType::SingleSelect); 
+ cell.insert("data".into(), "".into()); + Some(cell) } fn add_or_remove_row_when_cell_changed( @@ -90,11 +89,11 @@ impl GroupCustomize for SingleSelectGroupController { group_changeset } - fn create_group( + async fn create_group( &mut self, name: String, ) -> FlowyResult<(Option, Option)> { - let mut new_type_option = self.get_grouping_field_type_option().ok_or_else(|| { + let mut new_type_option = self.get_grouping_field_type_option().await.ok_or_else(|| { FlowyError::internal().with_context("Failed to get grouping field type option") })?; let new_select_option = new_type_option.create_option(&name); @@ -106,8 +105,8 @@ impl GroupCustomize for SingleSelectGroupController { Ok((Some(new_type_option.into()), Some(inserted_group_pb))) } - fn delete_group(&mut self, group_id: &str) -> FlowyResult> { - let mut new_type_option = self.get_grouping_field_type_option().ok_or_else(|| { + async fn delete_group(&mut self, group_id: &str) -> FlowyResult> { + let mut new_type_option = self.get_grouping_field_type_option().await.ok_or_else(|| { FlowyError::internal().with_context("Failed to get grouping field type option") })?; if let Some(option_index) = new_type_option diff --git a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/url_controller.rs b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/url_controller.rs index 9d9a0468cb..de8e3f26ce 100644 --- a/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/url_controller.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/controller_impls/url_controller.rs @@ -27,15 +27,14 @@ pub type URLGroupController = pub type URLGroupControllerContext = GroupControllerContext; +#[async_trait] impl GroupCustomize for URLGroupController { type GroupTypeOption = URLTypeOption; fn placeholder_cell(&self) -> Option { - Some( - new_cell_builder(FieldType::URL) - .insert_str_value("data", "") - .build(), - ) + let mut cell = new_cell_builder(FieldType::URL); + 
cell.insert("data".into(), "".into()); + Some(cell) } fn can_group( @@ -174,7 +173,7 @@ impl GroupCustomize for URLGroupController { deleted_group } - fn delete_group(&mut self, group_id: &str) -> FlowyResult> { + async fn delete_group(&mut self, group_id: &str) -> FlowyResult> { self.context.delete_group(group_id)?; Ok(None) } diff --git a/frontend/rust-lib/flowy-database2/src/services/group/entities.rs b/frontend/rust-lib/flowy-database2/src/services/group/entities.rs index 12692fd812..cfb5de588e 100644 --- a/frontend/rust-lib/flowy-database2/src/services/group/entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/group/entities.rs @@ -1,16 +1,20 @@ -use anyhow::bail; -use collab::core::any_map::AnyMapExtension; +use collab::preclude::encoding::serde::{from_any, to_any}; +use collab::preclude::Any; use collab_database::database::gen_database_group_id; use collab_database::rows::{RowDetail, RowId}; use collab_database::views::{GroupMap, GroupMapBuilder, GroupSettingBuilder, GroupSettingMap}; use serde::{Deserialize, Serialize}; +use std::sync::Arc; -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Deserialize)] pub struct GroupSetting { pub id: String, pub field_id: String, + #[serde(rename = "ty")] pub field_type: i64, + #[serde(default)] pub groups: Vec, + #[serde(default)] pub content: String, } @@ -44,38 +48,20 @@ impl TryFrom for GroupSetting { type Error = anyhow::Error; fn try_from(value: GroupSettingMap) -> Result { - match ( - value.get_str_value(GROUP_ID), - value.get_str_value(FIELD_ID), - value.get_i64_value(FIELD_TYPE), - ) { - (Some(id), Some(field_id), Some(field_type)) => { - let content = value.get_str_value(CONTENT).unwrap_or_default(); - let groups = value.try_get_array(GROUPS); - Ok(Self { - id, - field_id, - field_type, - groups, - content, - }) - }, - _ => { - bail!("Invalid group setting data") - }, - } + from_any(&Any::from(value)).map_err(|e| e.into()) } } impl From for GroupSettingMap { fn from(setting: 
GroupSetting) -> Self { - GroupSettingBuilder::new() - .insert_str_value(GROUP_ID, setting.id) - .insert_str_value(FIELD_ID, setting.field_id) - .insert_i64_value(FIELD_TYPE, setting.field_type) - .insert_maps(GROUPS, setting.groups) - .insert_str_value(CONTENT, setting.content) - .build() + let groups = to_any(&setting.groups).unwrap_or_else(|_| Any::Array(Arc::from([]))); + GroupSettingBuilder::from([ + (GROUP_ID.into(), setting.id.into()), + (FIELD_ID.into(), setting.field_id.into()), + (FIELD_TYPE.into(), setting.field_type.into()), + (GROUPS.into(), groups), + (CONTENT.into(), setting.content.into()), + ]) } } @@ -90,22 +76,16 @@ impl TryFrom for Group { type Error = anyhow::Error; fn try_from(value: GroupMap) -> Result { - match value.get_str_value("id") { - None => bail!("Invalid group data"), - Some(id) => { - let visible = value.get_bool_value("visible").unwrap_or_default(); - Ok(Self { id, visible }) - }, - } + from_any(&Any::from(value)).map_err(|e| e.into()) } } impl From for GroupMap { fn from(group: Group) -> Self { - GroupMapBuilder::new() - .insert_str_value("id", group.id) - .insert_bool_value("visible", group.visible) - .build() + GroupMapBuilder::from([ + ("id".into(), group.id.into()), + ("visible".into(), group.visible.into()), + ]) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/setting/entities.rs b/frontend/rust-lib/flowy-database2/src/services/setting/entities.rs index 7cfe093725..5a71b58127 100644 --- a/frontend/rust-lib/flowy-database2/src/services/setting/entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/setting/entities.rs @@ -1,50 +1,41 @@ -use collab::core::any_map::AnyMapExtension; +use collab::preclude::encoding::serde::from_any; +use collab::preclude::Any; use collab_database::views::{LayoutSetting, LayoutSettingBuilder}; use serde::{Deserialize, Serialize}; use serde_repr::*; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct CalendarLayoutSetting { + #[serde(default)] pub layout_ty: 
CalendarLayout, + #[serde(default)] pub first_day_of_week: i32, + #[serde(default)] pub show_weekends: bool, + #[serde(default)] pub show_week_numbers: bool, + #[serde(default)] pub field_id: String, } impl From for CalendarLayoutSetting { fn from(setting: LayoutSetting) -> Self { - let layout_ty = setting - .get_i64_value("layout_ty") - .map(CalendarLayout::from) - .unwrap_or_default(); - let first_day_of_week = setting - .get_i64_value("first_day_of_week") - .unwrap_or(DEFAULT_FIRST_DAY_OF_WEEK as i64) as i32; - let show_weekends = setting.get_bool_value("show_weekends").unwrap_or_default(); - let show_week_numbers = setting - .get_bool_value("show_week_numbers") - .unwrap_or_default(); - let field_id = setting.get_str_value("field_id").unwrap_or_default(); - Self { - layout_ty, - first_day_of_week, - show_weekends, - show_week_numbers, - field_id, - } + from_any(&Any::from(setting)).unwrap() } } impl From for LayoutSetting { fn from(setting: CalendarLayoutSetting) -> Self { - LayoutSettingBuilder::new() - .insert_i64_value("layout_ty", setting.layout_ty.value()) - .insert_i64_value("first_day_of_week", setting.first_day_of_week as i64) - .insert_bool_value("show_week_numbers", setting.show_week_numbers) - .insert_bool_value("show_weekends", setting.show_weekends) - .insert_str_value("field_id", setting.field_id) - .build() + LayoutSettingBuilder::from([ + ("layout_ty".into(), setting.layout_ty.value().into()), + ( + "first_day_of_week".into(), + (setting.first_day_of_week as i64).into(), + ), + ("show_week_numbers".into(), setting.show_week_numbers.into()), + ("show_weekends".into(), setting.show_weekends.into()), + ("field_id".into(), setting.field_id.into()), + ]) } } @@ -90,9 +81,11 @@ pub const DEFAULT_FIRST_DAY_OF_WEEK: i32 = 0; pub const DEFAULT_SHOW_WEEKENDS: bool = true; pub const DEFAULT_SHOW_WEEK_NUMBERS: bool = true; -#[derive(Debug, Clone, Default)] +#[derive(Debug, Clone, Default, Deserialize)] pub struct BoardLayoutSetting { + #[serde(default)] pub 
hide_ungrouped_column: bool, + #[serde(default)] pub collapse_hidden_groups: bool, } @@ -104,22 +97,21 @@ impl BoardLayoutSetting { impl From for BoardLayoutSetting { fn from(setting: LayoutSetting) -> Self { - Self { - hide_ungrouped_column: setting - .get_bool_value("hide_ungrouped_column") - .unwrap_or_default(), - collapse_hidden_groups: setting - .get_bool_value("collapse_hidden_groups") - .unwrap_or_default(), - } + from_any(&Any::from(setting)).unwrap() } } impl From for LayoutSetting { fn from(setting: BoardLayoutSetting) -> Self { - LayoutSettingBuilder::new() - .insert_bool_value("hide_ungrouped_column", setting.hide_ungrouped_column) - .insert_bool_value("collapse_hidden_groups", setting.collapse_hidden_groups) - .build() + LayoutSettingBuilder::from([ + ( + "hide_ungrouped_column".into(), + setting.hide_ungrouped_column.into(), + ), + ( + "collapse_hidden_groups".into(), + setting.collapse_hidden_groups.into(), + ), + ]) } } diff --git a/frontend/rust-lib/flowy-database2/src/services/share/csv/export.rs b/frontend/rust-lib/flowy-database2/src/services/share/csv/export.rs index 8cb59a1872..3a3a63249b 100644 --- a/frontend/rust-lib/flowy-database2/src/services/share/csv/export.rs +++ b/frontend/rust-lib/flowy-database2/src/services/share/csv/export.rs @@ -21,7 +21,11 @@ pub enum CSVFormat { pub struct CSVExport; impl CSVExport { - pub fn export_database(&self, database: &Database, style: CSVFormat) -> FlowyResult { + pub async fn export_database( + &self, + database: &Database, + style: CSVFormat, + ) -> FlowyResult { let mut wtr = csv::Writer::from_writer(vec![]); let inline_view_id = database.get_inline_view_id(); let fields = database.get_fields_in_view(&inline_view_id, None); @@ -43,7 +47,7 @@ impl CSVExport { fields.into_iter().for_each(|field| { field_by_field_id.insert(field.id.clone(), field); }); - let rows = database.get_rows_for_view(&inline_view_id); + let rows = database.get_rows_for_view(&inline_view_id).await; let stringify = |cell: &Cell, 
field: &Field, style: CSVFormat| match style { CSVFormat::Original => stringify_cell(cell, field), diff --git a/frontend/rust-lib/flowy-database2/src/services/share/csv/import.rs b/frontend/rust-lib/flowy-database2/src/services/share/csv/import.rs index 531401ea87..8cf6aa4e23 100644 --- a/frontend/rust-lib/flowy-database2/src/services/share/csv/import.rs +++ b/frontend/rust-lib/flowy-database2/src/services/share/csv/import.rs @@ -109,17 +109,18 @@ fn database_from_fields_and_rows( let field_type = FieldType::from(field.field_type); // Make the cell based on the style. - let cell = match format { - CSVFormat::Original => new_cell_builder(field_type) - .insert_str_value(CELL_DATA, cell_content.to_string()) - .build(), - CSVFormat::META => match serde_json::from_str::(cell_content) { - Ok(cell) => cell, - Err(_) => new_cell_builder(field_type) - .insert_str_value(CELL_DATA, "".to_string()) - .build(), + let mut cell = new_cell_builder(field_type); + match format { + CSVFormat::Original => { + cell.insert(CELL_DATA.into(), cell_content.as_str().into()); }, - }; + CSVFormat::META => match serde_json::from_str::(cell_content) { + Ok(cell_json) => cell = cell_json, + Err(_) => { + cell.insert(CELL_DATA.into(), "".into()); + }, + }, + } params.cells.insert(field.id.clone(), cell); } } diff --git a/frontend/rust-lib/flowy-database2/src/services/sort/controller.rs b/frontend/rust-lib/flowy-database2/src/services/sort/controller.rs index 330f46f7f7..ebdb715e32 100644 --- a/frontend/rust-lib/flowy-database2/src/services/sort/controller.rs +++ b/frontend/rust-lib/flowy-database2/src/services/sort/controller.rs @@ -1,3 +1,4 @@ +use async_trait::async_trait; use std::cmp::Ordering; use std::collections::HashMap; use std::str::FromStr; @@ -10,7 +11,6 @@ use serde::{Deserialize, Serialize}; use tokio::sync::RwLock; use flowy_error::FlowyResult; -use lib_infra::future::Fut; use lib_infra::priority_task::{QualityOfService, Task, TaskContent, TaskDispatcher}; use 
crate::entities::SortChangesetNotificationPB; @@ -24,13 +24,14 @@ use crate::services::sort::{ InsertRowResult, ReorderAllRowsResult, ReorderSingleRowResult, Sort, SortChangeset, SortCondition, }; +#[async_trait] pub trait SortDelegate: Send + Sync { - fn get_sort(&self, view_id: &str, sort_id: &str) -> Fut>>; + async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option>; /// Returns all the rows after applying grid's filter - fn get_rows(&self, view_id: &str) -> Fut>>; - fn filter_row(&self, row_detail: &RowDetail) -> Fut; - fn get_field(&self, field_id: &str) -> Option; - fn get_fields(&self, view_id: &str, field_ids: Option>) -> Fut>; + async fn get_rows(&self, view_id: &str) -> Vec>; + async fn filter_row(&self, row_detail: &RowDetail) -> bool; + async fn get_field(&self, field_id: &str) -> Option; + async fn get_fields(&self, view_id: &str, field_ids: Option>) -> Vec; } pub struct SortController { diff --git a/frontend/rust-lib/flowy-database2/src/services/sort/entities.rs b/frontend/rust-lib/flowy-database2/src/services/sort/entities.rs index 9f9d37d4fb..9b5608761a 100644 --- a/frontend/rust-lib/flowy-database2/src/services/sort/entities.rs +++ b/frontend/rust-lib/flowy-database2/src/services/sort/entities.rs @@ -1,7 +1,7 @@ use std::cmp::Ordering; use anyhow::bail; -use collab::core::any_map::AnyMapExtension; +use collab::util::AnyMapExt; use collab_database::rows::{RowDetail, RowId}; use collab_database::views::{SortMap, SortMapBuilder}; @@ -20,10 +20,13 @@ impl TryFrom for Sort { type Error = anyhow::Error; fn try_from(value: SortMap) -> Result { - match (value.get_str_value(SORT_ID), value.get_str_value(FIELD_ID)) { + match ( + value.get_as::(SORT_ID), + value.get_as::(FIELD_ID), + ) { (Some(id), Some(field_id)) => { let condition = value - .get_i64_value(SORT_CONDITION) + .get_as::(SORT_CONDITION) .map(SortCondition::from) .unwrap_or_default(); Ok(Self { @@ -41,11 +44,11 @@ impl TryFrom for Sort { impl From for SortMap { fn from(data: Sort) -> Self 
{ - SortMapBuilder::new() - .insert_str_value(SORT_ID, data.id) - .insert_str_value(FIELD_ID, data.field_id) - .insert_i64_value(SORT_CONDITION, data.condition.value()) - .build() + SortMapBuilder::from([ + (SORT_ID.into(), data.id.into()), + (FIELD_ID.into(), data.field_id.into()), + (SORT_CONDITION.into(), data.condition.value().into()), + ]) } } diff --git a/frontend/rust-lib/flowy-database2/src/utils/cache.rs b/frontend/rust-lib/flowy-database2/src/utils/cache.rs index 5f9bda50c9..840bdbb1b4 100644 --- a/frontend/rust-lib/flowy-database2/src/utils/cache.rs +++ b/frontend/rust-lib/flowy-database2/src/utils/cache.rs @@ -1,23 +1,25 @@ -use parking_lot::RwLock; +use dashmap::mapref::one::{MappedRef, MappedRefMut}; +use dashmap::DashMap; use std::any::{type_name, Any}; -use std::collections::HashMap; use std::fmt::Debug; use std::hash::Hash; use std::sync::Arc; #[derive(Default, Debug)] /// The better option is use LRU cache -pub struct AnyTypeCache(HashMap); - -impl AnyTypeCache +pub struct AnyTypeCache(DashMap) where - TypeValueKey: Clone + Hash + Eq, + K: Clone + Hash + Eq; + +impl AnyTypeCache +where + K: Clone + Hash + Eq, { - pub fn new() -> Arc>> { - Arc::new(RwLock::new(AnyTypeCache(HashMap::default()))) + pub fn new() -> Arc> { + Arc::new(AnyTypeCache(DashMap::default())) } - pub fn insert(&mut self, key: &TypeValueKey, val: T) -> Option + pub fn insert(&self, key: &K, val: T) -> Option where T: 'static + Send + Sync, { @@ -27,31 +29,27 @@ where .and_then(downcast_owned) } - pub fn remove(&mut self, key: &TypeValueKey) { + pub fn remove(&self, key: &K) { self.0.remove(key); } - pub fn get(&self, key: &TypeValueKey) -> Option<&T> + pub fn get(&self, key: &K) -> Option> where T: 'static + Send + Sync, { - self - .0 - .get(key) - .and_then(|type_value| type_value.boxed.downcast_ref()) + let cell = self.0.get(key)?; + cell.try_map(|v| v.boxed.downcast_ref()).ok() } - pub fn get_mut(&mut self, key: &TypeValueKey) -> Option<&mut T> + pub fn get_mut(&self, key: 
&K) -> Option> where T: 'static + Send + Sync, { - self - .0 - .get_mut(key) - .and_then(|type_value| type_value.boxed.downcast_mut()) + let cell = self.0.get_mut(key)?; + cell.try_map(|v| v.boxed.downcast_mut()).ok() } - pub fn contains(&self, key: &TypeValueKey) -> bool { + pub fn contains(&self, key: &K) -> bool { self.0.contains_key(key) } @@ -65,7 +63,7 @@ fn downcast_owned(type_value: TypeValue) -> Option } #[derive(Debug)] -struct TypeValue { +pub struct TypeValue { boxed: Box, #[allow(dead_code)] ty: &'static str, diff --git a/frontend/rust-lib/flowy-database2/tests/database/block_test/row_test.rs b/frontend/rust-lib/flowy-database2/tests/database/block_test/row_test.rs index 648de5edc7..7ce0c31a4b 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/block_test/row_test.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/block_test/row_test.rs @@ -18,7 +18,7 @@ async fn created_at_field_test() { // Get created time of the new row. let row_detail = test.get_rows().await.last().cloned().unwrap(); - let updated_at_field = test.get_first_field(FieldType::CreatedTime); + let updated_at_field = test.get_first_field(FieldType::CreatedTime).await; let cell = test .editor .get_cell(&updated_at_field.id, &row_detail.row.id) @@ -35,7 +35,7 @@ async fn created_at_field_test() { async fn update_at_field_test() { let mut test = DatabaseRowTest::new().await; let row_detail = test.get_rows().await.remove(0); - let last_edit_field = test.get_first_field(FieldType::LastEditedTime); + let last_edit_field = test.get_first_field(FieldType::LastEditedTime).await; let cell = test .editor .get_cell(&last_edit_field.id, &row_detail.row.id) @@ -53,7 +53,7 @@ async fn update_at_field_test() { // Get the updated time of the row. 
let row_detail = test.get_rows().await.remove(0); - let last_edit_field = test.get_first_field(FieldType::LastEditedTime); + let last_edit_field = test.get_first_field(FieldType::LastEditedTime).await; let cell = test .editor .get_cell(&last_edit_field.id, &row_detail.row.id) diff --git a/frontend/rust-lib/flowy-database2/tests/database/cell_test/test.rs b/frontend/rust-lib/flowy-database2/tests/database/cell_test/test.rs index 1c1f633e47..8f1e52c7c1 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/cell_test/test.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/cell_test/test.rs @@ -14,7 +14,7 @@ use crate::database::cell_test::script::DatabaseCellTest; #[tokio::test] async fn grid_cell_update() { let mut test = DatabaseCellTest::new().await; - let fields = test.get_fields(); + let fields = test.get_fields().await; let rows = &test.row_details; let mut scripts = vec![]; @@ -76,7 +76,7 @@ async fn grid_cell_update() { #[tokio::test] async fn text_cell_data_test() { let test = DatabaseCellTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let cells = test .editor @@ -100,7 +100,7 @@ async fn text_cell_data_test() { #[tokio::test] async fn url_cell_data_test() { let test = DatabaseCellTest::new().await; - let url_field = test.get_first_field(FieldType::URL); + let url_field = test.get_first_field(FieldType::URL).await; let cells = test .editor .get_cells_for_field(&test.view_id, &url_field.id) @@ -122,7 +122,7 @@ async fn url_cell_data_test() { #[tokio::test] async fn update_updated_at_field_on_other_cell_update() { let mut test = DatabaseCellTest::new().await; - let updated_at_field = test.get_first_field(FieldType::LastEditedTime); + let updated_at_field = test.get_first_field(FieldType::LastEditedTime).await; let text_field = test .fields @@ -204,7 +204,7 @@ async fn update_updated_at_field_on_other_cell_update() { #[tokio::test] async fn 
time_cell_data_test() { let test = DatabaseCellTest::new().await; - let time_field = test.get_first_field(FieldType::Time); + let time_field = test.get_first_field(FieldType::Time).await; let cells = test .editor .get_cells_for_field(&test.view_id, &time_field.id) diff --git a/frontend/rust-lib/flowy-database2/tests/database/database_editor.rs b/frontend/rust-lib/flowy-database2/tests/database/database_editor.rs index 2d087cce00..c18fef66a2 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/database_editor.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/database_editor.rs @@ -76,11 +76,12 @@ impl DatabaseEditorTest { pub async fn new(sdk: EventIntegrationTest, test: ViewTest) -> Self { let editor = sdk .database_manager - .get_database_with_view_id(&test.child_view.id) + .get_database_editor_with_view_id(&test.child_view.id) .await .unwrap(); let fields = editor .get_fields(&test.child_view.id, None) + .await .into_iter() .map(Arc::new) .collect(); @@ -111,10 +112,11 @@ impl DatabaseEditorTest { self.editor.get_rows(&self.view_id).await.unwrap() } - pub fn get_field(&self, field_id: &str, field_type: FieldType) -> Field { + pub async fn get_field(&self, field_id: &str, field_type: FieldType) -> Field { self .editor .get_fields(&self.view_id, None) + .await .into_iter() .filter(|field| { let t_field_type = FieldType::from(field.field_type); @@ -127,10 +129,11 @@ impl DatabaseEditorTest { /// returns the first `Field` in the build-in test grid. /// Not support duplicate `FieldType` in test grid yet. 
- pub fn get_first_field(&self, field_type: FieldType) -> Field { + pub async fn get_first_field(&self, field_type: FieldType) -> Field { self .editor .get_fields(&self.view_id, None) + .await .into_iter() .filter(|field| { let t_field_type = FieldType::from(field.field_type); @@ -141,22 +144,22 @@ impl DatabaseEditorTest { .unwrap() } - pub fn get_fields(&self) -> Vec { - self.editor.get_fields(&self.view_id, None) + pub async fn get_fields(&self) -> Vec { + self.editor.get_fields(&self.view_id, None).await } - pub fn get_multi_select_type_option(&self, field_id: &str) -> Vec { + pub async fn get_multi_select_type_option(&self, field_id: &str) -> Vec { let field_type = FieldType::MultiSelect; - let field = self.get_field(field_id, field_type); + let field = self.get_field(field_id, field_type).await; let type_option = field .get_type_option::(field_type) .unwrap(); type_option.options } - pub fn get_single_select_type_option(&self, field_id: &str) -> Vec { + pub async fn get_single_select_type_option(&self, field_id: &str) -> Vec { let field_type = FieldType::SingleSelect; - let field = self.get_field(field_id, field_type); + let field = self.get_field(field_id, field_type).await; let type_option = field .get_type_option::(field_type) .unwrap(); @@ -164,18 +167,18 @@ impl DatabaseEditorTest { } #[allow(dead_code)] - pub fn get_checklist_type_option(&self, field_id: &str) -> ChecklistTypeOption { + pub async fn get_checklist_type_option(&self, field_id: &str) -> ChecklistTypeOption { let field_type = FieldType::Checklist; - let field = self.get_field(field_id, field_type); + let field = self.get_field(field_id, field_type).await; field .get_type_option::(field_type) .unwrap() } #[allow(dead_code)] - pub fn get_checkbox_type_option(&self, field_id: &str) -> CheckboxTypeOption { + pub async fn get_checkbox_type_option(&self, field_id: &str) -> CheckboxTypeOption { let field_type = FieldType::Checkbox; - let field = self.get_field(field_id, field_type); + let field = 
self.get_field(field_id, field_type).await; field .get_type_option::(field_type) .unwrap() @@ -190,6 +193,7 @@ impl DatabaseEditorTest { let field = self .editor .get_fields(&self.view_id, None) + .await .into_iter() .find(|field| field.id == field_id) .unwrap(); @@ -204,6 +208,7 @@ impl DatabaseEditorTest { let field = self .editor .get_fields(&self.view_id, None) + .await .iter() .find(|field| { let field_type = FieldType::from(field.field_type); @@ -225,6 +230,7 @@ impl DatabaseEditorTest { let field = self .editor .get_fields(&self.view_id, None) + .await .iter() .find(|field| { let field_type = FieldType::from(field.field_type); @@ -250,6 +256,7 @@ impl DatabaseEditorTest { let field = self .editor .get_fields(&self.view_id, None) + .await .iter() .find(|field| { let field_type = FieldType::from(field.field_type); @@ -277,7 +284,7 @@ impl DatabaseEditorTest { self .sdk .database_manager - .get_database(database_id) + .get_database_editor(database_id) .await .ok() } diff --git a/frontend/rust-lib/flowy-database2/tests/database/field_settings_test/test.rs b/frontend/rust-lib/flowy-database2/tests/database/field_settings_test/test.rs index b550567699..a378f4d90e 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/field_settings_test/test.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/field_settings_test/test.rs @@ -20,11 +20,12 @@ async fn get_default_board_field_settings() { let mut test = FieldSettingsTest::new_board().await; let non_primary_field_ids: Vec = test .get_fields() + .await .into_iter() .filter(|field| !field.is_primary) .map(|field| field.id) .collect(); - let primary_field_id = test.get_first_field(FieldType::RichText).id; + let primary_field_id = test.get_first_field(FieldType::RichText).await.id; test .assert_field_settings( non_primary_field_ids.clone(), @@ -47,11 +48,12 @@ async fn get_default_calendar_field_settings() { let mut test = FieldSettingsTest::new_calendar().await; let non_primary_field_ids: Vec = test 
.get_fields() + .await .into_iter() .filter(|field| !field.is_primary) .map(|field| field.id) .collect(); - let primary_field_id = test.get_first_field(FieldType::RichText).id; + let primary_field_id = test.get_first_field(FieldType::RichText).await.id; test .assert_field_settings( non_primary_field_ids.clone(), @@ -74,11 +76,12 @@ async fn update_field_settings_test() { let mut test = FieldSettingsTest::new_board().await; let non_primary_field_ids: Vec = test .get_fields() + .await .into_iter() .filter(|field| !field.is_primary) .map(|field| field.id) .collect(); - let primary_field_id = test.get_first_field(FieldType::RichText).id; + let primary_field_id = test.get_first_field(FieldType::RichText).await.id; test .assert_field_settings( diff --git a/frontend/rust-lib/flowy-database2/tests/database/field_test/script.rs b/frontend/rust-lib/flowy-database2/tests/database/field_test/script.rs index 554b5a7b21..bf93c130f8 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/field_test/script.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/field_test/script.rs @@ -64,19 +64,19 @@ impl DatabaseFieldTest { FieldScript::CreateField { params } => { self.field_count += 1; let _ = self.editor.create_field_with_type_option(params).await; - let fields = self.editor.get_fields(&self.view_id, None); + let fields = self.editor.get_fields(&self.view_id, None).await; assert_eq!(self.field_count, fields.len()); }, FieldScript::UpdateField { changeset: change } => { self.editor.update_field(change).await.unwrap(); }, FieldScript::DeleteField { field } => { - if self.editor.get_field(&field.id).is_some() { + if self.editor.get_field(&field.id).await.is_some() { self.field_count -= 1; } self.editor.delete_field(&field.id).await.unwrap(); - let fields = self.editor.get_fields(&self.view_id, None); + let fields = self.editor.get_fields(&self.view_id, None).await; assert_eq!(self.field_count, fields.len()); }, FieldScript::SwitchToField { @@ -95,7 +95,7 @@ impl 
DatabaseFieldTest { type_option, } => { // - let old_field = self.editor.get_field(&field_id).unwrap(); + let old_field = self.editor.get_field(&field_id).await.unwrap(); self .editor .update_field_type_option(&field_id, type_option, old_field) @@ -103,13 +103,13 @@ impl DatabaseFieldTest { .unwrap(); }, FieldScript::AssertFieldCount(count) => { - assert_eq!(self.get_fields().len(), count); + assert_eq!(self.get_fields().await.len(), count); }, FieldScript::AssertFieldTypeOptionEqual { field_index, expected_type_option_data, } => { - let fields = self.get_fields(); + let fields = self.get_fields().await; let field = &fields[field_index]; let type_option_data = field.get_any_type_option(field.field_type).unwrap(); assert_eq!(type_option_data, expected_type_option_data); @@ -119,7 +119,7 @@ impl DatabaseFieldTest { row_index, expected_content, } => { - let field = self.editor.get_field(&field_id).unwrap(); + let field = self.editor.get_field(&field_id).await.unwrap(); let rows = self.editor.get_rows(&self.view_id()).await.unwrap(); let row_detail = rows.get(row_index).unwrap(); diff --git a/frontend/rust-lib/flowy-database2/tests/database/field_test/test.rs b/frontend/rust-lib/flowy-database2/tests/database/field_test/test.rs index 7cd9f9f3d1..9e949b4965 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/field_test/test.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/field_test/test.rs @@ -85,7 +85,7 @@ async fn grid_update_field_with_empty_change() { let scripts = vec![CreateField { params }]; test.run_scripts(scripts).await; - let field = test.get_fields().pop().unwrap().clone(); + let field = test.get_fields().await.pop().unwrap().clone(); let changeset = FieldChangesetParams { field_id: field.id.clone(), view_id: test.view_id(), @@ -110,7 +110,7 @@ async fn grid_delete_field() { let scripts = vec![CreateField { params }]; test.run_scripts(scripts).await; - let field = test.get_fields().pop().unwrap(); + let field = 
test.get_fields().await.pop().unwrap(); let scripts = vec![ DeleteField { field }, AssertFieldCount(original_field_count), @@ -121,10 +121,10 @@ async fn grid_delete_field() { #[tokio::test] async fn grid_switch_from_select_option_to_checkbox_test() { let mut test = DatabaseFieldTest::new().await; - let field = test.get_first_field(FieldType::SingleSelect); + let field = test.get_first_field(FieldType::SingleSelect).await; // Update the type option data of single select option - let mut options = test.get_single_select_type_option(&field.id); + let mut options = test.get_single_select_type_option(&field.id).await; options.clear(); // Add a new option with name CHECK options.push(SelectOption { @@ -159,7 +159,7 @@ async fn grid_switch_from_select_option_to_checkbox_test() { #[tokio::test] async fn grid_switch_from_checkbox_to_select_option_test() { let mut test = DatabaseFieldTest::new().await; - let checkbox_field = test.get_first_field(FieldType::Checkbox).clone(); + let checkbox_field = test.get_first_field(FieldType::Checkbox).await.clone(); let scripts = vec![ // switch to single-select field type SwitchToField { @@ -181,7 +181,7 @@ async fn grid_switch_from_checkbox_to_select_option_test() { ]; test.run_scripts(scripts).await; - let options = test.get_single_select_type_option(&checkbox_field.id); + let options = test.get_single_select_type_option(&checkbox_field.id).await; assert_eq!(options.len(), 2); assert!(options.iter().any(|option| option.name == UNCHECK)); assert!(options.iter().any(|option| option.name == CHECK)); @@ -194,9 +194,9 @@ async fn grid_switch_from_checkbox_to_select_option_test() { #[tokio::test] async fn grid_switch_from_multi_select_to_text_test() { let mut test = DatabaseFieldTest::new().await; - let field_rev = test.get_first_field(FieldType::MultiSelect).clone(); + let field_rev = test.get_first_field(FieldType::MultiSelect).await.clone(); - let multi_select_type_option = test.get_multi_select_type_option(&field_rev.id); + let 
multi_select_type_option = test.get_multi_select_type_option(&field_rev.id).await; let script_switch_field = vec![SwitchToField { field_id: field_rev.id.clone(), @@ -225,7 +225,7 @@ async fn grid_switch_from_multi_select_to_text_test() { #[tokio::test] async fn grid_switch_from_checkbox_to_text_test() { let mut test = DatabaseFieldTest::new().await; - let field_rev = test.get_first_field(FieldType::Checkbox); + let field_rev = test.get_first_field(FieldType::Checkbox).await; let scripts = vec![ SwitchToField { @@ -252,7 +252,7 @@ async fn grid_switch_from_checkbox_to_text_test() { #[tokio::test] async fn grid_switch_from_date_to_text_test() { let mut test = DatabaseFieldTest::new().await; - let field = test.get_first_field(FieldType::DateTime).clone(); + let field = test.get_first_field(FieldType::DateTime).await.clone(); let scripts = vec![ SwitchToField { field_id: field.id.clone(), @@ -278,7 +278,7 @@ async fn grid_switch_from_date_to_text_test() { #[tokio::test] async fn grid_switch_from_number_to_text_test() { let mut test = DatabaseFieldTest::new().await; - let field = test.get_first_field(FieldType::Number).clone(); + let field = test.get_first_field(FieldType::Number).await.clone(); let scripts = vec![ SwitchToField { @@ -304,7 +304,7 @@ async fn grid_switch_from_number_to_text_test() { #[tokio::test] async fn grid_switch_from_checklist_to_text_test() { let mut test = DatabaseFieldTest::new().await; - let field_rev = test.get_first_field(FieldType::Checklist); + let field_rev = test.get_first_field(FieldType::Checklist).await; let scripts = vec![ SwitchToField { diff --git a/frontend/rust-lib/flowy-database2/tests/database/filter_test/checklist_filter_test.rs b/frontend/rust-lib/flowy-database2/tests/database/filter_test/checklist_filter_test.rs index 3da9cab5a2..3cc8452462 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/filter_test/checklist_filter_test.rs +++ 
b/frontend/rust-lib/flowy-database2/tests/database/filter_test/checklist_filter_test.rs @@ -61,7 +61,7 @@ async fn grid_filter_checklist_is_complete_test() { } async fn get_checklist_cell_options(test: &DatabaseFilterTest) -> Vec { - let field = test.get_first_field(FieldType::Checklist); + let field = test.get_first_field(FieldType::Checklist).await; let row_cell = test .editor .get_cell(&field.id, &test.row_details[0].row.id) diff --git a/frontend/rust-lib/flowy-database2/tests/database/filter_test/script.rs b/frontend/rust-lib/flowy-database2/tests/database/filter_test/script.rs index f2b58070e7..2f429752d6 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/filter_test/script.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/filter_test/script.rs @@ -194,7 +194,7 @@ impl DatabaseFilterTest { } => { self.subscribe_view_changed().await; self.assert_future_changed(changed).await; - let field = self.get_first_field(field_type); + let field = self.get_first_field(field_type).await; let params = FilterChangeset::Insert { parent_filter_id, data: FilterInner::Data { diff --git a/frontend/rust-lib/flowy-database2/tests/database/filter_test/select_option_filter_test.rs b/frontend/rust-lib/flowy-database2/tests/database/filter_test/select_option_filter_test.rs index eb808d0bc3..6cda4669f7 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/filter_test/select_option_filter_test.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/filter_test/select_option_filter_test.rs @@ -43,8 +43,8 @@ async fn grid_filter_multi_select_is_not_empty_test() { #[tokio::test] async fn grid_filter_multi_select_is_test() { let mut test = DatabaseFilterTest::new().await; - let field = test.get_first_field(FieldType::MultiSelect); - let mut options = test.get_multi_select_type_option(&field.id); + let field = test.get_first_field(FieldType::MultiSelect).await; + let mut options = test.get_multi_select_type_option(&field.id).await; let scripts = vec![ 
CreateDataFilter { parent_filter_id: None, @@ -63,8 +63,8 @@ async fn grid_filter_multi_select_is_test() { #[tokio::test] async fn grid_filter_multi_select_is_test2() { let mut test = DatabaseFilterTest::new().await; - let field = test.get_first_field(FieldType::MultiSelect); - let mut options = test.get_multi_select_type_option(&field.id); + let field = test.get_first_field(FieldType::MultiSelect).await; + let mut options = test.get_multi_select_type_option(&field.id).await; let scripts = vec![ CreateDataFilter { parent_filter_id: None, @@ -106,8 +106,8 @@ async fn grid_filter_single_select_is_empty_test() { #[tokio::test] async fn grid_filter_single_select_is_test() { let mut test = DatabaseFilterTest::new().await; - let field = test.get_first_field(FieldType::SingleSelect); - let mut options = test.get_single_select_type_option(&field.id); + let field = test.get_first_field(FieldType::SingleSelect).await; + let mut options = test.get_single_select_type_option(&field.id).await; let expected = 2; let row_count = test.row_details.len(); let scripts = vec![ @@ -131,9 +131,9 @@ async fn grid_filter_single_select_is_test() { #[tokio::test] async fn grid_filter_single_select_is_test2() { let mut test = DatabaseFilterTest::new().await; - let field = test.get_first_field(FieldType::SingleSelect); + let field = test.get_first_field(FieldType::SingleSelect).await; let row_details = test.get_rows().await; - let mut options = test.get_single_select_type_option(&field.id); + let mut options = test.get_single_select_type_option(&field.id).await; let option = options.remove(0); let row_count = test.row_details.len(); @@ -173,8 +173,8 @@ async fn grid_filter_single_select_is_test2() { #[tokio::test] async fn grid_filter_multi_select_contains_test() { let mut test = DatabaseFilterTest::new().await; - let field = test.get_first_field(FieldType::MultiSelect); - let mut options = test.get_multi_select_type_option(&field.id); + let field = 
test.get_first_field(FieldType::MultiSelect).await; + let mut options = test.get_multi_select_type_option(&field.id).await; let scripts = vec![ CreateDataFilter { parent_filter_id: None, @@ -193,8 +193,8 @@ async fn grid_filter_multi_select_contains_test() { #[tokio::test] async fn grid_filter_multi_select_contains_test2() { let mut test = DatabaseFilterTest::new().await; - let field = test.get_first_field(FieldType::MultiSelect); - let mut options = test.get_multi_select_type_option(&field.id); + let field = test.get_first_field(FieldType::MultiSelect).await; + let mut options = test.get_multi_select_type_option(&field.id).await; let scripts = vec![ CreateDataFilter { parent_filter_id: None, diff --git a/frontend/rust-lib/flowy-database2/tests/database/group_test/script.rs b/frontend/rust-lib/flowy-database2/tests/database/group_test/script.rs index 1fe883e041..d5312b2985 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/group_test/script.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/group_test/script.rs @@ -159,7 +159,7 @@ impl DatabaseGroupTest { let from_group = self.group_at_index(from_group_index).await; let to_group = self.group_at_index(to_group_index).await; let field_id = from_group.field_id; - let field = self.editor.get_field(&field_id).unwrap(); + let field = self.editor.get_field(&field_id).await.unwrap(); let field_type = FieldType::from(field.field_type); let cell = if to_group.is_default { @@ -203,7 +203,7 @@ impl DatabaseGroupTest { } => { let from_group = self.group_at_index(from_group_index).await; let field_id = from_group.field_id; - let field = self.editor.get_field(&field_id).unwrap(); + let field = self.editor.get_field(&field_id).await.unwrap(); let field_type = FieldType::from(field.field_type); let cell = match field_type { FieldType::URL => insert_url_cell(cell_data, &field), @@ -309,6 +309,7 @@ impl DatabaseGroupTest { self .inner .get_fields() + .await .into_iter() .find(|field| { let ft = 
FieldType::from(field.field_type); diff --git a/frontend/rust-lib/flowy-database2/tests/database/layout_test/script.rs b/frontend/rust-lib/flowy-database2/tests/database/layout_test/script.rs index 6800a7e4db..57f17b9870 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/layout_test/script.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/layout_test/script.rs @@ -36,7 +36,10 @@ impl DatabaseLayoutTest { } pub async fn get_first_date_field(&self) -> Field { - self.database_test.get_first_field(FieldType::DateTime) + self + .database_test + .get_first_field(FieldType::DateTime) + .await } async fn get_layout_setting( diff --git a/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/pre_fill_row_according_to_filter_test.rs b/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/pre_fill_row_according_to_filter_test.rs index b47bf2e99b..a15814f13d 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/pre_fill_row_according_to_filter_test.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/pre_fill_row_according_to_filter_test.rs @@ -17,7 +17,7 @@ use crate::database::pre_fill_cell_test::script::{ async fn according_to_text_contains_filter_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let scripts = vec![ InsertFilter { @@ -60,7 +60,7 @@ async fn according_to_text_contains_filter_test() { async fn according_to_empty_text_contains_filter_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let scripts = vec![ InsertFilter { @@ -95,7 +95,7 @@ async fn according_to_empty_text_contains_filter_test() { async fn according_to_text_is_not_empty_filter_test() { let mut test = 
DatabasePreFillRowCellTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let scripts = vec![ AssertRowCount(7), @@ -125,7 +125,7 @@ async fn according_to_text_is_not_empty_filter_test() { async fn according_to_checkbox_is_unchecked_filter_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let checkbox_field = test.get_first_field(FieldType::Checkbox); + let checkbox_field = test.get_first_field(FieldType::Checkbox).await; let scripts = vec![ AssertRowCount(7), @@ -162,7 +162,7 @@ async fn according_to_checkbox_is_unchecked_filter_test() { async fn according_to_checkbox_is_checked_filter_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let checkbox_field = test.get_first_field(FieldType::Checkbox); + let checkbox_field = test.get_first_field(FieldType::Checkbox).await; let scripts = vec![ AssertRowCount(7), @@ -207,7 +207,7 @@ async fn according_to_checkbox_is_checked_filter_test() { async fn according_to_date_time_is_filter_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let datetime_field = test.get_first_field(FieldType::DateTime); + let datetime_field = test.get_first_field(FieldType::DateTime).await; let scripts = vec![ AssertRowCount(7), @@ -254,7 +254,7 @@ async fn according_to_date_time_is_filter_test() { async fn according_to_invalid_date_time_is_filter_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let datetime_field = test.get_first_field(FieldType::DateTime); + let datetime_field = test.get_first_field(FieldType::DateTime).await; let scripts = vec![ AssertRowCount(7), @@ -290,8 +290,10 @@ async fn according_to_invalid_date_time_is_filter_test() { async fn according_to_select_option_is_filter_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let multi_select_field = test.get_first_field(FieldType::MultiSelect); - let options = 
test.get_multi_select_type_option(&multi_select_field.id); + let multi_select_field = test.get_first_field(FieldType::MultiSelect).await; + let options = test + .get_multi_select_type_option(&multi_select_field.id) + .await; let filtering_options = [options[1].clone(), options[2].clone()]; let ids = filtering_options @@ -343,8 +345,10 @@ async fn according_to_select_option_is_filter_test() { async fn according_to_select_option_contains_filter_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let multi_select_field = test.get_first_field(FieldType::MultiSelect); - let options = test.get_multi_select_type_option(&multi_select_field.id); + let multi_select_field = test.get_first_field(FieldType::MultiSelect).await; + let options = test + .get_multi_select_type_option(&multi_select_field.id) + .await; let filtering_options = [options[1].clone(), options[2].clone()]; let ids = filtering_options @@ -392,8 +396,10 @@ async fn according_to_select_option_contains_filter_test() { async fn according_to_select_option_is_not_empty_filter_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let multi_select_field = test.get_first_field(FieldType::MultiSelect); - let options = test.get_multi_select_type_option(&multi_select_field.id); + let multi_select_field = test.get_first_field(FieldType::MultiSelect).await; + let options = test + .get_multi_select_type_option(&multi_select_field.id) + .await; let stringified_expected = options.first().unwrap().name.clone(); diff --git a/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/pre_fill_row_with_payload_test.rs b/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/pre_fill_row_with_payload_test.rs index a67bad48f3..1cb004f5a3 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/pre_fill_row_with_payload_test.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/pre_fill_row_with_payload_test.rs @@ -16,7 +16,7 @@ use 
crate::database::pre_fill_cell_test::script::{ async fn row_data_payload_with_empty_hashmap_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let scripts = vec![ CreateRowWithPayload { @@ -47,7 +47,7 @@ async fn row_data_payload_with_empty_hashmap_test() { async fn row_data_payload_with_unknown_field_id_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let malformed_field_id = "this_field_id_will_never_exist"; let scripts = vec![ @@ -87,7 +87,7 @@ async fn row_data_payload_with_unknown_field_id_test() { async fn row_data_payload_with_empty_string_text_data_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let cell_data = ""; let scripts = vec![ @@ -119,7 +119,7 @@ async fn row_data_payload_with_empty_string_text_data_test() { async fn row_data_payload_with_text_data_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let cell_data = "sample cell data"; let scripts = vec![ @@ -151,9 +151,9 @@ async fn row_data_payload_with_text_data_test() { async fn row_data_payload_with_multi_text_data_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); - let number_field = test.get_first_field(FieldType::Number); - let url_field = test.get_first_field(FieldType::URL); + let text_field = test.get_first_field(FieldType::RichText).await; + let number_field = test.get_first_field(FieldType::Number).await; + let url_field = 
test.get_first_field(FieldType::URL).await; let text_cell_data = "sample cell data"; let number_cell_data = "1234"; @@ -214,7 +214,7 @@ async fn row_data_payload_with_multi_text_data_test() { async fn row_data_payload_with_date_time_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let date_field = test.get_first_field(FieldType::DateTime); + let date_field = test.get_first_field(FieldType::DateTime).await; let cell_data = "1710510086"; let scripts = vec![ @@ -246,7 +246,7 @@ async fn row_data_payload_with_date_time_test() { async fn row_data_payload_with_invalid_date_time_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let date_field = test.get_first_field(FieldType::DateTime); + let date_field = test.get_first_field(FieldType::DateTime).await; let cell_data = DateCellData { timestamp: Some(1710510086), ..Default::default() @@ -276,7 +276,7 @@ async fn row_data_payload_with_invalid_date_time_test() { async fn row_data_payload_with_checkbox_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let checkbox_field = test.get_first_field(FieldType::Checkbox); + let checkbox_field = test.get_first_field(FieldType::Checkbox).await; let cell_data = "Yes"; let scripts = vec![ @@ -308,8 +308,10 @@ async fn row_data_payload_with_checkbox_test() { async fn row_data_payload_with_select_option_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let multi_select_field = test.get_first_field(FieldType::MultiSelect); - let options = test.get_multi_select_type_option(&multi_select_field.id); + let multi_select_field = test.get_first_field(FieldType::MultiSelect).await; + let options = test + .get_multi_select_type_option(&multi_select_field.id) + .await; let ids = options .iter() @@ -352,8 +354,10 @@ async fn row_data_payload_with_select_option_test() { async fn row_data_payload_with_invalid_select_option_id_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let multi_select_field = 
test.get_first_field(FieldType::MultiSelect); - let mut options = test.get_multi_select_type_option(&multi_select_field.id); + let multi_select_field = test.get_first_field(FieldType::MultiSelect).await; + let mut options = test + .get_multi_select_type_option(&multi_select_field.id) + .await; let first_id = options.swap_remove(0).id; let ids = [first_id.clone(), "nonsense".to_string()].join(SELECTION_IDS_SEPARATOR); @@ -386,8 +390,10 @@ async fn row_data_payload_with_invalid_select_option_id_test() { async fn row_data_payload_with_too_many_select_option_test() { let mut test = DatabasePreFillRowCellTest::new().await; - let single_select_field = test.get_first_field(FieldType::SingleSelect); - let mut options = test.get_single_select_type_option(&single_select_field.id); + let single_select_field = test.get_first_field(FieldType::SingleSelect).await; + let mut options = test + .get_single_select_type_option(&single_select_field.id) + .await; let ids = options .iter() diff --git a/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/script.rs b/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/script.rs index e41e42207e..6b524fdf15 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/script.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/pre_fill_cell_test/script.rs @@ -106,7 +106,7 @@ impl DatabasePreFillRowCellTest { row_index, expected_content, } => { - let field = self.editor.get_field(&field_id).unwrap(); + let field = self.editor.get_field(&field_id).await.unwrap(); let rows = self.editor.get_rows(&self.view_id).await.unwrap(); let row_detail = rows.get(row_index).unwrap(); diff --git a/frontend/rust-lib/flowy-database2/tests/database/share_test/export_test.rs b/frontend/rust-lib/flowy-database2/tests/database/share_test/export_test.rs index 3fbb0aafe2..02f4f135ca 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/share_test/export_test.rs +++ 
b/frontend/rust-lib/flowy-database2/tests/database/share_test/export_test.rs @@ -32,7 +32,7 @@ async fn export_and_then_import_meta_csv_test() { let result = test.import(csv_1.clone(), format).await; let database = test.get_database(&result.database_id).await.unwrap(); - let fields = database.get_fields(&result.view_id, None); + let fields = database.get_fields(&result.view_id, None).await; let rows = database.get_rows(&result.view_id).await.unwrap(); assert_eq!(fields[0].field_type, 0); assert_eq!(fields[1].field_type, 1); @@ -111,7 +111,7 @@ async fn history_database_import_test() { let result = test.import(csv.to_string(), format).await; let database = test.get_database(&result.database_id).await.unwrap(); - let fields = database.get_fields(&result.view_id, None); + let fields = database.get_fields(&result.view_id, None).await; let rows = database.get_rows(&result.view_id).await.unwrap(); assert_eq!(fields[0].field_type, 0); assert_eq!(fields[1].field_type, 1); diff --git a/frontend/rust-lib/flowy-database2/tests/database/sort_test/multi_sort_test.rs b/frontend/rust-lib/flowy-database2/tests/database/sort_test/multi_sort_test.rs index 7fe1874984..d7fe529d13 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/sort_test/multi_sort_test.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/sort_test/multi_sort_test.rs @@ -7,8 +7,8 @@ use crate::database::sort_test::script::SortScript::*; #[tokio::test] async fn sort_checkbox_and_then_text_by_descending_test() { let mut test = DatabaseSortTest::new().await; - let checkbox_field = test.get_first_field(FieldType::Checkbox); - let text_field = test.get_first_field(FieldType::RichText); + let checkbox_field = test.get_first_field(FieldType::Checkbox).await; + let text_field = test.get_first_field(FieldType::RichText).await; let scripts = vec![ AssertCellContentOrder { field_id: checkbox_field.id.clone(), @@ -51,8 +51,8 @@ async fn sort_checkbox_and_then_text_by_descending_test() { #[tokio::test] async fn 
reorder_sort_test() { let mut test = DatabaseSortTest::new().await; - let checkbox_field = test.get_first_field(FieldType::Checkbox); - let text_field = test.get_first_field(FieldType::RichText); + let checkbox_field = test.get_first_field(FieldType::Checkbox).await; + let text_field = test.get_first_field(FieldType::RichText).await; // Use the same sort set up as above let scripts = vec![ AssertCellContentOrder { diff --git a/frontend/rust-lib/flowy-database2/tests/database/sort_test/script.rs b/frontend/rust-lib/flowy-database2/tests/database/sort_test/script.rs index a6b99dc99c..e95deaa187 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/sort_test/script.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/sort_test/script.rs @@ -118,7 +118,7 @@ impl DatabaseSortTest { SortScript::AssertCellContentOrder { field_id, orders } => { let mut cells = vec![]; let rows = self.editor.get_rows(&self.view_id).await.unwrap(); - let field = self.editor.get_field(&field_id).unwrap(); + let field = self.editor.get_field(&field_id).await.unwrap(); for row_detail in rows { if let Some(cell) = row_detail.row.cells.get(&field_id) { let content = stringify_cell(cell, &field); diff --git a/frontend/rust-lib/flowy-database2/tests/database/sort_test/single_sort_test.rs b/frontend/rust-lib/flowy-database2/tests/database/sort_test/single_sort_test.rs index 63f3b08422..77ce14458f 100644 --- a/frontend/rust-lib/flowy-database2/tests/database/sort_test/single_sort_test.rs +++ b/frontend/rust-lib/flowy-database2/tests/database/sort_test/single_sort_test.rs @@ -6,7 +6,7 @@ use crate::database::sort_test::script::{DatabaseSortTest, SortScript::*}; #[tokio::test] async fn sort_text_by_ascending_test() { let mut test = DatabaseSortTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let scripts = vec![ AssertCellContentOrder { field_id: text_field.id.clone(), @@ -27,7 +27,7 @@ async 
fn sort_text_by_ascending_test() { #[tokio::test] async fn sort_text_by_descending_test() { let mut test = DatabaseSortTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let scripts = vec![ AssertCellContentOrder { field_id: text_field.id.clone(), @@ -48,7 +48,7 @@ async fn sort_text_by_descending_test() { #[tokio::test] async fn sort_change_notification_by_update_text_test() { let mut test = DatabaseSortTest::new().await; - let text_field = test.get_first_field(FieldType::RichText).clone(); + let text_field = test.get_first_field(FieldType::RichText).await.clone(); let scripts = vec![ AssertCellContentOrder { field_id: text_field.id.clone(), @@ -84,7 +84,7 @@ async fn sort_change_notification_by_update_text_test() { #[tokio::test] async fn sort_after_new_row_test() { let mut test = DatabaseSortTest::new().await; - let checkbox_field = test.get_first_field(FieldType::Checkbox); + let checkbox_field = test.get_first_field(FieldType::Checkbox).await; let scripts = vec![ AssertCellContentOrder { field_id: checkbox_field.id.clone(), @@ -110,7 +110,7 @@ async fn sort_after_new_row_test() { #[tokio::test] async fn sort_text_by_ascending_and_delete_sort_test() { let mut test = DatabaseSortTest::new().await; - let text_field = test.get_first_field(FieldType::RichText); + let text_field = test.get_first_field(FieldType::RichText).await; let scripts = vec![ InsertSort { field: text_field.clone(), @@ -137,7 +137,7 @@ async fn sort_text_by_ascending_and_delete_sort_test() { #[tokio::test] async fn sort_checkbox_by_ascending_test() { let mut test = DatabaseSortTest::new().await; - let checkbox_field = test.get_first_field(FieldType::Checkbox); + let checkbox_field = test.get_first_field(FieldType::Checkbox).await; let scripts = vec![ AssertCellContentOrder { field_id: checkbox_field.id.clone(), @@ -158,7 +158,7 @@ async fn sort_checkbox_by_ascending_test() { #[tokio::test] async fn 
sort_checkbox_by_descending_test() { let mut test = DatabaseSortTest::new().await; - let checkbox_field = test.get_first_field(FieldType::Checkbox); + let checkbox_field = test.get_first_field(FieldType::Checkbox).await; let scripts = vec![ AssertCellContentOrder { field_id: checkbox_field.id.clone(), @@ -179,7 +179,7 @@ async fn sort_checkbox_by_descending_test() { #[tokio::test] async fn sort_date_by_ascending_test() { let mut test = DatabaseSortTest::new().await; - let date_field = test.get_first_field(FieldType::DateTime); + let date_field = test.get_first_field(FieldType::DateTime).await; let scripts = vec![ AssertCellContentOrder { field_id: date_field.id.clone(), @@ -216,7 +216,7 @@ async fn sort_date_by_ascending_test() { #[tokio::test] async fn sort_date_by_descending_test() { let mut test = DatabaseSortTest::new().await; - let date_field = test.get_first_field(FieldType::DateTime); + let date_field = test.get_first_field(FieldType::DateTime).await; let scripts = vec![ AssertCellContentOrder { field_id: date_field.id.clone(), @@ -253,7 +253,7 @@ async fn sort_date_by_descending_test() { #[tokio::test] async fn sort_number_by_ascending_test() { let mut test = DatabaseSortTest::new().await; - let number_field = test.get_first_field(FieldType::Number); + let number_field = test.get_first_field(FieldType::Number).await; let scripts = vec![ AssertCellContentOrder { field_id: number_field.id.clone(), @@ -274,7 +274,7 @@ async fn sort_number_by_ascending_test() { #[tokio::test] async fn sort_number_by_descending_test() { let mut test = DatabaseSortTest::new().await; - let number_field = test.get_first_field(FieldType::Number); + let number_field = test.get_first_field(FieldType::Number).await; let scripts = vec![ AssertCellContentOrder { field_id: number_field.id.clone(), @@ -295,7 +295,7 @@ async fn sort_number_by_descending_test() { #[tokio::test] async fn sort_single_select_by_ascending_test() { let mut test = DatabaseSortTest::new().await; - let single_select 
= test.get_first_field(FieldType::SingleSelect); + let single_select = test.get_first_field(FieldType::SingleSelect).await; let scripts = vec![ AssertCellContentOrder { field_id: single_select.id.clone(), @@ -316,7 +316,7 @@ async fn sort_single_select_by_ascending_test() { #[tokio::test] async fn sort_single_select_by_descending_test() { let mut test = DatabaseSortTest::new().await; - let single_select = test.get_first_field(FieldType::SingleSelect); + let single_select = test.get_first_field(FieldType::SingleSelect).await; let scripts = vec![ AssertCellContentOrder { field_id: single_select.id.clone(), @@ -337,7 +337,7 @@ async fn sort_single_select_by_descending_test() { #[tokio::test] async fn sort_multi_select_by_ascending_test() { let mut test = DatabaseSortTest::new().await; - let multi_select = test.get_first_field(FieldType::MultiSelect); + let multi_select = test.get_first_field(FieldType::MultiSelect).await; let scripts = vec![ AssertCellContentOrder { field_id: multi_select.id.clone(), @@ -374,7 +374,7 @@ async fn sort_multi_select_by_ascending_test() { #[tokio::test] async fn sort_multi_select_by_descending_test() { let mut test = DatabaseSortTest::new().await; - let multi_select = test.get_first_field(FieldType::MultiSelect); + let multi_select = test.get_first_field(FieldType::MultiSelect).await; let scripts = vec![ AssertCellContentOrder { field_id: multi_select.id.clone(), @@ -411,7 +411,7 @@ async fn sort_multi_select_by_descending_test() { #[tokio::test] async fn sort_checklist_by_ascending_test() { let mut test = DatabaseSortTest::new().await; - let checklist_field = test.get_first_field(FieldType::Checklist); + let checklist_field = test.get_first_field(FieldType::Checklist).await; let scripts = vec![ AssertCellContentOrder { field_id: checklist_field.id.clone(), @@ -448,7 +448,7 @@ async fn sort_checklist_by_ascending_test() { #[tokio::test] async fn sort_checklist_by_descending_test() { let mut test = DatabaseSortTest::new().await; - let 
checklist_field = test.get_first_field(FieldType::Checklist); + let checklist_field = test.get_first_field(FieldType::Checklist).await; let scripts = vec![ AssertCellContentOrder { field_id: checklist_field.id.clone(), diff --git a/frontend/rust-lib/flowy-document-pub/src/cloud.rs b/frontend/rust-lib/flowy-document-pub/src/cloud.rs index 2f4da1bd37..18e40691a1 100644 --- a/frontend/rust-lib/flowy-document-pub/src/cloud.rs +++ b/frontend/rust-lib/flowy-document-pub/src/cloud.rs @@ -2,30 +2,31 @@ use anyhow::Error; pub use collab_document::blocks::DocumentData; use flowy_error::FlowyError; -use lib_infra::future::FutureResult; +use lib_infra::async_trait::async_trait; /// A trait for document cloud service. /// Each kind of server should implement this trait. Check out the [AppFlowyServerProvider] of /// [flowy-server] crate for more information. +#[async_trait] pub trait DocumentCloudService: Send + Sync + 'static { - fn get_document_doc_state( + async fn get_document_doc_state( &self, document_id: &str, workspace_id: &str, - ) -> FutureResult, FlowyError>; + ) -> Result, FlowyError>; - fn get_document_snapshots( + async fn get_document_snapshots( &self, document_id: &str, limit: usize, workspace_id: &str, - ) -> FutureResult, Error>; + ) -> Result, Error>; - fn get_document_data( + async fn get_document_data( &self, document_id: &str, workspace_id: &str, - ) -> FutureResult, Error>; + ) -> Result, Error>; } pub struct DocumentSnapshot { diff --git a/frontend/rust-lib/flowy-document/Cargo.toml b/frontend/rust-lib/flowy-document/Cargo.toml index f64c960b12..6fe59c0c52 100644 --- a/frontend/rust-lib/flowy-document/Cargo.toml +++ b/frontend/rust-lib/flowy-document/Cargo.toml @@ -24,7 +24,6 @@ validator = { version = "0.16.0", features = ["derive"] } protobuf.workspace = true bytes.workspace = true nanoid = "0.4.0" -parking_lot.workspace = true strum_macros = "0.21" serde.workspace = true serde_json.workspace = true diff --git 
a/frontend/rust-lib/flowy-document/src/document.rs b/frontend/rust-lib/flowy-document/src/document.rs index 6ec018f171..7e1c1d143b 100644 --- a/frontend/rust-lib/flowy-document/src/document.rs +++ b/frontend/rust-lib/flowy-document/src/document.rs @@ -2,86 +2,28 @@ use crate::entities::{ DocEventPB, DocumentAwarenessStatesPB, DocumentSnapshotStatePB, DocumentSyncStatePB, }; use crate::notification::{send_notification, DocumentNotification}; -use collab::core::collab::MutexCollab; -use collab_document::document::DocumentIndexContent; -use collab_document::{blocks::DocumentData, document::Document}; -use flowy_error::FlowyResult; +use collab::preclude::Collab; +use collab_document::document::Document; use futures::StreamExt; use lib_dispatch::prelude::af_spawn; -use parking_lot::Mutex; -use std::{ - ops::{Deref, DerefMut}, - sync::Arc, -}; -use tracing::{instrument, warn}; -/// This struct wrap the document::Document -#[derive(Clone)] -pub struct MutexDocument(Arc>); - -impl MutexDocument { - /// Open a document with the given collab. - /// # Arguments - /// * `collab` - the identifier of the collaboration instance - /// - /// # Returns - /// * `Result` - a Result containing either a new Document object or an Error if the document creation failed - pub fn open(doc_id: &str, collab: Arc) -> FlowyResult { - #[allow(clippy::arc_with_non_send_sync)] - let document = Document::open(collab.clone()).map(|inner| Self(Arc::new(Mutex::new(inner))))?; - subscribe_document_changed(doc_id, &document); - subscribe_document_snapshot_state(&collab); - subscribe_document_sync_state(&collab); - Ok(document) - } - - /// Creates and returns a new Document object with initial data. 
- /// # Arguments - /// * `collab` - the identifier of the collaboration instance - /// * `data` - the initial data to include in the document - /// - /// # Returns - /// * `Result` - a Result containing either a new Document object or an Error if the document creation failed - pub fn create_with_data(collab: Arc, data: DocumentData) -> FlowyResult { - #[allow(clippy::arc_with_non_send_sync)] - let document = - Document::create_with_data(collab, data).map(|inner| Self(Arc::new(Mutex::new(inner))))?; - Ok(document) - } - - #[instrument(level = "debug", skip_all)] - pub fn start_init_sync(&self) { - if let Some(document) = self.0.try_lock() { - if let Some(collab) = document.get_collab().try_lock() { - collab.start_init_sync(); - } else { - warn!("Failed to start init sync, collab is locked"); - } - } else { - warn!("Failed to start init sync, document is locked"); - } - } -} - -fn subscribe_document_changed(doc_id: &str, document: &MutexDocument) { +pub fn subscribe_document_changed(doc_id: &str, document: &mut Document) { let doc_id_clone_for_block_changed = doc_id.to_owned(); - document - .lock() - .subscribe_block_changed(move |events, is_remote| { - #[cfg(feature = "verbose_log")] - tracing::trace!("subscribe_document_changed: {:?}", events); + document.subscribe_block_changed("key", move |events, is_remote| { + #[cfg(feature = "verbose_log")] + tracing::trace!("subscribe_document_changed: {:?}", events); - // send notification to the client. - send_notification( - &doc_id_clone_for_block_changed, - DocumentNotification::DidReceiveUpdate, - ) - .payload::((events, is_remote, None).into()) - .send(); - }); + // send notification to the client. 
+ send_notification( + &doc_id_clone_for_block_changed, + DocumentNotification::DidReceiveUpdate, + ) + .payload::((events, is_remote, None).into()) + .send(); + }); let doc_id_clone_for_awareness_state = doc_id.to_owned(); - document.lock().subscribe_awareness_state(move |events| { + document.subscribe_awareness_state("key", move |events| { #[cfg(feature = "verbose_log")] tracing::trace!("subscribe_awareness_state: {:?}", events); send_notification( @@ -93,9 +35,9 @@ fn subscribe_document_changed(doc_id: &str, document: &MutexDocument) { }); } -fn subscribe_document_snapshot_state(collab: &Arc) { - let document_id = collab.lock().object_id.clone(); - let mut snapshot_state = collab.lock().subscribe_snapshot_state(); +pub fn subscribe_document_snapshot_state(collab: &Collab) { + let document_id = collab.object_id().to_string(); + let mut snapshot_state = collab.subscribe_snapshot_state(); af_spawn(async move { while let Some(snapshot_state) = snapshot_state.next().await { if let Some(new_snapshot_id) = snapshot_state.snapshot_id() { @@ -111,9 +53,9 @@ fn subscribe_document_snapshot_state(collab: &Arc) { }); } -fn subscribe_document_sync_state(collab: &Arc) { - let document_id = collab.lock().object_id.clone(); - let mut sync_state_stream = collab.lock().subscribe_sync_state(); +pub fn subscribe_document_sync_state(collab: &Collab) { + let document_id = collab.object_id().to_string(); + let mut sync_state_stream = collab.subscribe_sync_state(); af_spawn(async move { while let Some(sync_state) = sync_state_stream.next().await { send_notification( @@ -125,27 +67,3 @@ fn subscribe_document_sync_state(collab: &Arc) { } }); } - -unsafe impl Sync for MutexDocument {} -unsafe impl Send for MutexDocument {} - -impl Deref for MutexDocument { - type Target = Arc>; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for MutexDocument { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl From<&MutexDocument> for 
DocumentIndexContent { - fn from(doc: &MutexDocument) -> Self { - let doc = doc.lock(); - DocumentIndexContent::from(&*doc) - } -} diff --git a/frontend/rust-lib/flowy-document/src/event_handler.rs b/frontend/rust-lib/flowy-document/src/event_handler.rs index 66a98e3105..2efde86a89 100644 --- a/frontend/rust-lib/flowy-document/src/event_handler.rs +++ b/frontend/rust-lib/flowy-document/src/event_handler.rs @@ -42,7 +42,7 @@ pub(crate) async fn get_encode_collab_handler( let manager = upgrade_document(manager)?; let params: OpenDocumentParams = data.into_inner().try_into()?; let doc_id = params.document_id; - let state = manager.get_encoded_collab_with_view_id(&doc_id).await?; + let state = manager.get_encoded_collab_with_view_id(&doc_id)?; data_result_ok(EncodedCollabPB { state_vector: Vec::from(state.state_vector), doc_state: Vec::from(state.doc_state), @@ -74,8 +74,8 @@ pub(crate) async fn open_document_handler( let doc_id = params.document_id; manager.open_document(&doc_id).await?; - let document = manager.get_opened_document(&doc_id).await?; - let document_data = document.lock().get_document_data()?; + let document = manager.editable_document(&doc_id).await?; + let document_data = document.read().await.get_document_data()?; data_result_ok(DocumentDataPB::from(document_data)) } @@ -122,12 +122,12 @@ pub(crate) async fn apply_action_handler( let manager = upgrade_document(manager)?; let params: ApplyActionParams = data.into_inner().try_into()?; let doc_id = params.document_id; - let document = manager.get_opened_document(&doc_id).await?; + let document = manager.editable_document(&doc_id).await?; let actions = params.actions; if cfg!(feature = "verbose_log") { tracing::trace!("{} applying actions: {:?}", doc_id, actions); } - document.lock().apply_action(actions); + document.write().await.apply_action(actions)?; Ok(()) } @@ -139,9 +139,9 @@ pub(crate) async fn create_text_handler( let manager = upgrade_document(manager)?; let params: TextDeltaParams = 
data.into_inner().try_into()?; let doc_id = params.document_id; - let document = manager.get_opened_document(&doc_id).await?; - let document = document.lock(); - document.create_text(¶ms.text_id, params.delta); + let document = manager.editable_document(&doc_id).await?; + let mut document = document.write().await; + document.apply_text_delta(¶ms.text_id, params.delta); Ok(()) } @@ -153,10 +153,10 @@ pub(crate) async fn apply_text_delta_handler( let manager = upgrade_document(manager)?; let params: TextDeltaParams = data.into_inner().try_into()?; let doc_id = params.document_id; - let document = manager.get_opened_document(&doc_id).await?; + let document = manager.editable_document(&doc_id).await?; let text_id = params.text_id; let delta = params.delta; - let document = document.lock(); + let mut document = document.write().await; if cfg!(feature = "verbose_log") { tracing::trace!("{} applying delta: {:?}", doc_id, delta); } @@ -194,8 +194,8 @@ pub(crate) async fn redo_handler( let manager = upgrade_document(manager)?; let params: DocumentRedoUndoParams = data.into_inner().try_into()?; let doc_id = params.document_id; - let document = manager.get_opened_document(&doc_id).await?; - let document = document.lock(); + let document = manager.editable_document(&doc_id).await?; + let mut document = document.write().await; let redo = document.redo(); let can_redo = document.can_redo(); let can_undo = document.can_undo(); @@ -213,8 +213,8 @@ pub(crate) async fn undo_handler( let manager = upgrade_document(manager)?; let params: DocumentRedoUndoParams = data.into_inner().try_into()?; let doc_id = params.document_id; - let document = manager.get_opened_document(&doc_id).await?; - let document = document.lock(); + let document = manager.editable_document(&doc_id).await?; + let mut document = document.write().await; let undo = document.undo(); let can_redo = document.can_redo(); let can_undo = document.can_undo(); @@ -232,11 +232,10 @@ pub(crate) async fn can_undo_redo_handler( 
let manager = upgrade_document(manager)?; let params: DocumentRedoUndoParams = data.into_inner().try_into()?; let doc_id = params.document_id; - let document = manager.get_opened_document(&doc_id).await?; - let document = document.lock(); + let document = manager.editable_document(&doc_id).await?; + let document = document.read().await; let can_redo = document.can_redo(); let can_undo = document.can_undo(); - drop(document); data_result_ok(DocumentRedoUndoResponsePB { can_redo, can_undo, @@ -388,8 +387,7 @@ pub async fn convert_document_handler( let manager = upgrade_document(manager)?; let params: ConvertDocumentParams = data.into_inner().try_into()?; - let document = manager.get_opened_document(¶ms.document_id).await?; - let document_data = document.lock().get_document_data()?; + let document_data = manager.get_document_data(¶ms.document_id).await?; let parser = DocumentDataParser::new(Arc::new(document_data), params.range); if !params.parse_types.any_enabled() { diff --git a/frontend/rust-lib/flowy-document/src/manager.rs b/frontend/rust-lib/flowy-document/src/manager.rs index 5ea5aeb2de..bf503b0cfc 100644 --- a/frontend/rust-lib/flowy-document/src/manager.rs +++ b/frontend/rust-lib/flowy-document/src/manager.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use std::sync::Weak; -use collab::core::collab::{DataSource, MutexCollab}; +use collab::core::collab::DataSource; use collab::core::origin::CollabOrigin; use collab::entity::EncodedCollab; use collab::preclude::Collab; @@ -12,19 +12,26 @@ use collab_document::document_awareness::DocumentAwarenessState; use collab_document::document_awareness::DocumentAwarenessUser; use collab_document::document_data::default_document_data; use collab_entity::CollabType; +use collab_plugins::local_storage::kv::doc::CollabKVAction; +use collab_plugins::local_storage::kv::KVTransactionDB; use collab_plugins::CollabKVDB; use dashmap::DashMap; use lib_infra::util::timestamp; +use tokio::sync::RwLock; use tracing::trace; use tracing::{event, 
instrument}; -use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig}; +use crate::document::{ + subscribe_document_changed, subscribe_document_snapshot_state, subscribe_document_sync_state, +}; +use collab_integrate::collab_builder::{ + AppFlowyCollabBuilder, CollabBuilderConfig, KVDBCollabPersistenceImpl, +}; use flowy_document_pub::cloud::DocumentCloudService; use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult}; use flowy_storage_pub::storage::{CreatedUpload, StorageService}; use lib_dispatch::prelude::af_spawn; -use crate::document::MutexDocument; use crate::entities::UpdateDocumentAwarenessStatePB; use crate::entities::{ DocumentSnapshotData, DocumentSnapshotMeta, DocumentSnapshotMetaPB, DocumentSnapshotPB, @@ -49,8 +56,8 @@ pub trait DocumentSnapshotService: Send + Sync { pub struct DocumentManager { pub user_service: Arc, collab_builder: Arc, - documents: Arc>>, - removing_documents: Arc>>, + documents: Arc>>>, + removing_documents: Arc>>>, cloud_service: Arc, storage_service: Weak, snapshot_service: Arc, @@ -76,17 +83,17 @@ impl DocumentManager { } /// Get the encoded collab of the document. 
- pub async fn get_encoded_collab_with_view_id(&self, doc_id: &str) -> FlowyResult { - let doc_state = DataSource::Disk; + pub fn get_encoded_collab_with_view_id(&self, doc_id: &str) -> FlowyResult { let uid = self.user_service.user_id()?; - let collab = self - .collab_for_document(uid, doc_id, doc_state, false) - .await?; - - let collab = collab.lock(); - collab + let doc_state = + KVDBCollabPersistenceImpl::new(self.user_service.collab_db(uid)?, uid).into_data_source(); + let collab = self.collab_for_document(uid, doc_id, doc_state, false)?; + let encoded_collab = collab + .try_read() + .unwrap() .encode_collab_v1(|collab| CollabType::Document.validate_require_data(collab)) - .map_err(internal_error) + .map_err(internal_error)?; + Ok(encoded_collab) } pub async fn initialize(&self, _uid: i64) -> FlowyResult<()> { @@ -132,27 +139,56 @@ impl DocumentManager { format!("document {} already exists", doc_id), )) } else { + let db = self + .user_service + .collab_db(uid)? + .upgrade() + .ok_or_else(|| FlowyError::internal().with_context("Failed to get collab db"))?; let encoded_collab = doc_state_from_document_data( doc_id, data.unwrap_or_else(|| default_document_data(doc_id)), ) .await?; - let doc_state = encoded_collab.doc_state.to_vec(); - let collab = self - .collab_for_document( + + db.with_write_txn(|write_txn| { + write_txn.flush_doc( uid, doc_id, - DataSource::DocStateV1(doc_state.clone()), - false, - ) - .await?; - collab.lock().flush(); + encoded_collab.state_vector.to_vec(), + encoded_collab.doc_state.to_vec(), + )?; + Ok(()) + })?; Ok(encoded_collab) } } - pub async fn get_opened_document(&self, doc_id: &str) -> FlowyResult> { + fn collab_for_document( + &self, + uid: i64, + doc_id: &str, + data_source: DataSource, + sync_enable: bool, + ) -> FlowyResult>> { + let db = self.user_service.collab_db(uid)?; + let workspace_id = self.user_service.workspace_id()?; + let collab_object = + self + .collab_builder + .collab_object(&workspace_id, uid, doc_id, 
CollabType::Document)?; + let document = self.collab_builder.create_document( + collab_object, + data_source, + db, + CollabBuilderConfig::default().sync_enable(sync_enable), + None, + )?; + Ok(document) + } + + /// Return a document instance if the document is already opened. + pub async fn editable_document(&self, doc_id: &str) -> FlowyResult>> { if let Some(doc) = self.documents.get(doc_id).map(|item| item.value().clone()) { return Ok(doc); } @@ -160,6 +196,7 @@ impl DocumentManager { if let Some(doc) = self.restore_document_from_removing(doc_id) { return Ok(doc); } + Err(FlowyError::internal().with_context("Call open document first")) } @@ -167,12 +204,14 @@ impl DocumentManager { /// If the document does not exist in local disk, try get the doc state from the cloud. /// If the document exists, open the document and cache it #[tracing::instrument(level = "info", skip(self), err)] - async fn init_document_instance(&self, doc_id: &str) -> FlowyResult> { - if let Some(doc) = self.documents.get(doc_id).map(|item| item.value().clone()) { - return Ok(doc); - } - - let mut doc_state = DataSource::Disk; + async fn create_document_instance( + &self, + doc_id: &str, + enable_sync: bool, + ) -> FlowyResult>> { + let uid = self.user_service.user_id()?; + let mut doc_state = + KVDBCollabPersistenceImpl::new(self.user_service.collab_db(uid)?, uid).into_data_source(); // If the document does not exist in local disk, try get the doc state from the cloud. This happens // When user_device_a create a document and user_device_b open the document. if !self.is_doc_exist(doc_id).await? 
{ @@ -192,21 +231,25 @@ impl DocumentManager { } } - let uid = self.user_service.user_id()?; event!( tracing::Level::DEBUG, "Initialize document: {}, workspace_id: {:?}", doc_id, self.user_service.workspace_id() ); - let collab = self - .collab_for_document(uid, doc_id, doc_state, true) - .await?; - - match MutexDocument::open(doc_id, collab) { + let result = self.collab_for_document(uid, doc_id, doc_state, enable_sync); + match result { Ok(document) => { - let document = Arc::new(document); - self.documents.insert(doc_id.to_string(), document.clone()); + // Only push the document to the cache if the sync is enabled. + if enable_sync { + { + let mut lock = document.write().await; + subscribe_document_changed(doc_id, &mut lock); + subscribe_document_snapshot_state(&lock); + subscribe_document_sync_state(&lock); + } + self.documents.insert(doc_id.to_string(), document.clone()); + } Ok(document) }, Err(err) => { @@ -222,47 +265,52 @@ impl DocumentManager { pub async fn get_document_data(&self, doc_id: &str) -> FlowyResult { let document = self.get_document(doc_id).await?; + let document = document.read().await; document.get_document_data().map_err(internal_error) } pub async fn get_document_text(&self, doc_id: &str) -> FlowyResult { let document = self.get_document(doc_id).await?; - let text = convert_document_to_plain_text(document)?; + let document = document.read().await; + let text = convert_document_to_plain_text(&document)?; Ok(text) } - async fn get_document(&self, doc_id: &str) -> FlowyResult { - let mut doc_state = DataSource::Disk; - if !self.is_doc_exist(doc_id).await? { - doc_state = DataSource::DocStateV1( - self - .cloud_service - .get_document_doc_state(doc_id, &self.user_service.workspace_id()?) - .await?, - ); + /// Return a document instance. + /// The returned document might or might not be able to sync with the cloud. 
+ async fn get_document(&self, doc_id: &str) -> FlowyResult>> { + if let Some(doc) = self.documents.get(doc_id).map(|item| item.value().clone()) { + return Ok(doc); } - let uid = self.user_service.user_id()?; - let collab = self - .collab_for_document(uid, doc_id, doc_state, false) - .await?; - let document = Document::open(collab)?; + + if let Some(doc) = self.restore_document_from_removing(doc_id) { + return Ok(doc); + } + + let document = self.create_document_instance(doc_id, false).await?; Ok(document) } pub async fn open_document(&self, doc_id: &str) -> FlowyResult<()> { if let Some(mutex_document) = self.restore_document_from_removing(doc_id) { - mutex_document.start_init_sync(); + let lock = mutex_document.read().await; + lock.start_init_sync(); } - let _ = self.init_document_instance(doc_id).await?; + if self.documents.contains_key(doc_id) { + return Ok(()); + } + + let _ = self.create_document_instance(doc_id, true).await?; Ok(()) } pub async fn close_document(&self, doc_id: &str) -> FlowyResult<()> { if let Some((doc_id, document)) = self.documents.remove(doc_id) { - if let Some(doc) = document.try_lock() { + { // clear the awareness state when close the document - doc.clean_awareness_local_state(); - let _ = doc.flush(); + let mut lock = document.write().await; + lock.clean_awareness_local_state(); + lock.flush(); } let clone_doc_id = doc_id.clone(); trace!("move document to removing_documents: {}", doc_id); @@ -300,20 +348,19 @@ impl DocumentManager { ) -> FlowyResult { let uid = self.user_service.user_id()?; let device_id = self.user_service.device_id()?; - if let Ok(doc) = self.get_opened_document(doc_id).await { - if let Some(doc) = doc.try_lock() { - let user = DocumentAwarenessUser { uid, device_id }; - let selection = state.selection.map(|s| s.into()); - let state = DocumentAwarenessState { - version: 1, - user, - selection, - metadata: state.metadata, - timestamp: timestamp(), - }; - doc.set_awareness_local_state(state); - return Ok(true); - } + 
if let Ok(doc) = self.editable_document(doc_id).await { + let mut doc = doc.write().await; + let user = DocumentAwarenessUser { uid, device_id }; + let selection = state.selection.map(|s| s.into()); + let state = DocumentAwarenessState { + version: 1, + user, + selection, + metadata: state.metadata, + timestamp: timestamp(), + }; + doc.set_awareness_local_state(state); + return Ok(true); } Ok(false) } @@ -376,27 +423,6 @@ impl DocumentManager { Ok(()) } - async fn collab_for_document( - &self, - uid: i64, - doc_id: &str, - doc_state: DataSource, - sync_enable: bool, - ) -> FlowyResult> { - let db = self.user_service.collab_db(uid)?; - let workspace_id = self.user_service.workspace_id()?; - let collab = self.collab_builder.build_with_config( - &workspace_id, - uid, - doc_id, - CollabType::Document, - db, - doc_state, - CollabBuilderConfig::default().sync_enable(sync_enable), - )?; - Ok(collab) - } - async fn is_doc_exist(&self, doc_id: &str) -> FlowyResult { let uid = self.user_service.user_id()?; if let Some(collab_db) = self.user_service.collab_db(uid)?.upgrade() { @@ -425,7 +451,7 @@ impl DocumentManager { &self.storage_service } - fn restore_document_from_removing(&self, doc_id: &str) -> Option> { + fn restore_document_from_removing(&self, doc_id: &str) -> Option>> { let (doc_id, doc) = self.removing_documents.remove(doc_id)?; trace!( "move document {} from removing_documents to documents", @@ -443,13 +469,8 @@ async fn doc_state_from_document_data( let doc_id = doc_id.to_string(); // spawn_blocking is used to avoid blocking the tokio thread pool if the document is large. 
let encoded_collab = tokio::task::spawn_blocking(move || { - let collab = Arc::new(MutexCollab::new(Collab::new_with_origin( - CollabOrigin::Empty, - doc_id, - vec![], - false, - ))); - let document = Document::create_with_data(collab.clone(), data).map_err(internal_error)?; + let collab = Collab::new_with_origin(CollabOrigin::Empty, doc_id, vec![], false); + let document = Document::open_with(collab, Some(data)).map_err(internal_error)?; let encode_collab = document.encode_collab()?; Ok::<_, FlowyError>(encode_collab) }) diff --git a/frontend/rust-lib/flowy-document/tests/document/document_insert_test.rs b/frontend/rust-lib/flowy-document/tests/document/document_insert_test.rs index 1181395cae..28c02641e8 100644 --- a/frontend/rust-lib/flowy-document/tests/document/document_insert_test.rs +++ b/frontend/rust-lib/flowy-document/tests/document/document_insert_test.rs @@ -31,9 +31,13 @@ async fn document_apply_insert_block_with_empty_parent_id() { text_id: None, }, }; - document.lock().apply_action(vec![insert_text_action]); + document + .write() + .await + .apply_action(vec![insert_text_action]) + .unwrap(); // read the text block and it's parent id should be the page id - let block = document.lock().get_block(&text_block_id).unwrap(); + let block = document.read().await.get_block(&text_block_id).unwrap(); assert_eq!(block.parent, page_id); } diff --git a/frontend/rust-lib/flowy-document/tests/document/document_redo_undo_test.rs b/frontend/rust-lib/flowy-document/tests/document/document_redo_undo_test.rs index ce97aa0bdd..b11cd2ecde 100644 --- a/frontend/rust-lib/flowy-document/tests/document/document_redo_undo_test.rs +++ b/frontend/rust-lib/flowy-document/tests/document/document_redo_undo_test.rs @@ -23,8 +23,8 @@ async fn undo_redo_test() { // open a document test.open_document(&doc_id).await.unwrap(); - let document = test.get_opened_document(&doc_id).await.unwrap(); - let document = document.lock(); + let document = 
test.editable_document(&doc_id).await.unwrap(); + let mut document = document.write().await; let page_block = document.get_block(&data.page_id).unwrap(); let page_id = page_block.id; let text_block_id = gen_id(); @@ -49,7 +49,7 @@ async fn undo_redo_test() { text_id: None, }, }; - document.apply_action(vec![insert_text_action]); + document.apply_action(vec![insert_text_action]).unwrap(); let can_undo = document.can_undo(); assert!(can_undo); diff --git a/frontend/rust-lib/flowy-document/tests/document/document_test.rs b/frontend/rust-lib/flowy-document/tests/document/document_test.rs index 8c57d94346..d7906bc114 100644 --- a/frontend/rust-lib/flowy-document/tests/document/document_test.rs +++ b/frontend/rust-lib/flowy-document/tests/document/document_test.rs @@ -23,10 +23,11 @@ async fn restore_document() { test.open_document(&doc_id).await.unwrap(); let data_b = test - .get_opened_document(&doc_id) + .editable_document(&doc_id) .await .unwrap() - .lock() + .read() + .await .get_document_data() .unwrap(); // close a document @@ -37,10 +38,11 @@ async fn restore_document() { _ = test.create_document(uid, &doc_id, Some(data.clone())).await; // open a document let data_b = test - .get_opened_document(&doc_id) + .editable_document(&doc_id) .await .unwrap() - .lock() + .read() + .await .get_document_data() .unwrap(); // close a document @@ -61,8 +63,9 @@ async fn document_apply_insert_action() { // open a document test.open_document(&doc_id).await.unwrap(); - let document = test.get_opened_document(&doc_id).await.unwrap(); - let page_block = document.lock().get_block(&data.page_id).unwrap(); + let document = test.editable_document(&doc_id).await.unwrap(); + let mut document = document.write().await; + let page_block = document.get_block(&data.page_id).unwrap(); // insert a text block let text_block = Block { @@ -84,17 +87,19 @@ async fn document_apply_insert_action() { text_id: None, }, }; - document.lock().apply_action(vec![insert_text_action]); - let data_a = 
document.lock().get_document_data().unwrap(); + document.apply_action(vec![insert_text_action]).unwrap(); + let data_a = document.get_document_data().unwrap(); + drop(document); // close the original document _ = test.close_document(&doc_id).await; // re-open the document let data_b = test - .get_opened_document(&doc_id) + .editable_document(&doc_id) .await .unwrap() - .lock() + .read() + .await .get_document_data() .unwrap(); // close a document @@ -115,8 +120,9 @@ async fn document_apply_update_page_action() { // open a document test.open_document(&doc_id).await.unwrap(); - let document = test.get_opened_document(&doc_id).await.unwrap(); - let page_block = document.lock().get_block(&data.page_id).unwrap(); + let document = test.editable_document(&doc_id).await.unwrap(); + let mut document = document.write().await; + let page_block = document.get_block(&data.page_id).unwrap(); let mut page_block_clone = page_block; page_block_clone.data = HashMap::new(); @@ -136,13 +142,14 @@ async fn document_apply_update_page_action() { }; let actions = vec![action]; tracing::trace!("{:?}", &actions); - document.lock().apply_action(actions); - let page_block_old = document.lock().get_block(&data.page_id).unwrap(); + document.apply_action(actions).unwrap(); + let page_block_old = document.get_block(&data.page_id).unwrap(); + drop(document); _ = test.close_document(&doc_id).await; // re-open the document - let document = test.get_opened_document(&doc_id).await.unwrap(); - let page_block_new = document.lock().get_block(&data.page_id).unwrap(); + let document = test.editable_document(&doc_id).await.unwrap(); + let page_block_new = document.read().await.get_block(&data.page_id).unwrap(); assert_eq!(page_block_old, page_block_new); assert!(page_block_new.data.contains_key("delta")); } @@ -159,8 +166,9 @@ async fn document_apply_update_action() { // open a document test.open_document(&doc_id).await.unwrap(); - let document = test.get_opened_document(&doc_id).await.unwrap(); - let 
page_block = document.lock().get_block(&data.page_id).unwrap(); + let document = test.editable_document(&doc_id).await.unwrap(); + let mut document = document.write().await; + let page_block = document.get_block(&data.page_id).unwrap(); // insert a text block let text_block_id = gen_id(); @@ -183,10 +191,10 @@ async fn document_apply_update_action() { text_id: None, }, }; - document.lock().apply_action(vec![insert_text_action]); + document.apply_action(vec![insert_text_action]).unwrap(); // update the text block - let existing_text_block = document.lock().get_block(&text_block_id).unwrap(); + let existing_text_block = document.get_block(&text_block_id).unwrap(); let mut updated_text_block_data = HashMap::new(); updated_text_block_data.insert("delta".to_string(), Value::String("delta".to_string())); let updated_text_block = Block { @@ -208,13 +216,14 @@ async fn document_apply_update_action() { text_id: None, }, }; - document.lock().apply_action(vec![update_text_action]); + document.apply_action(vec![update_text_action]).unwrap(); + drop(document); // close the original document _ = test.close_document(&doc_id).await; // re-open the document - let document = test.get_opened_document(&doc_id).await.unwrap(); - let block = document.lock().get_block(&text_block_id).unwrap(); + let document = test.editable_document(&doc_id).await.unwrap(); + let block = document.read().await.get_block(&text_block_id).unwrap(); assert_eq!(block.data, updated_text_block_data); // close a document _ = test.close_document(&doc_id).await; diff --git a/frontend/rust-lib/flowy-document/tests/document/util.rs b/frontend/rust-lib/flowy-document/tests/document/util.rs index 58663abd14..2bc2f9d7bb 100644 --- a/frontend/rust-lib/flowy-document/tests/document/util.rs +++ b/frontend/rust-lib/flowy-document/tests/document/util.rs @@ -1,13 +1,14 @@ use std::ops::Deref; -use std::sync::Arc; +use std::sync::{Arc, OnceLock}; use anyhow::Error; use collab::preclude::CollabPlugin; use 
collab_document::blocks::DocumentData; +use collab_document::document::Document; use collab_document::document_data::default_document_data; use nanoid::nanoid; -use parking_lot::Once; use tempfile::TempDir; +use tokio::sync::RwLock; use tracing_subscriber::{fmt::Subscriber, util::SubscriberInitExt, EnvFilter}; use collab_integrate::collab_builder::{ @@ -15,7 +16,6 @@ use collab_integrate::collab_builder::{ CollabPluginProviderType, WorkspaceCollabIntegrate, }; use collab_integrate::CollabKVDB; -use flowy_document::document::MutexDocument; use flowy_document::entities::{DocumentSnapshotData, DocumentSnapshotMeta}; use flowy_document::manager::{DocumentManager, DocumentSnapshotService, DocumentUserService}; use flowy_document_pub::cloud::*; @@ -24,7 +24,6 @@ use flowy_storage_pub::chunked_byte::ChunkedBytes; use flowy_storage_pub::storage::{CreatedUpload, FileProgressReceiver, StorageService}; use lib_infra::async_trait::async_trait; use lib_infra::box_any::BoxAny; -use lib_infra::future::FutureResult; pub struct DocumentTest { inner: DocumentManager, @@ -103,8 +102,8 @@ impl DocumentUserService for FakeUser { } pub fn setup_log() { - static START: Once = Once::new(); - START.call_once(|| { + static START: OnceLock<()> = OnceLock::new(); + START.get_or_init(|| { std::env::set_var("RUST_LOG", "collab_persistence=trace"); let subscriber = Subscriber::builder() .with_env_filter(EnvFilter::from_default_env()) @@ -114,7 +113,7 @@ pub fn setup_log() { }); } -pub async fn create_and_open_empty_document() -> (DocumentTest, Arc, String) { +pub async fn create_and_open_empty_document() -> (DocumentTest, Arc>, String) { let test = DocumentTest::new(); let doc_id: String = gen_document_id(); let data = default_document_data(&doc_id); @@ -126,7 +125,7 @@ pub async fn create_and_open_empty_document() -> (DocumentTest, Arc String { } pub struct LocalTestDocumentCloudServiceImpl(); + +#[async_trait] impl DocumentCloudService for LocalTestDocumentCloudServiceImpl { - fn 
get_document_doc_state( + async fn get_document_doc_state( &self, document_id: &str, _workspace_id: &str, - ) -> FutureResult, FlowyError> { + ) -> Result, FlowyError> { let document_id = document_id.to_string(); - FutureResult::new(async move { - Err(FlowyError::new( - ErrorCode::RecordNotFound, - format!("Document {} not found", document_id), - )) - }) + Err(FlowyError::new( + ErrorCode::RecordNotFound, + format!("Document {} not found", document_id), + )) } - fn get_document_snapshots( + async fn get_document_snapshots( &self, _document_id: &str, _limit: usize, _workspace_id: &str, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(vec![]) }) + ) -> Result, Error> { + Ok(vec![]) } - fn get_document_data( + async fn get_document_data( &self, _document_id: &str, _workspace_id: &str, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(None) }) + ) -> Result, Error> { + Ok(None) } } diff --git a/frontend/rust-lib/flowy-error/src/code.rs b/frontend/rust-lib/flowy-error/src/code.rs index fc12ed4606..93fe6a88d6 100644 --- a/frontend/rust-lib/flowy-error/src/code.rs +++ b/frontend/rust-lib/flowy-error/src/code.rs @@ -307,6 +307,10 @@ pub enum ErrorCode { #[error("Invalid Request")] InvalidRequest = 106, + + #[error("In progress")] + // when client receives InProgress, it should retry + InProgress = 107, } impl ErrorCode { diff --git a/frontend/rust-lib/flowy-folder-pub/src/cloud.rs b/frontend/rust-lib/flowy-folder-pub/src/cloud.rs index 4747a4eb51..19c3422984 100644 --- a/frontend/rust-lib/flowy-folder-pub/src/cloud.rs +++ b/frontend/rust-lib/flowy-folder-pub/src/cloud.rs @@ -3,7 +3,6 @@ pub use anyhow::Error; use collab_entity::CollabType; pub use collab_folder::{Folder, FolderData, Workspace}; use lib_infra::async_trait::async_trait; -use lib_infra::future::FutureResult; use uuid::Uuid; /// [FolderCloudService] represents the cloud service for folder. 
@@ -11,59 +10,59 @@ use uuid::Uuid; pub trait FolderCloudService: Send + Sync + 'static { /// Creates a new workspace for the user. /// Returns error if the cloud service doesn't support multiple workspaces - fn create_workspace(&self, uid: i64, name: &str) -> FutureResult; + async fn create_workspace(&self, uid: i64, name: &str) -> Result; - fn open_workspace(&self, workspace_id: &str) -> FutureResult<(), Error>; + async fn open_workspace(&self, workspace_id: &str) -> Result<(), Error>; /// Returns all workspaces of the user. /// Returns vec![] if the cloud service doesn't support multiple workspaces - fn get_all_workspace(&self) -> FutureResult, Error>; + async fn get_all_workspace(&self) -> Result, Error>; - fn get_folder_data( + async fn get_folder_data( &self, workspace_id: &str, uid: &i64, - ) -> FutureResult, Error>; + ) -> Result, Error>; - fn get_folder_snapshots( + async fn get_folder_snapshots( &self, workspace_id: &str, limit: usize, - ) -> FutureResult, Error>; + ) -> Result, Error>; - fn get_folder_doc_state( + async fn get_folder_doc_state( &self, workspace_id: &str, uid: i64, collab_type: CollabType, object_id: &str, - ) -> FutureResult, Error>; + ) -> Result, Error>; - fn batch_create_folder_collab_objects( + async fn batch_create_folder_collab_objects( &self, workspace_id: &str, objects: Vec, - ) -> FutureResult<(), Error>; + ) -> Result<(), Error>; fn service_name(&self) -> String; - fn publish_view( + async fn publish_view( &self, workspace_id: &str, payload: Vec, - ) -> FutureResult<(), Error>; + ) -> Result<(), Error>; - fn unpublish_views(&self, workspace_id: &str, view_ids: Vec) -> FutureResult<(), Error>; + async fn unpublish_views(&self, workspace_id: &str, view_ids: Vec) -> Result<(), Error>; - fn get_publish_info(&self, view_id: &str) -> FutureResult; + async fn get_publish_info(&self, view_id: &str) -> Result; - fn set_publish_namespace( + async fn set_publish_namespace( &self, workspace_id: &str, new_namespace: &str, - ) -> 
FutureResult<(), Error>; + ) -> Result<(), Error>; - fn get_publish_namespace(&self, workspace_id: &str) -> FutureResult; + async fn get_publish_namespace(&self, workspace_id: &str) -> Result; } #[derive(Debug)] diff --git a/frontend/rust-lib/flowy-folder/Cargo.toml b/frontend/rust-lib/flowy-folder/Cargo.toml index e0327a5044..20a131cf6c 100644 --- a/frontend/rust-lib/flowy-folder/Cargo.toml +++ b/frontend/rust-lib/flowy-folder/Cargo.toml @@ -17,7 +17,7 @@ flowy-search-pub = { workspace = true } flowy-sqlite = { workspace = true } flowy-derive.workspace = true flowy-notification = { workspace = true } -parking_lot.workspace = true +arc-swap.workspace = true unicode-segmentation = "1.10" tracing.workspace = true flowy-error = { path = "../flowy-error", features = [ diff --git a/frontend/rust-lib/flowy-folder/src/event_handler.rs b/frontend/rust-lib/flowy-folder/src/event_handler.rs index e3426db8a8..30b6566862 100644 --- a/frontend/rust-lib/flowy-folder/src/event_handler.rs +++ b/frontend/rust-lib/flowy-folder/src/event_handler.rs @@ -107,7 +107,7 @@ pub(crate) async fn create_view_handler( let set_as_current = params.set_as_current; let (view, _) = folder.create_view_with_params(params, true).await?; if set_as_current { - let _ = folder.set_current_view(&view.id).await; + let _ = folder.set_current_view(view.id.clone()).await; } data_result_ok(view_pb_without_child_views(view)) } @@ -121,7 +121,7 @@ pub(crate) async fn create_orphan_view_handler( let set_as_current = params.set_as_current; let view = folder.create_orphan_view_with_params(params).await?; if set_as_current { - let _ = folder.set_current_view(&view.id).await; + let _ = folder.set_current_view(view.id.clone()).await; } data_result_ok(view_pb_without_child_views(view)) } @@ -226,7 +226,7 @@ pub(crate) async fn set_latest_view_handler( ) -> Result<(), FlowyError> { let folder = upgrade_folder(folder)?; let view_id: ViewIdPB = data.into_inner(); - let _ = folder.set_current_view(&view_id.value).await; + 
let _ = folder.set_current_view(view_id.value.clone()).await; Ok(()) } @@ -400,7 +400,9 @@ pub(crate) async fn update_view_visibility_status_handler( ) -> Result<(), FlowyError> { let folder = upgrade_folder(folder)?; let params = data.into_inner(); - folder.set_views_visibility(params.view_ids, params.is_public); + folder + .set_views_visibility(params.view_ids, params.is_public) + .await; Ok(()) } diff --git a/frontend/rust-lib/flowy-folder/src/manager.rs b/frontend/rust-lib/flowy-folder/src/manager.rs index f9034bf481..bc691ab7dc 100644 --- a/frontend/rust-lib/flowy-folder/src/manager.rs +++ b/frontend/rust-lib/flowy-folder/src/manager.rs @@ -20,14 +20,16 @@ use crate::util::{ use crate::view_operation::{ create_view, EncodedCollabWrapper, FolderOperationHandler, FolderOperationHandlers, }; -use collab::core::collab::{DataSource, MutexCollab}; +use arc_swap::ArcSwapOption; +use collab::core::collab::DataSource; use collab_entity::{CollabType, EncodedCollab}; -use collab_folder::error::FolderError; use collab_folder::{ - Folder, FolderNotify, Section, SectionItem, TrashInfo, UserId, View, ViewLayout, ViewUpdate, + Folder, FolderData, FolderNotify, Section, SectionItem, TrashInfo, View, ViewLayout, ViewUpdate, Workspace, }; -use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig}; +use collab_integrate::collab_builder::{ + AppFlowyCollabBuilder, CollabBuilderConfig, KVDBCollabPersistenceImpl, +}; use collab_integrate::CollabKVDB; use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult}; use flowy_folder_pub::cloud::{gen_view_id, FolderCloudService, FolderCollabParams}; @@ -39,22 +41,27 @@ use flowy_folder_pub::folder_builder::ParentChildViews; use flowy_search_pub::entities::FolderIndexManager; use flowy_sqlite::kv::KVStorePreferences; use futures::future; -use parking_lot::RwLock; use std::collections::HashMap; use std::fmt::{Display, Formatter}; -use std::ops::Deref; use std::sync::{Arc, Weak}; +use tokio::sync::RwLock; 
use tracing::{error, info, instrument}; pub trait FolderUser: Send + Sync { fn user_id(&self) -> Result; fn workspace_id(&self) -> Result; fn collab_db(&self, uid: i64) -> Result, FlowyError>; + + fn is_folder_exist_on_disk(&self, uid: i64, workspace_id: &str) -> FlowyResult; } pub struct FolderManager { + //FIXME: there's no sense in having a mutex_folder behind an RwLock. It's being obtained multiple + // times in the same function. FolderManager itself should be hidden behind RwLock if necessary. + // Unfortunately, this would require a changing the SyncPlugin architecture which requires access + // to Arc>>. Eventually SyncPlugin should be refactored. /// MutexFolder is the folder that is used to store the data. - pub(crate) mutex_folder: Arc, + pub(crate) mutex_folder: ArcSwapOption>, pub(crate) collab_builder: Arc, pub(crate) user: Arc, pub(crate) operation_handlers: FolderOperationHandlers, @@ -72,10 +79,9 @@ impl FolderManager { folder_indexer: Arc, store_preferences: Arc, ) -> FlowyResult { - let mutex_folder = Arc::new(MutexFolder::default()); let manager = Self { user, - mutex_folder, + mutex_folder: Default::default(), collab_builder, operation_handlers, cloud_service, @@ -89,12 +95,14 @@ impl FolderManager { #[instrument(level = "debug", skip(self), err)] pub async fn get_current_workspace(&self) -> FlowyResult { let workspace_id = self.user.workspace_id()?; - self.with_folder( - || { + + match self.mutex_folder.load_full() { + None => { let uid = self.user.user_id()?; Err(workspace_data_not_sync_error(uid, &workspace_id)) }, - |folder| { + Some(lock) => { + let folder = lock.read().await; let workspace_pb_from_workspace = |workspace: Workspace, folder: &Folder| { let views = get_workspace_public_view_pbs(&workspace_id, folder); let workspace: WorkspacePB = (workspace, views).into(); @@ -103,10 +111,10 @@ impl FolderManager { match folder.get_workspace_info(&workspace_id) { None => Err(FlowyError::record_not_found().with_context("Can not find the 
workspace")), - Some(workspace) => workspace_pb_from_workspace(workspace, folder), + Some(workspace) => workspace_pb_from_workspace(workspace, &folder), } }, - ) + } } /// Return a list of views of the current workspace. @@ -118,16 +126,24 @@ impl FolderManager { pub async fn get_workspace_public_views(&self) -> FlowyResult> { let workspace_id = self.user.workspace_id()?; - Ok(self.with_folder(Vec::new, |folder| { - get_workspace_public_view_pbs(&workspace_id, folder) - })) + match self.mutex_folder.load_full() { + None => Ok(Vec::default()), + Some(lock) => { + let folder = lock.read().await; + Ok(get_workspace_public_view_pbs(&workspace_id, &folder)) + }, + } } pub async fn get_workspace_private_views(&self) -> FlowyResult> { let workspace_id = self.user.workspace_id()?; - Ok(self.with_folder(Vec::new, |folder| { - get_workspace_private_view_pbs(&workspace_id, folder) - })) + match self.mutex_folder.load_full() { + None => Ok(Vec::default()), + Some(folder) => { + let folder = folder.read().await; + Ok(get_workspace_private_view_pbs(&workspace_id, &folder)) + }, + } } #[instrument(level = "trace", skip_all, err)] @@ -136,59 +152,48 @@ impl FolderManager { uid: i64, workspace_id: &str, collab_db: Weak, - doc_state: DataSource, + data_source: Option, folder_notifier: T, - ) -> Result { + ) -> Result>, FlowyError> { let folder_notifier = folder_notifier.into(); // only need the check the workspace id when the doc state is not from the disk. 
- let should_check_workspace_id = !matches!(doc_state, DataSource::Disk); - let should_auto_initialize = !should_check_workspace_id; let config = CollabBuilderConfig::default() .sync_enable(true) - .auto_initialize(should_auto_initialize); + .auto_initialize(true); + + let data_source = data_source + .unwrap_or_else(|| KVDBCollabPersistenceImpl::new(collab_db.clone(), uid).into_data_source()); let object_id = workspace_id; - let collab = self.collab_builder.build_with_config( - workspace_id, - uid, - object_id, - CollabType::Folder, + let collab_object = + self + .collab_builder + .collab_object(workspace_id, uid, object_id, CollabType::Folder)?; + let result = self.collab_builder.create_folder( + collab_object, + data_source, collab_db, - doc_state, config, - )?; - let (should_clear, err) = match Folder::open(UserId::from(uid), collab.clone(), folder_notifier) - { - Ok(folder) => { - if should_check_workspace_id { - // check the workspace id in the folder is matched with the workspace id. Just in case the folder - // is overwritten by another workspace. - let folder_workspace_id = folder.get_workspace_id(); - if folder_workspace_id != workspace_id { - error!( - "expect workspace_id: {}, actual workspace_id: {}", - workspace_id, folder_workspace_id - ); - return Err(FlowyError::workspace_data_not_match()); - } - // Initialize the folder manually - collab.lock().initialize(); - } - return Ok(folder); - }, - Err(err) => (matches!(err, FolderError::NoRequiredData(_)), err), - }; + folder_notifier, + None, + ); // If opening the folder fails due to missing required data (indicated by a `FolderError::NoRequiredData`), // the function logs an informational message and attempts to clear the folder data by deleting its // document from the collaborative database. It then returns the encountered error. 
- if should_clear { - info!("Clear the folder data and try to open the folder again"); - if let Some(db) = self.user.collab_db(uid).ok().and_then(|a| a.upgrade()) { - let _ = db.delete_doc(uid, workspace_id).await; - } + match result { + Ok(folder) => Ok(folder), + Err(err) => { + info!( + "Clear the folder data and try to open the folder again due to: {}", + err + ); + if let Some(db) = self.user.collab_db(uid).ok().and_then(|a| a.upgrade()) { + let _ = db.delete_doc(uid, workspace_id).await; + } + Err(err.into()) + }, } - Err(err.into()) } pub(crate) async fn create_empty_collab( @@ -196,18 +201,25 @@ impl FolderManager { uid: i64, workspace_id: &str, collab_db: Weak, - ) -> Result, FlowyError> { + notifier: Option, + folder_data: Option, + ) -> Result>, FlowyError> { let object_id = workspace_id; - let collab = self.collab_builder.build_with_config( - workspace_id, - uid, - object_id, - CollabType::Folder, + let collab_object = + self + .collab_builder + .collab_object(workspace_id, uid, object_id, CollabType::Folder)?; + + let doc_state = KVDBCollabPersistenceImpl::new(collab_db.clone(), uid).into_data_source(); + let folder = self.collab_builder.create_folder( + collab_object, + doc_state, collab_db, - DataSource::Disk, CollabBuilderConfig::default().sync_enable(true), + notifier, + folder_data, )?; - Ok(collab) + Ok(folder) } /// Initialize the folder with the given workspace id. @@ -216,21 +228,12 @@ impl FolderManager { pub async fn initialize_with_workspace_id(&self, user_id: i64) -> FlowyResult<()> { let workspace_id = self.user.workspace_id()?; let object_id = &workspace_id; - let folder_doc_state = self - .cloud_service - .get_folder_doc_state(&workspace_id, user_id, CollabType::Folder, object_id) - .await?; - if let Err(err) = self - .initialize( - user_id, - &workspace_id, - FolderInitDataSource::Cloud(folder_doc_state), - ) - .await - { - // If failed to open folder with remote data, open from local disk. After open from the local - // disk. 
the data will be synced to the remote server. - error!("initialize folder with error {:?}, fallback local", err); + + let is_exist = self + .user + .is_folder_exist_on_disk(user_id, &workspace_id) + .unwrap_or(false); + if is_exist { self .initialize( user_id, @@ -240,7 +243,34 @@ impl FolderManager { }, ) .await?; + } else { + let folder_doc_state = self + .cloud_service + .get_folder_doc_state(&workspace_id, user_id, CollabType::Folder, object_id) + .await?; + if let Err(err) = self + .initialize( + user_id, + &workspace_id, + FolderInitDataSource::Cloud(folder_doc_state), + ) + .await + { + // If failed to open folder with remote data, open from local disk. After open from the local + // disk. the data will be synced to the remote server. + error!("initialize folder with error {:?}, fallback local", err); + self + .initialize( + user_id, + &workspace_id, + FolderInitDataSource::LocalDisk { + create_if_not_exist: false, + }, + ) + .await?; + } } + Ok(()) } @@ -322,36 +352,34 @@ impl FolderManager { &self, views: Vec, ) -> Result<(), FlowyError> { - self.with_folder( - || Err(FlowyError::internal().with_context("The folder is not initialized")), - |folder| { + match self.mutex_folder.load_full() { + None => Err(FlowyError::internal().with_context("The folder is not initialized")), + Some(lock) => { + let mut folder = lock.write().await; for view in views { - insert_parent_child_views(folder, view); + insert_parent_child_views(&mut folder, view); } Ok(()) }, - )?; - - Ok(()) + } } pub async fn get_workspace_pb(&self) -> FlowyResult { let workspace_id = self.user.workspace_id()?; - let guard = self.mutex_folder.write(); - let folder = guard - .as_ref() - .ok_or(FlowyError::internal().with_context("folder is not initialized"))?; + let lock = self + .mutex_folder + .load_full() + .ok_or_else(|| FlowyError::internal().with_context("folder is not initialized"))?; + let folder = lock.read().await; let workspace = folder .get_workspace_info(&workspace_id) .ok_or_else(|| 
FlowyError::record_not_found().with_context("Can not find the workspace"))?; let views = folder - .views .get_views_belong_to(&workspace.id) .into_iter() .map(|view| view_pb_without_child_views(view.as_ref().clone())) .collect::>(); - drop(guard); Ok(WorkspacePB { id: workspace.id, @@ -361,25 +389,6 @@ impl FolderManager { }) } - /// This function acquires a lock on the `mutex_folder` and checks its state. - /// If the folder is `None`, it invokes the `none_callback`, otherwise, it passes the folder to the `f2` callback. - /// - /// # Parameters - /// - /// * `none_callback`: A callback function that is invoked when `mutex_folder` contains `None`. - /// * `f2`: A callback function that is invoked when `mutex_folder` contains a `Some` value. The contained folder is passed as an argument to this callback. - fn with_folder(&self, none_callback: F1, f2: F2) -> Output - where - F1: FnOnce() -> Output, - F2: FnOnce(&Folder) -> Output, - { - let folder = self.mutex_folder.write(); - match &*folder { - None => none_callback(), - Some(folder) => f2(folder), - } - } - /// Asynchronously creates a view with provided parameters and notifies the workspace if update is needed. /// /// Commonly, the notify_workspace_update parameter is set to true when the view is created in the workspace. 
@@ -412,20 +421,14 @@ impl FolderManager { let section = params.section.clone().unwrap_or(ViewSectionPB::Public); let is_private = section == ViewSectionPB::Private; let view = create_view(self.user.user_id()?, params, view_layout); - self.with_folder( - || (), - |folder| { - folder.insert_view(view.clone(), index); - if is_private { - folder.add_private_view_ids(vec![view.id.clone()]); - } - }, - ); - - if notify_workspace_update { - let folder = &self.mutex_folder.read(); - if let Some(folder) = folder.as_ref() { - notify_did_update_workspace(&workspace_id, folder); + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.insert_view(view.clone(), index); + if is_private { + folder.add_private_view_ids(vec![view.id.clone()]); + } + if notify_workspace_update { + notify_did_update_workspace(&workspace_id, &folder); } } @@ -448,20 +451,24 @@ impl FolderManager { .await?; let view = create_view(self.user.user_id()?, params, view_layout); - self.with_folder( - || (), - |folder| { - folder.insert_view(view.clone(), None); - }, - ); + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.insert_view(view.clone(), None); + } Ok(view) } #[tracing::instrument(level = "debug", skip(self), err)] pub(crate) async fn close_view(&self, view_id: &str) -> Result<(), FlowyError> { - if let Some(view) = self.with_folder(|| None, |folder| folder.views.get_view(view_id)) { - let handler = self.get_handler(&view.layout)?; - handler.close_view(view_id).await?; + if let Some(lock) = self.mutex_folder.load_full() { + let folder = lock.read().await; + if let Some(view) = folder.get_view(view_id) { + // Drop the folder lock explicitly to avoid deadlock when following calls contains 'self' + drop(folder); + + let handler = self.get_handler(&view.layout)?; + handler.close_view(view_id).await?; + } } Ok(()) } @@ -477,11 +484,14 @@ impl FolderManager { pub async fn get_view_pb(&self, view_id: &str) -> 
FlowyResult { let view_id = view_id.to_string(); - let folder = self.mutex_folder.read(); - let folder = folder.as_ref().ok_or_else(folder_not_init_error)?; + let lock = self + .mutex_folder + .load_full() + .ok_or_else(folder_not_init_error)?; + let folder = lock.read().await; // trash views and other private views should not be accessed - let view_ids_should_be_filtered = self.get_view_ids_should_be_filtered(folder); + let view_ids_should_be_filtered = Self::get_view_ids_should_be_filtered(&folder); if view_ids_should_be_filtered.contains(&view_id) { return Err(FlowyError::new( @@ -490,14 +500,13 @@ impl FolderManager { )); } - match folder.views.get_view(&view_id) { + match folder.get_view(&view_id) { None => { error!("Can't find the view with id: {}", view_id); Err(FlowyError::record_not_found()) }, Some(view) => { let child_views = folder - .views .get_views_belong_to(&view.id) .into_iter() .filter(|view| !view_ids_should_be_filtered.contains(&view.id)) @@ -520,11 +529,14 @@ impl FolderManager { &self, view_ids: Vec, ) -> FlowyResult> { - let folder = self.mutex_folder.read(); - let folder = folder.as_ref().ok_or_else(folder_not_init_error)?; + let lock = self + .mutex_folder + .load_full() + .ok_or_else(folder_not_init_error)?; // trash views and other private views should not be accessed - let view_ids_should_be_filtered = self.get_view_ids_should_be_filtered(folder); + let folder = lock.read().await; + let view_ids_should_be_filtered = Self::get_view_ids_should_be_filtered(&folder); let views = view_ids .into_iter() @@ -532,7 +544,7 @@ impl FolderManager { if view_ids_should_be_filtered.contains(&view_id) { return None; } - folder.views.get_view(&view_id) + folder.get_view(&view_id) }) .map(view_pb_without_child_views_from_arc) .collect::>(); @@ -548,13 +560,16 @@ impl FolderManager { /// #[tracing::instrument(level = "debug", skip(self))] pub async fn get_all_views_pb(&self) -> FlowyResult> { - let folder = self.mutex_folder.read(); - let folder = 
folder.as_ref().ok_or_else(folder_not_init_error)?; + let lock = self + .mutex_folder + .load_full() + .ok_or_else(folder_not_init_error)?; // trash views and other private views should not be accessed - let view_ids_should_be_filtered = self.get_view_ids_should_be_filtered(folder); + let folder = lock.read().await; + let view_ids_should_be_filtered = Self::get_view_ids_should_be_filtered(&folder); - let all_views = folder.views.get_all_views(); + let all_views = folder.get_all_views(); let views = all_views .into_iter() .filter(|view| !view_ids_should_be_filtered.contains(&view.id)) @@ -576,17 +591,18 @@ impl FolderManager { pub async fn get_view_ancestors_pb(&self, view_id: &str) -> FlowyResult> { let mut ancestors = vec![]; let mut parent_view_id = view_id.to_string(); - while let Some(view) = - self.with_folder(|| None, |folder| folder.views.get_view(&parent_view_id)) - { - // If the view is already in the ancestors list, then break the loop - if ancestors.iter().any(|v: &ViewPB| v.id == view.id) { - break; + if let Some(lock) = self.mutex_folder.load_full() { + let folder = lock.read().await; + while let Some(view) = folder.get_view(&parent_view_id) { + // If the view is already in the ancestors list, then break the loop + if ancestors.iter().any(|v: &ViewPB| v.id == view.id) { + break; + } + ancestors.push(view_pb_without_child_views(view.as_ref().clone())); + parent_view_id = view.parent_view_id.clone(); } - ancestors.push(view_pb_without_child_views(view.as_ref().clone())); - parent_view_id = view.parent_view_id.clone(); + ancestors.reverse(); } - ancestors.reverse(); Ok(ancestors) } @@ -595,34 +611,34 @@ impl FolderManager { /// All the favorite views being trashed will be unfavorited first to remove it from favorites list as well. 
The process of unfavoriting concerned view is handled by `unfavorite_view_and_decendants()` #[tracing::instrument(level = "debug", skip(self), err)] pub async fn move_view_to_trash(&self, view_id: &str) -> FlowyResult<()> { - self.with_folder( - || (), - |folder| { - if let Some(view) = folder.views.get_view(view_id) { - self.unfavorite_view_and_decendants(view.clone(), folder); - folder.add_trash_view_ids(vec![view_id.to_string()]); - // notify the parent view that the view is moved to trash - send_notification(view_id, FolderNotification::DidMoveViewToTrash) - .payload(DeletedViewPB { - view_id: view_id.to_string(), - index: None, - }) - .send(); + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + if let Some(view) = folder.get_view(view_id) { + Self::unfavorite_view_and_decendants(view.clone(), &mut folder); + folder.add_trash_view_ids(vec![view_id.to_string()]); + drop(folder); - notify_child_views_changed( - view_pb_without_child_views(view.as_ref().clone()), - ChildViewChangeReason::Delete, - ); - } - }, - ); + // notify the parent view that the view is moved to trash + send_notification(view_id, FolderNotification::DidMoveViewToTrash) + .payload(DeletedViewPB { + view_id: view_id.to_string(), + index: None, + }) + .send(); + + notify_child_views_changed( + view_pb_without_child_views(view.as_ref().clone()), + ChildViewChangeReason::Delete, + ); + } + } Ok(()) } - fn unfavorite_view_and_decendants(&self, view: Arc, folder: &Folder) { + fn unfavorite_view_and_decendants(view: Arc, folder: &mut Folder) { let mut all_descendant_views: Vec> = vec![view.clone()]; - all_descendant_views.extend(folder.views.get_views_belong_to(&view.id)); + all_descendant_views.extend(folder.get_views_belong_to(&view.id)); let favorite_descendant_views: Vec = all_descendant_views .iter() @@ -672,25 +688,18 @@ impl FolderManager { let to_section = params.to_section; let view = self.get_view_pb(&view_id).await?; let old_parent_id = 
view.parent_view_id; - self.with_folder( - || (), - |folder| { - folder.move_nested_view(&view_id, &new_parent_id, prev_view_id); - - if from_section != to_section { - if to_section == Some(ViewSectionPB::Private) { - folder.add_private_view_ids(vec![view_id.clone()]); - } else { - folder.delete_private_view_ids(vec![view_id.clone()]); - } + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.move_nested_view(&view_id, &new_parent_id, prev_view_id); + if from_section != to_section { + if to_section == Some(ViewSectionPB::Private) { + folder.add_private_view_ids(vec![view_id.clone()]); + } else { + folder.delete_private_view_ids(vec![view_id.clone()]); } - }, - ); - notify_parent_view_did_change( - &workspace_id, - self.mutex_folder.clone(), - vec![new_parent_id, old_parent_id], - ); + } + notify_parent_view_did_change(&workspace_id, &folder, vec![new_parent_id, old_parent_id]); + } Ok(()) } @@ -731,17 +740,11 @@ impl FolderManager { if let (Some(actual_from_index), Some(actual_to_index)) = (actual_from_index, actual_to_index) { - self.with_folder( - || (), - |folder| { - folder.move_view(view_id, actual_from_index as u32, actual_to_index as u32); - }, - ); - notify_parent_view_did_change( - &workspace_id, - self.mutex_folder.clone(), - vec![parent_view_id], - ); + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.move_view(view_id, actual_from_index as u32, actual_to_index as u32); + notify_parent_view_did_change(&workspace_id, &folder, vec![parent_view_id]); + } } } } @@ -751,10 +754,10 @@ impl FolderManager { /// Return a list of views that belong to the given parent view id. 
#[tracing::instrument(level = "debug", skip(self, parent_view_id), err)] pub async fn get_views_belong_to(&self, parent_view_id: &str) -> FlowyResult>> { - let views = self.with_folder(Vec::new, |folder| { - folder.views.get_views_belong_to(parent_view_id) - }); - Ok(views) + match self.mutex_folder.load_full() { + Some(folder) => Ok(folder.read().await.get_views_belong_to(parent_view_id)), + None => Ok(Vec::default()), + } } /// Update the view with the given params. @@ -791,9 +794,18 @@ impl FolderManager { /// Including the view data (icon, cover, extra) and the child views. #[tracing::instrument(level = "debug", skip(self), err)] pub(crate) async fn duplicate_view(&self, params: DuplicateViewParams) -> Result<(), FlowyError> { - let view = self - .with_folder(|| None, |folder| folder.views.get_view(¶ms.view_id)) + let lock = self + .mutex_folder + .load_full() .ok_or_else(|| FlowyError::record_not_found().with_context("Can't duplicate the view"))?; + let folder = lock.read().await; + let view = folder + .get_view(¶ms.view_id) + .ok_or_else(|| FlowyError::record_not_found().with_context("Can't duplicate the view"))?; + + // Explicitly drop the folder lock to avoid deadlock when following calls contains 'self' + drop(folder); + let parent_view_id = params .parent_view_id .clone() @@ -831,9 +843,13 @@ impl FolderManager { } // filter the view ids that in the trash or private section - let filtered_view_ids = self.with_folder(Vec::new, |folder| { - self.get_view_ids_should_be_filtered(folder) - }); + let filtered_view_ids = match self.mutex_folder.load_full() { + None => Vec::default(), + Some(lock) => { + let folder = lock.read().await; + Self::get_view_ids_should_be_filtered(&folder) + }, + }; // only apply the `open_after_duplicated` and the `include_children` to the first view let mut is_source_view = true; @@ -842,9 +858,20 @@ impl FolderManager { let mut objects = vec![]; let suffix = suffix.unwrap_or(" (copy)".to_string()); + let lock = match 
self.mutex_folder.load_full() { + None => { + return Err( + FlowyError::record_not_found() + .with_context(format!("Can't duplicate the view({})", view_id)), + ) + }, + Some(lock) => lock, + }; while let Some((current_view_id, current_parent_id)) = stack.pop() { - let view = self - .with_folder(|| None, |folder| folder.views.get_view(¤t_view_id)) + let view = lock + .read() + .await + .get_view(¤t_view_id) .ok_or_else(|| { FlowyError::record_not_found() .with_context(format!("Can't duplicate the view({})", view_id)) @@ -864,16 +891,14 @@ impl FolderManager { .map(|i| i as u32) }); - let section = self.with_folder( - || ViewSectionPB::Private, - |folder| { - if folder.is_view_in_section(Section::Private, &view.id) { - ViewSectionPB::Private - } else { - ViewSectionPB::Public - } - }, - ); + let section = { + let folder = lock.read().await; + if folder.is_view_in_section(Section::Private, &view.id) { + ViewSectionPB::Private + } else { + ViewSectionPB::Public + } + }; let name = if is_source_view { format!("{}{}", &view.name, suffix) @@ -946,32 +971,28 @@ impl FolderManager { } // notify the update here - notify_parent_view_did_change( - workspace_id, - self.mutex_folder.clone(), - vec![parent_view_id.to_string()], - ); + let folder = lock.read().await; + notify_parent_view_did_change(workspace_id, &folder, vec![parent_view_id.to_string()]); Ok(()) } #[tracing::instrument(level = "trace", skip(self), err)] - pub(crate) async fn set_current_view(&self, view_id: &str) -> Result<(), FlowyError> { - self.with_folder( - || Err(FlowyError::record_not_found()), - |folder| { - folder.set_current_view(view_id); - folder.add_recent_view_ids(vec![view_id.to_string()]); - Ok(()) - }, - )?; + pub(crate) async fn set_current_view(&self, view_id: String) -> Result<(), FlowyError> { + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.set_current_view(view_id.clone()); + folder.add_recent_view_ids(vec![view_id.clone()]); + } else { + 
return Err(FlowyError::record_not_found()); + } let view = self.get_current_view().await; if let Some(view) = &view { let view_layout: ViewLayout = view.layout.clone().into(); if let Some(handle) = self.operation_handlers.get(&view_layout) { info!("Open view: {}", view.id); - if let Err(err) = handle.open_view(view_id).await { + if let Err(err) = handle.open_view(&view_id).await { error!("Open view error: {:?}", err); } } @@ -988,25 +1009,29 @@ impl FolderManager { #[tracing::instrument(level = "trace", skip(self))] pub(crate) async fn get_current_view(&self) -> Option { - let view_id = self.with_folder(|| None, |folder| folder.get_current_view())?; + let view_id = { + let lock = self.mutex_folder.load_full()?; + let folder = lock.read().await; + let view = folder.get_current_view()?; + drop(folder); + view + }; self.get_view_pb(&view_id).await.ok() } /// Toggles the favorite status of a view identified by `view_id`If the view is not a favorite, it will be added to the favorites list; otherwise, it will be removed from the list. #[tracing::instrument(level = "debug", skip(self), err)] pub async fn toggle_favorites(&self, view_id: &str) -> FlowyResult<()> { - self.with_folder( - || (), - |folder| { - if let Some(old_view) = folder.views.get_view(view_id) { - if old_view.is_favorite { - folder.delete_favorite_view_ids(vec![view_id.to_string()]); - } else { - folder.add_favorite_view_ids(vec![view_id.to_string()]); - } + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + if let Some(old_view) = folder.get_view(view_id) { + if old_view.is_favorite { + folder.delete_favorite_view_ids(vec![view_id.to_string()]); + } else { + folder.add_favorite_view_ids(vec![view_id.to_string()]); } - }, - ); + } + } self.send_toggle_favorite_notification(view_id).await; Ok(()) } @@ -1014,12 +1039,10 @@ impl FolderManager { /// Add the view to the recent view list / history. 
#[tracing::instrument(level = "debug", skip(self), err)] pub async fn add_recent_views(&self, view_ids: Vec) -> FlowyResult<()> { - self.with_folder( - || (), - |folder| { - folder.add_recent_view_ids(view_ids); - }, - ); + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.add_recent_view_ids(view_ids); + } self.send_update_recent_views_notification().await; Ok(()) } @@ -1027,12 +1050,10 @@ impl FolderManager { /// Add the view to the recent view list / history. #[tracing::instrument(level = "debug", skip(self), err)] pub async fn remove_recent_views(&self, view_ids: Vec) -> FlowyResult<()> { - self.with_folder( - || (), - |folder| { - folder.delete_recent_view_ids(view_ids); - }, - ); + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.delete_recent_view_ids(view_ids); + } self.send_update_recent_views_notification().await; Ok(()) } @@ -1047,12 +1068,22 @@ impl FolderManager { publish_name: Option, selected_view_ids: Option>, ) -> FlowyResult<()> { - let view = self - .with_folder(|| None, |folder| folder.views.get_view(view_id)) - .ok_or_else(|| { + let view = { + let lock = match self.mutex_folder.load_full() { + None => { + return Err( + FlowyError::record_not_found() + .with_context(format!("Can't find the view with ID: {}", view_id)), + ) + }, + Some(lock) => lock, + }; + let read_guard = lock.read().await; + read_guard.get_view(view_id).ok_or_else(|| { FlowyError::record_not_found() .with_context(format!("Can't find the view with ID: {}", view_id)) - })?; + })? 
+ }; if view.layout == ViewLayout::Chat { return Err(FlowyError::new( @@ -1311,52 +1342,57 @@ impl FolderManager { #[tracing::instrument(level = "trace", skip(self))] pub(crate) async fn get_all_favorites(&self) -> Vec { - self.get_sections(Section::Favorite) + self.get_sections(Section::Favorite).await } #[tracing::instrument(level = "debug", skip(self))] pub(crate) async fn get_my_recent_sections(&self) -> Vec { - self.get_sections(Section::Recent) + self.get_sections(Section::Recent).await } #[tracing::instrument(level = "trace", skip(self))] pub(crate) async fn get_my_trash_info(&self) -> Vec { - self.with_folder(Vec::new, |folder| folder.get_my_trash_info()) + match self.mutex_folder.load_full() { + None => Vec::default(), + Some(folder) => folder.read().await.get_my_trash_info(), + } } #[tracing::instrument(level = "trace", skip(self))] pub(crate) async fn restore_all_trash(&self) { - self.with_folder( - || (), - |folder| { - folder.remove_all_my_trash_sections(); - }, - ); - send_notification("trash", FolderNotification::DidUpdateTrash) - .payload(RepeatedTrashPB { items: vec![] }) - .send(); + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.remove_all_my_trash_sections(); + send_notification("trash", FolderNotification::DidUpdateTrash) + .payload(RepeatedTrashPB { items: vec![] }) + .send(); + } } #[tracing::instrument(level = "trace", skip(self))] pub(crate) async fn restore_trash(&self, trash_id: &str) { - self.with_folder( - || (), - |folder| { - folder.delete_trash_view_ids(vec![trash_id.to_string()]); - }, - ); + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.delete_trash_view_ids(vec![trash_id.to_string()]); + } } /// Delete all the trash permanently. 
#[tracing::instrument(level = "trace", skip(self))] pub(crate) async fn delete_my_trash(&self) { - let deleted_trash = self.with_folder(Vec::new, |folder| folder.get_my_trash_info()); - for trash in deleted_trash { - let _ = self.delete_trash(&trash.id).await; + if let Some(lock) = self.mutex_folder.load_full() { + let deleted_trash = lock.read().await.get_my_trash_info(); + + // Explicitly drop the folder lock to avoid deadlock when following calls contains 'self' + drop(lock); + + for trash in deleted_trash { + let _ = self.delete_trash(&trash.id).await; + } + send_notification("trash", FolderNotification::DidUpdateTrash) + .payload(RepeatedTrashPB { items: vec![] }) + .send(); } - send_notification("trash", FolderNotification::DidUpdateTrash) - .payload(RepeatedTrashPB { items: vec![] }) - .send(); } /// Delete the trash permanently. @@ -1364,17 +1400,19 @@ impl FolderManager { /// is a database view. Then the database will be deleted as well. #[tracing::instrument(level = "debug", skip(self, view_id), err)] pub async fn delete_trash(&self, view_id: &str) -> FlowyResult<()> { - let view = self.with_folder(|| None, |folder| folder.views.get_view(view_id)); - self.with_folder( - || (), - |folder| { + if let Some(lock) = self.mutex_folder.load_full() { + let view = { + let mut folder = lock.write().await; + let view = folder.get_view(view_id); folder.delete_trash_view_ids(vec![view_id.to_string()]); - folder.views.delete_views(vec![view_id]); - }, - ); - if let Some(view) = view { - if let Ok(handler) = self.get_handler(&view.layout) { - handler.delete_view(view_id).await?; + folder.delete_views(vec![view_id]); + view + }; + + if let Some(view) = view { + if let Ok(handler) = self.get_handler(&view.layout) { + handler.delete_view(view_id).await?; + } } } Ok(()) @@ -1416,7 +1454,6 @@ impl FolderManager { // Import data from file path if available if let Some(file_path) = import_data.file_path { - // TODO(Lucas): return the collab handler 
.import_from_file_path(&view_id, &import_data.name, file_path) .await?; @@ -1440,12 +1477,10 @@ impl FolderManager { let view = create_view(self.user.user_id()?, params, import_data.view_layout); // Insert the new view into the folder - self.with_folder( - || (), - |folder| { - folder.insert_view(view.clone(), None); - }, - ); + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + folder.insert_view(view.clone(), None); + } Ok((view, encoded_collab)) } @@ -1493,11 +1528,10 @@ impl FolderManager { } // Notify that the parent view has changed - notify_parent_view_did_change( - &workspace_id, - self.mutex_folder.clone(), - vec![import_data.parent_view_id], - ); + if let Some(lock) = self.mutex_folder.load_full() { + let folder = lock.read().await; + notify_parent_view_did_change(&workspace_id, &folder, vec![import_data.parent_view_id]); + } Ok(RepeatedViewPB { items: views }) } @@ -1508,15 +1542,16 @@ impl FolderManager { F: FnOnce(ViewUpdate) -> Option, { let workspace_id = self.user.workspace_id()?; - let value = self.with_folder( - || None, - |folder| { - let old_view = folder.views.get_view(view_id); - let new_view = folder.views.update_view(view_id, f); + let value = match self.mutex_folder.load_full() { + None => None, + Some(lock) => { + let mut folder = lock.write().await; + let old_view = folder.get_view(view_id); + let new_view = folder.update_view(view_id, f); Some((old_view, new_view)) }, - ); + }; if let Some((Some(old_view), Some(new_view))) = value { if let Ok(handler) = self.get_handler(&old_view.layout) { @@ -1529,9 +1564,9 @@ impl FolderManager { .payload(view_pb) .send(); - let folder = &self.mutex_folder.read(); - if let Some(folder) = folder.as_ref() { - notify_did_update_workspace(&workspace_id, folder); + if let Some(lock) = self.mutex_folder.load_full() { + let folder = lock.read().await; + notify_did_update_workspace(&workspace_id, &folder); } } @@ -1574,37 +1609,34 @@ impl FolderManager { /// child view 
ids of the view. async fn get_view_relation(&self, view_id: &str) -> Option<(bool, String, Vec)> { let workspace_id = self.user.workspace_id().ok()?; - self.with_folder( - || None, - |folder| { - let view = folder.views.get_view(view_id)?; - match folder.views.get_view(&view.parent_view_id) { - None => folder.get_workspace_info(&workspace_id).map(|workspace| { - ( - true, - workspace.id, - workspace - .child_views - .items - .into_iter() - .map(|view| view.id) - .collect::>(), - ) - }), - Some(parent_view) => Some(( - false, - parent_view.id.clone(), - parent_view - .children - .items - .clone() - .into_iter() - .map(|view| view.id) - .collect::>(), - )), - } - }, - ) + let lock = self.mutex_folder.load_full()?; + let folder = lock.read().await; + let view = folder.get_view(view_id)?; + match folder.get_view(&view.parent_view_id) { + None => folder.get_workspace_info(&workspace_id).map(|workspace| { + ( + true, + workspace.id, + workspace + .child_views + .items + .into_iter() + .map(|view| view.id) + .collect::>(), + ) + }), + Some(parent_view) => Some(( + false, + parent_view.id.clone(), + parent_view + .children + .items + .clone() + .into_iter() + .map(|view| view.id) + .collect::>(), + )), + } } pub async fn get_folder_snapshots( @@ -1628,39 +1660,41 @@ impl FolderManager { Ok(snapshots) } - pub fn set_views_visibility(&self, view_ids: Vec, is_public: bool) { - self.with_folder( - || (), - |folder| { - if is_public { - folder.delete_private_view_ids(view_ids); - } else { - folder.add_private_view_ids(view_ids); - } - }, - ); + pub async fn set_views_visibility(&self, view_ids: Vec, is_public: bool) { + if let Some(lock) = self.mutex_folder.load_full() { + let mut folder = lock.write().await; + if is_public { + folder.delete_private_view_ids(view_ids); + } else { + folder.add_private_view_ids(view_ids); + } + } } /// Only support getting the Favorite and Recent sections. 
- fn get_sections(&self, section_type: Section) -> Vec { - self.with_folder(Vec::new, |folder| { - let views = match section_type { - Section::Favorite => folder.get_my_favorite_sections(), - Section::Recent => folder.get_my_recent_sections(), - _ => vec![], - }; - let view_ids_should_be_filtered = self.get_view_ids_should_be_filtered(folder); - views - .into_iter() - .filter(|view| !view_ids_should_be_filtered.contains(&view.id)) - .collect() - }) + async fn get_sections(&self, section_type: Section) -> Vec { + match self.mutex_folder.load_full() { + None => Vec::default(), + Some(lock) => { + let folder = lock.read().await; + let views = match section_type { + Section::Favorite => folder.get_my_favorite_sections(), + Section::Recent => folder.get_my_recent_sections(), + _ => vec![], + }; + let view_ids_should_be_filtered = Self::get_view_ids_should_be_filtered(&folder); + views + .into_iter() + .filter(|view| !view_ids_should_be_filtered.contains(&view.id)) + .collect() + }, + } } /// Get all the view that are in the trash, including the child views of the child views. /// For example, if A view which is in the trash has a child view B, this function will return /// both A and B. - fn get_all_trash_ids(&self, folder: &Folder) -> Vec { + fn get_all_trash_ids(folder: &Folder) -> Vec { let trash_ids = folder .get_all_trash_sections() .into_iter() @@ -1674,13 +1708,13 @@ impl FolderManager { } /// Filter the views that are in the trash and belong to the other private sections. 
- fn get_view_ids_should_be_filtered(&self, folder: &Folder) -> Vec { - let trash_ids = self.get_all_trash_ids(folder); - let other_private_view_ids = self.get_other_private_view_ids(folder); + fn get_view_ids_should_be_filtered(folder: &Folder) -> Vec { + let trash_ids = Self::get_all_trash_ids(folder); + let other_private_view_ids = Self::get_other_private_view_ids(folder); [trash_ids, other_private_view_ids].concat() } - fn get_other_private_view_ids(&self, folder: &Folder) -> Vec { + fn get_other_private_view_ids(folder: &Folder) -> Vec { let my_private_view_ids = folder .get_my_private_sections() .into_iter() @@ -1724,7 +1758,7 @@ pub(crate) fn get_workspace_public_view_pbs(workspace_id: &str, folder: &Folder) .map(|view| view.id) .collect::>(); - let mut views = folder.views.get_views_belong_to(workspace_id); + let mut views = folder.get_views_belong_to(workspace_id); // filter the views that are in the trash and all the private views views.retain(|view| !trash_ids.contains(&view.id) && !private_view_ids.contains(&view.id)); @@ -1732,11 +1766,8 @@ pub(crate) fn get_workspace_public_view_pbs(workspace_id: &str, folder: &Folder) .into_iter() .map(|view| { // Get child views - let mut child_views: Vec> = folder - .views - .get_views_belong_to(&view.id) - .into_iter() - .collect(); + let mut child_views: Vec> = + folder.get_views_belong_to(&view.id).into_iter().collect(); child_views.retain(|view| !trash_ids.contains(&view.id)); view_pb_with_child_views(view, child_views) }) @@ -1746,7 +1777,6 @@ pub(crate) fn get_workspace_public_view_pbs(workspace_id: &str, folder: &Folder) /// Get all the child views belong to the view id, including the child views of the child views. 
fn get_all_child_view_ids(folder: &Folder, view_id: &str) -> Vec { let child_view_ids = folder - .views .get_views_belong_to(view_id) .into_iter() .map(|view| view.id.clone()) @@ -1774,7 +1804,7 @@ pub(crate) fn get_workspace_private_view_pbs(workspace_id: &str, folder: &Folder .map(|view| view.id) .collect::>(); - let mut views = folder.views.get_views_belong_to(workspace_id); + let mut views = folder.get_views_belong_to(workspace_id); // filter the views that are in the trash and not in the private view ids views.retain(|view| !trash_ids.contains(&view.id) && private_view_ids.contains(&view.id)); @@ -1782,30 +1812,14 @@ pub(crate) fn get_workspace_private_view_pbs(workspace_id: &str, folder: &Folder .into_iter() .map(|view| { // Get child views - let mut child_views: Vec> = folder - .views - .get_views_belong_to(&view.id) - .into_iter() - .collect(); + let mut child_views: Vec> = + folder.get_views_belong_to(&view.id).into_iter().collect(); child_views.retain(|view| !trash_ids.contains(&view.id)); view_pb_with_child_views(view, child_views) }) .collect() } -/// The MutexFolder is a wrapper of the [Folder] that is used to share the folder between different -/// threads. 
-#[derive(Clone, Default)] -pub struct MutexFolder(Arc>>); -impl Deref for MutexFolder { - type Target = Arc>>; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -unsafe impl Sync for MutexFolder {} -unsafe impl Send for MutexFolder {} - #[allow(clippy::large_enum_variant)] pub enum FolderInitDataSource { /// It means using the data stored on local disk to initialize the folder diff --git a/frontend/rust-lib/flowy-folder/src/manager_init.rs b/frontend/rust-lib/flowy-folder/src/manager_init.rs index d1266a146e..cd1c45882e 100644 --- a/frontend/rust-lib/flowy-folder/src/manager_init.rs +++ b/frontend/rust-lib/flowy-folder/src/manager_init.rs @@ -3,10 +3,11 @@ use crate::manager_observer::*; use crate::user_default::DefaultFolderBuilder; use collab::core::collab::DataSource; use collab_entity::{CollabType, EncodedCollab}; -use collab_folder::{Folder, FolderNotify, UserId}; +use collab_folder::{Folder, FolderNotify}; use collab_integrate::CollabKVDB; use flowy_error::{FlowyError, FlowyResult}; use std::sync::{Arc, Weak}; +use tokio::sync::RwLock; use tokio::task::spawn_blocking; use tracing::{event, info, Level}; @@ -27,9 +28,13 @@ impl FolderManager { initial_data ); - if let Some(old_folder) = self.mutex_folder.write().take() { + if let Some(old_folder) = self.mutex_folder.swap(None) { + let old_folder = old_folder.read().await; old_folder.close(); - info!("remove old folder: {}", old_folder.get_workspace_id()); + info!( + "remove old folder: {}", + old_folder.get_workspace_id().unwrap_or_default() + ); } let workspace_id = workspace_id.to_string(); @@ -47,18 +52,15 @@ impl FolderManager { FolderInitDataSource::LocalDisk { create_if_not_exist, } => { - let is_exist = self.is_workspace_exist_in_local(uid, &workspace_id).await; + let is_exist = self + .user + .is_folder_exist_on_disk(uid, &workspace_id) + .unwrap_or(false); // 1. 
if the folder exists, open it from local disk if is_exist { event!(Level::INFO, "Init folder from local disk"); self - .make_folder( - uid, - &workspace_id, - collab_db, - DataSource::Disk, - folder_notifier, - ) + .make_folder(uid, &workspace_id, collab_db, None, folder_notifier) .await? } else if create_if_not_exist { // 2. if the folder doesn't exist and create_if_not_exist is true, create a default folder @@ -80,7 +82,7 @@ impl FolderManager { uid, &workspace_id, collab_db.clone(), - DataSource::DocStateV1(doc_state), + Some(DataSource::DocStateV1(doc_state)), folder_notifier.clone(), ) .await? @@ -90,13 +92,7 @@ impl FolderManager { if doc_state.is_empty() { event!(Level::ERROR, "remote folder data is empty, open from local"); self - .make_folder( - uid, - &workspace_id, - collab_db, - DataSource::Disk, - folder_notifier, - ) + .make_folder(uid, &workspace_id, collab_db, None, folder_notifier) .await? } else { event!(Level::INFO, "Restore folder from remote data"); @@ -105,7 +101,7 @@ impl FolderManager { uid, &workspace_id, collab_db.clone(), - DataSource::DocStateV1(doc_state), + Some(DataSource::DocStateV1(doc_state)), folder_notifier.clone(), ) .await? 
@@ -113,16 +109,20 @@ impl FolderManager { }, }; - let folder_state_rx = folder.subscribe_sync_state(); - let index_content_rx = folder.subscribe_index_content(); - self - .folder_indexer - .set_index_content_receiver(index_content_rx, workspace_id.clone()); - self.handle_index_folder(workspace_id.clone(), &folder); + let folder_state_rx = { + let folder = folder.read().await; + let folder_state_rx = folder.subscribe_sync_state(); + let index_content_rx = folder.subscribe_index_content(); + self + .folder_indexer + .set_index_content_receiver(index_content_rx, workspace_id.clone()); + self.handle_index_folder(workspace_id.clone(), &folder); + folder_state_rx + }; - *self.mutex_folder.write() = Some(folder); + self.mutex_folder.store(Some(folder.clone())); - let weak_mutex_folder = Arc::downgrade(&self.mutex_folder); + let weak_mutex_folder = Arc::downgrade(&folder); subscribe_folder_sync_state_changed( workspace_id.clone(), folder_state_rx, @@ -130,41 +130,32 @@ impl FolderManager { ); subscribe_folder_snapshot_state_changed( workspace_id.clone(), - &weak_mutex_folder, + weak_mutex_folder.clone(), Arc::downgrade(&self.user), ); subscribe_folder_trash_changed( workspace_id.clone(), section_change_rx, - &weak_mutex_folder, + weak_mutex_folder.clone(), Arc::downgrade(&self.user), ); subscribe_folder_view_changed( workspace_id.clone(), view_rx, - &weak_mutex_folder, + weak_mutex_folder.clone(), Arc::downgrade(&self.user), ); Ok(()) } - async fn is_workspace_exist_in_local(&self, uid: i64, workspace_id: &str) -> bool { - if let Ok(weak_collab) = self.user.collab_db(uid) { - if let Some(collab_db) = weak_collab.upgrade() { - return collab_db.is_exist(uid, workspace_id).await.unwrap_or(false); - } - } - false - } - async fn create_default_folder( &self, uid: i64, workspace_id: &str, collab_db: Weak, folder_notifier: FolderNotify, - ) -> Result { + ) -> Result>, FlowyError> { event!( Level::INFO, "Create folder:{} with default folder builder", @@ -172,15 +163,16 @@ impl 
FolderManager { ); let folder_data = DefaultFolderBuilder::build(uid, workspace_id.to_string(), &self.operation_handlers).await; - let collab = self - .create_empty_collab(uid, workspace_id, collab_db) + let folder = self + .create_empty_collab( + uid, + workspace_id, + collab_db, + Some(folder_notifier), + Some(folder_data), + ) .await?; - Ok(Folder::create( - UserId::from(uid), - collab, - Some(folder_notifier), - folder_data, - )) + Ok(folder) } fn handle_index_folder(&self, workspace_id: String, folder: &Folder) { @@ -194,7 +186,7 @@ impl FolderManager { if let Ok(changes) = folder.calculate_view_changes(encoded_collab) { let folder_indexer = self.folder_indexer.clone(); - let views = folder.views.get_all_views(); + let views = folder.get_all_views(); let wid = workspace_id.clone(); if !changes.is_empty() && !views.is_empty() { @@ -208,7 +200,7 @@ impl FolderManager { } if index_all { - let views = folder.views.get_all_views(); + let views = folder.get_all_views(); let folder_indexer = self.folder_indexer.clone(); let wid = workspace_id.clone(); @@ -226,12 +218,12 @@ impl FolderManager { } fn save_collab_to_preferences(&self, folder: &Folder) { - let encoded_collab = folder.encode_collab_v1(); + if let Some(workspace_id) = folder.get_workspace_id() { + let encoded_collab = folder.encode_collab(); - if let Ok(encoded) = encoded_collab { - let _ = self - .store_preferences - .set_object(&folder.get_workspace_id(), encoded); + if let Ok(encoded) = encoded_collab { + let _ = self.store_preferences.set_object(&workspace_id, &encoded); + } } } } diff --git a/frontend/rust-lib/flowy-folder/src/manager_observer.rs b/frontend/rust-lib/flowy-folder/src/manager_observer.rs index 91bd450a70..e196f492f9 100644 --- a/frontend/rust-lib/flowy-folder/src/manager_observer.rs +++ b/frontend/rust-lib/flowy-folder/src/manager_observer.rs @@ -2,9 +2,7 @@ use crate::entities::{ view_pb_with_child_views, view_pb_without_child_views, ChildViewUpdatePB, FolderSnapshotStatePB, 
FolderSyncStatePB, RepeatedTrashPB, RepeatedViewPB, SectionViewsPB, ViewPB, ViewSectionPB, }; -use crate::manager::{ - get_workspace_private_view_pbs, get_workspace_public_view_pbs, FolderUser, MutexFolder, -}; +use crate::manager::{get_workspace_private_view_pbs, get_workspace_public_view_pbs, FolderUser}; use crate::notification::{send_notification, FolderNotification}; use collab::core::collab_state::SyncState; use collab_folder::{ @@ -13,7 +11,8 @@ use collab_folder::{ }; use lib_dispatch::prelude::af_spawn; use std::collections::HashSet; -use std::sync::{Arc, Weak}; +use std::sync::Weak; +use tokio::sync::RwLock; use tokio_stream::wrappers::WatchStream; use tokio_stream::StreamExt; use tracing::{event, trace, Level}; @@ -22,10 +21,9 @@ use tracing::{event, trace, Level}; pub(crate) fn subscribe_folder_view_changed( workspace_id: String, mut rx: ViewChangeReceiver, - weak_mutex_folder: &Weak, + weak_mutex_folder: Weak>, user: Weak, ) { - let weak_mutex_folder = weak_mutex_folder.clone(); af_spawn(async move { while let Ok(value) = rx.recv().await { if let Some(user) = user.upgrade() { @@ -38,7 +36,7 @@ pub(crate) fn subscribe_folder_view_changed( } } - if let Some(folder) = weak_mutex_folder.upgrade() { + if let Some(lock) = weak_mutex_folder.upgrade() { tracing::trace!("Did receive view change: {:?}", value); match value { ViewChange::DidCreateView { view } => { @@ -46,7 +44,8 @@ pub(crate) fn subscribe_folder_view_changed( view_pb_without_child_views(view.clone()), ChildViewChangeReason::Create, ); - notify_parent_view_did_change(&workspace_id, folder.clone(), vec![view.parent_view_id]); + let folder = lock.read().await; + notify_parent_view_did_change(&workspace_id, &folder, vec![view.parent_view_id]); }, ViewChange::DidDeleteView { views } => { for view in views { @@ -62,11 +61,8 @@ pub(crate) fn subscribe_folder_view_changed( view_pb_without_child_views(view.clone()), ChildViewChangeReason::Update, ); - notify_parent_view_did_change( - &workspace_id, - 
folder.clone(), - vec![view.parent_view_id.clone()], - ); + let folder = lock.read().await; + notify_parent_view_did_change(&workspace_id, &folder, vec![view.parent_view_id]); }, }; } @@ -76,35 +72,30 @@ pub(crate) fn subscribe_folder_view_changed( pub(crate) fn subscribe_folder_snapshot_state_changed( workspace_id: String, - weak_mutex_folder: &Weak, + weak_mutex_folder: Weak>, user: Weak, ) { - let weak_mutex_folder = weak_mutex_folder.clone(); af_spawn(async move { - if let Some(mutex_folder) = weak_mutex_folder.upgrade() { - let stream = mutex_folder - .read() - .as_ref() - .map(|folder| folder.subscribe_snapshot_state()); - if let Some(mut state_stream) = stream { - while let Some(snapshot_state) = state_stream.next().await { - if let Some(user) = user.upgrade() { - if let Ok(actual_workspace_id) = user.workspace_id() { - if actual_workspace_id != workspace_id { - // break the loop when the workspace id is not matched. - break; - } + if let Some(folder) = weak_mutex_folder.upgrade() { + let mut state_stream = folder.read().await.subscribe_snapshot_state(); + + while let Some(snapshot_state) = state_stream.next().await { + if let Some(user) = user.upgrade() { + if let Ok(actual_workspace_id) = user.workspace_id() { + if actual_workspace_id != workspace_id { + // break the loop when the workspace id is not matched. 
+ break; } } - if let Some(new_snapshot_id) = snapshot_state.snapshot_id() { - tracing::debug!("Did create folder remote snapshot: {}", new_snapshot_id); - send_notification( - &workspace_id, - FolderNotification::DidUpdateFolderSnapshotState, - ) - .payload(FolderSnapshotStatePB { new_snapshot_id }) - .send(); - } + } + if let Some(new_snapshot_id) = snapshot_state.snapshot_id() { + tracing::debug!("Did create folder remote snapshot: {}", new_snapshot_id); + send_notification( + &workspace_id, + FolderNotification::DidUpdateFolderSnapshotState, + ) + .payload(FolderSnapshotStatePB { new_snapshot_id }) + .send(); } } } @@ -138,10 +129,9 @@ pub(crate) fn subscribe_folder_sync_state_changed( pub(crate) fn subscribe_folder_trash_changed( workspace_id: String, mut rx: SectionChangeReceiver, - weak_mutex_folder: &Weak, + weak_mutex_folder: Weak>, user: Weak, ) { - let weak_mutex_folder = weak_mutex_folder.clone(); af_spawn(async move { while let Ok(value) = rx.recv().await { if let Some(user) = user.upgrade() { @@ -153,7 +143,7 @@ pub(crate) fn subscribe_folder_trash_changed( } } - if let Some(folder) = weak_mutex_folder.upgrade() { + if let Some(lock) = weak_mutex_folder.upgrade() { let mut unique_ids = HashSet::new(); tracing::trace!("Did receive trash change: {:?}", value); @@ -163,20 +153,19 @@ pub(crate) fn subscribe_folder_trash_changed( TrashSectionChange::TrashItemAdded { ids } => ids, TrashSectionChange::TrashItemRemoved { ids } => ids, }; - if let Some(folder) = folder.read().as_ref() { - let views = folder.views.get_views(&ids); - for view in views { - unique_ids.insert(view.parent_view_id.clone()); - } - - let repeated_trash: RepeatedTrashPB = folder.get_my_trash_info().into(); - send_notification("trash", FolderNotification::DidUpdateTrash) - .payload(repeated_trash) - .send(); + let folder = lock.read().await; + let views = folder.get_views(&ids); + for view in views { + unique_ids.insert(view.parent_view_id.clone()); } + let repeated_trash: 
RepeatedTrashPB = folder.get_my_trash_info().into(); + send_notification("trash", FolderNotification::DidUpdateTrash) + .payload(repeated_trash) + .send(); + let parent_view_ids = unique_ids.into_iter().collect(); - notify_parent_view_did_change(&workspace_id, folder.clone(), parent_view_ids); + notify_parent_view_did_change(&workspace_id, &folder, parent_view_ids); }, } } @@ -188,11 +177,9 @@ pub(crate) fn subscribe_folder_trash_changed( #[tracing::instrument(level = "debug", skip(folder, parent_view_ids))] pub(crate) fn notify_parent_view_did_change>( workspace_id: &str, - folder: Arc, + folder: &Folder, parent_view_ids: Vec, ) -> Option<()> { - let folder = folder.read(); - let folder = folder.as_ref()?; let trash_ids = folder .get_all_trash_sections() .into_iter() @@ -210,8 +197,8 @@ pub(crate) fn notify_parent_view_did_change>( } else { // Parent view can contain a list of child views. Currently, only get the first level // child views. - let parent_view = folder.views.get_view(parent_view_id)?; - let mut child_views = folder.views.get_views_belong_to(parent_view_id); + let parent_view = folder.get_view(parent_view_id)?; + let mut child_views = folder.get_views_belong_to(parent_view_id); child_views.retain(|view| !trash_ids.contains(&view.id)); event!(Level::DEBUG, child_views_count = child_views.len()); diff --git a/frontend/rust-lib/flowy-folder/src/manager_test_util.rs b/frontend/rust-lib/flowy-folder/src/manager_test_util.rs index 4280c788d9..37e86d5867 100644 --- a/frontend/rust-lib/flowy-folder/src/manager_test_util.rs +++ b/frontend/rust-lib/flowy-folder/src/manager_test_util.rs @@ -1,13 +1,15 @@ -use crate::manager::{FolderManager, FolderUser, MutexFolder}; +use crate::manager::{FolderManager, FolderUser}; use crate::view_operation::FolderOperationHandlers; +use collab_folder::Folder; use collab_integrate::collab_builder::AppFlowyCollabBuilder; use flowy_folder_pub::cloud::FolderCloudService; use flowy_search_pub::entities::FolderIndexManager; use 
std::sync::Arc; +use tokio::sync::RwLock; impl FolderManager { - pub fn get_mutex_folder(&self) -> Arc { - self.mutex_folder.clone() + pub fn get_mutex_folder(&self) -> Option>> { + self.mutex_folder.load_full() } pub fn get_cloud_service(&self) -> Arc { diff --git a/frontend/rust-lib/flowy-folder/src/util.rs b/frontend/rust-lib/flowy-folder/src/util.rs index a56db33511..3466d7c527 100644 --- a/frontend/rust-lib/flowy-folder/src/util.rs +++ b/frontend/rust-lib/flowy-folder/src/util.rs @@ -16,7 +16,7 @@ pub(crate) fn workspace_data_not_sync_error(uid: i64, workspace_id: &str) -> Flo } #[instrument(level = "debug", skip(folder, view))] -pub(crate) fn insert_parent_child_views(folder: &Folder, view: ParentChildViews) { +pub(crate) fn insert_parent_child_views(folder: &mut Folder, view: ParentChildViews) { event!( tracing::Level::DEBUG, "Inserting view: {}, view children: {}", diff --git a/frontend/rust-lib/flowy-folder/src/view_operation.rs b/frontend/rust-lib/flowy-folder/src/view_operation.rs index fd5f90d206..5cafee0d25 100644 --- a/frontend/rust-lib/flowy-folder/src/view_operation.rs +++ b/frontend/rust-lib/flowy-folder/src/view_operation.rs @@ -40,7 +40,6 @@ pub struct DatabaseEncodedCollab { /// The handler will be used to handler the folder operation for a specific /// view layout. Each [ViewLayout] will have a handler. So when creating a new /// view, the [ViewLayout] will be used to get the handler. -/// #[async_trait] pub trait FolderOperationHandler { /// Create the view for the workspace of new user. diff --git a/frontend/rust-lib/flowy-server-pub/src/lib.rs b/frontend/rust-lib/flowy-server-pub/src/lib.rs index 4736587f4e..ee43b3c40c 100644 --- a/frontend/rust-lib/flowy-server-pub/src/lib.rs +++ b/frontend/rust-lib/flowy-server-pub/src/lib.rs @@ -28,15 +28,12 @@ if_wasm! 
{ } } -pub mod supabase_config; - pub const CLOUT_TYPE_STR: &str = "APPFLOWY_CLOUD_ENV_CLOUD_TYPE"; #[derive(Deserialize_repr, Debug, Clone, PartialEq, Eq)] #[repr(u8)] pub enum AuthenticatorType { Local = 0, - Supabase = 1, AppFlowyCloud = 2, } @@ -50,7 +47,6 @@ impl AuthenticatorType { fn from_str(s: &str) -> Self { match s { "0" => AuthenticatorType::Local, - "1" => AuthenticatorType::Supabase, "2" => AuthenticatorType::AppFlowyCloud, _ => AuthenticatorType::Local, } diff --git a/frontend/rust-lib/flowy-server-pub/src/supabase_config.rs b/frontend/rust-lib/flowy-server-pub/src/supabase_config.rs deleted file mode 100644 index 90dbe39bc5..0000000000 --- a/frontend/rust-lib/flowy-server-pub/src/supabase_config.rs +++ /dev/null @@ -1,41 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use flowy_error::{ErrorCode, FlowyError}; - -pub const SUPABASE_URL: &str = "APPFLOWY_CLOUD_ENV_SUPABASE_URL"; -pub const SUPABASE_ANON_KEY: &str = "APPFLOWY_CLOUD_ENV_SUPABASE_ANON_KEY"; - -/// The configuration for the postgres database. It supports deserializing from the json string that -/// passed from the frontend application. [AppFlowyEnv::parser] -#[derive(Debug, Serialize, Deserialize, Clone, Default)] -pub struct SupabaseConfiguration { - /// The url of the supabase server. - pub url: String, - /// The key of the supabase server. - pub anon_key: String, -} - -impl SupabaseConfiguration { - pub fn from_env() -> Result { - let url = std::env::var(SUPABASE_URL) - .map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_URL"))?; - - let anon_key = std::env::var(SUPABASE_ANON_KEY) - .map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_ANON_KEY"))?; - - if url.is_empty() || anon_key.is_empty() { - return Err(FlowyError::new( - ErrorCode::InvalidAuthConfig, - "Missing SUPABASE_URL or SUPABASE_ANON_KEY", - )); - } - - Ok(Self { url, anon_key }) - } - - /// Write the configuration to the environment variables. 
- pub fn write_env(&self) { - std::env::set_var(SUPABASE_URL, &self.url); - std::env::set_var(SUPABASE_ANON_KEY, &self.anon_key); - } -} diff --git a/frontend/rust-lib/flowy-server/Cargo.toml b/frontend/rust-lib/flowy-server/Cargo.toml index 76fcd99a32..e746aca35a 100644 --- a/frontend/rust-lib/flowy-server/Cargo.toml +++ b/frontend/rust-lib/flowy-server/Cargo.toml @@ -18,11 +18,12 @@ serde.workspace = true serde_json.workspace = true thiserror = "1.0" tokio = { workspace = true, features = ["sync"] } -parking_lot.workspace = true lazy_static = "1.4.0" bytes = { workspace = true, features = ["serde"] } tokio-retry = "0.3" anyhow.workspace = true +arc-swap.workspace = true +dashmap.workspace = true uuid.workspace = true chrono = { workspace = true, default-features = false, features = ["clock", "serde"] } collab = { workspace = true } diff --git a/frontend/rust-lib/flowy-server/src/af_cloud/impls/database.rs b/frontend/rust-lib/flowy-server/src/af_cloud/impls/database.rs index 57c97a3a67..7ecebac485 100644 --- a/frontend/rust-lib/flowy-server/src/af_cloud/impls/database.rs +++ b/frontend/rust-lib/flowy-server/src/af_cloud/impls/database.rs @@ -5,7 +5,6 @@ use client_api::entity::ai_dto::{ use client_api::entity::QueryCollabResult::{Failed, Success}; use client_api::entity::{QueryCollab, QueryCollabParams}; use client_api::error::ErrorCode::RecordNotFound; -use collab::core::collab::DataSource; use collab::entity::EncodedCollab; use collab_entity::CollabType; use serde_json::{Map, Value}; @@ -13,12 +12,11 @@ use std::sync::Arc; use tracing::{error, instrument}; use flowy_database_pub::cloud::{ - CollabDocStateByOid, DatabaseAIService, DatabaseCloudService, DatabaseSnapshot, - SummaryRowContent, TranslateRowContent, TranslateRowResponse, + DatabaseAIService, DatabaseCloudService, DatabaseSnapshot, EncodeCollabByOid, SummaryRowContent, + TranslateRowContent, TranslateRowResponse, }; use flowy_error::FlowyError; use lib_infra::async_trait::async_trait; -use 
lib_infra::future::FutureResult; use crate::af_cloud::define::ServerUser; use crate::af_cloud::impls::util::check_request_workspace_id_is_match; @@ -35,93 +33,85 @@ where T: AFServer, { #[instrument(level = "debug", skip_all)] - fn get_database_object_doc_state( + async fn get_database_encode_collab( &self, object_id: &str, collab_type: CollabType, workspace_id: &str, - ) -> FutureResult>, Error> { + ) -> Result, Error> { let workspace_id = workspace_id.to_string(); let object_id = object_id.to_string(); let try_get_client = self.inner.try_get_client(); let cloned_user = self.user.clone(); - FutureResult::new(async move { - let params = QueryCollabParams { - workspace_id: workspace_id.clone(), - inner: QueryCollab::new(object_id.clone(), collab_type.clone()), - }; - match try_get_client?.get_collab(params).await { - Ok(data) => { - check_request_workspace_id_is_match( - &workspace_id, - &cloned_user, - format!("get database object: {}:{}", object_id, collab_type), - )?; - Ok(Some(data.encode_collab.doc_state.to_vec())) - }, - Err(err) => { - if err.code == RecordNotFound { - Ok(None) - } else { - Err(Error::new(err)) - } - }, - } - }) + let params = QueryCollabParams { + workspace_id: workspace_id.clone(), + inner: QueryCollab::new(object_id.clone(), collab_type.clone()), + }; + match try_get_client?.get_collab(params).await { + Ok(data) => { + check_request_workspace_id_is_match( + &workspace_id, + &cloned_user, + format!("get database object: {}:{}", object_id, collab_type), + )?; + Ok(Some(data.encode_collab)) + }, + Err(err) => { + if err.code == RecordNotFound { + Ok(None) + } else { + Err(Error::new(err)) + } + }, + } } #[instrument(level = "debug", skip_all)] - fn batch_get_database_object_doc_state( + async fn batch_get_database_encode_collab( &self, object_ids: Vec, object_ty: CollabType, workspace_id: &str, - ) -> FutureResult { + ) -> Result { let workspace_id = workspace_id.to_string(); let try_get_client = self.inner.try_get_client(); let cloned_user = 
self.user.clone(); - FutureResult::new(async move { - let client = try_get_client?; - let params = object_ids + let client = try_get_client?; + let params = object_ids + .into_iter() + .map(|object_id| QueryCollab::new(object_id, object_ty.clone())) + .collect(); + let results = client.batch_get_collab(&workspace_id, params).await?; + check_request_workspace_id_is_match(&workspace_id, &cloned_user, "batch get database object")?; + Ok( + results + .0 .into_iter() - .map(|object_id| QueryCollab::new(object_id, object_ty.clone())) - .collect(); - let results = client.batch_get_collab(&workspace_id, params).await?; - check_request_workspace_id_is_match( - &workspace_id, - &cloned_user, - "batch get database object", - )?; - Ok( - results - .0 - .into_iter() - .flat_map(|(object_id, result)| match result { - Success { encode_collab_v1 } => { - match EncodedCollab::decode_from_bytes(&encode_collab_v1) { - Ok(encode) => Some((object_id, DataSource::DocStateV1(encode.doc_state.to_vec()))), - Err(err) => { - error!("Failed to decode collab: {}", err); - None - }, - } - }, - Failed { error } => { - error!("Failed to get {} update: {}", object_id, error); - None - }, - }) - .collect::(), - ) - }) + .flat_map(|(object_id, result)| match result { + Success { encode_collab_v1 } => { + match EncodedCollab::decode_from_bytes(&encode_collab_v1) { + Ok(encode) => Some((object_id, encode)), + Err(err) => { + error!("Failed to decode collab: {}", err); + None + }, + } + }, + Failed { error } => { + error!("Failed to get {} update: {}", object_id, error); + None + }, + }) + .collect::(), + ) } - fn get_database_collab_object_snapshots( + async fn get_database_collab_object_snapshots( &self, _object_id: &str, _limit: usize, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(vec![]) }) + ) -> Result, Error> { + Ok(vec![]) } } diff --git a/frontend/rust-lib/flowy-server/src/af_cloud/impls/document.rs b/frontend/rust-lib/flowy-server/src/af_cloud/impls/document.rs index 
98732aa521..fc4d1dff75 100644 --- a/frontend/rust-lib/flowy-server/src/af_cloud/impls/document.rs +++ b/frontend/rust-lib/flowy-server/src/af_cloud/impls/document.rs @@ -9,7 +9,7 @@ use tracing::instrument; use flowy_document_pub::cloud::*; use flowy_error::FlowyError; -use lib_infra::future::FutureResult; +use lib_infra::async_trait::async_trait; use crate::af_cloud::define::ServerUser; use crate::af_cloud::impls::util::check_request_workspace_id_is_match; @@ -20,86 +20,83 @@ pub(crate) struct AFCloudDocumentCloudServiceImpl { pub user: Arc, } +#[async_trait] impl DocumentCloudService for AFCloudDocumentCloudServiceImpl where T: AFServer, { #[instrument(level = "debug", skip_all, fields(document_id = %document_id))] - fn get_document_doc_state( + async fn get_document_doc_state( &self, document_id: &str, workspace_id: &str, - ) -> FutureResult, FlowyError> { + ) -> Result, FlowyError> { let workspace_id = workspace_id.to_string(); let try_get_client = self.inner.try_get_client(); let document_id = document_id.to_string(); let cloned_user = self.user.clone(); - FutureResult::new(async move { - let params = QueryCollabParams { - workspace_id: workspace_id.clone(), - inner: QueryCollab::new(document_id.to_string(), CollabType::Document), - }; - let doc_state = try_get_client? - .get_collab(params) - .await - .map_err(FlowyError::from)? - .encode_collab - .doc_state - .to_vec(); + let params = QueryCollabParams { + workspace_id: workspace_id.clone(), + inner: QueryCollab::new(document_id.to_string(), CollabType::Document), + }; + let doc_state = try_get_client? + .get_collab(params) + .await + .map_err(FlowyError::from)? 
+ .encode_collab + .doc_state + .to_vec(); - check_request_workspace_id_is_match( - &workspace_id, - &cloned_user, - format!("get document doc state:{}", document_id), - )?; + check_request_workspace_id_is_match( + &workspace_id, + &cloned_user, + format!("get document doc state:{}", document_id), + )?; - Ok(doc_state) - }) + Ok(doc_state) } - fn get_document_snapshots( + async fn get_document_snapshots( &self, _document_id: &str, _limit: usize, _workspace_id: &str, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(vec![]) }) + ) -> Result, Error> { + Ok(vec![]) } #[instrument(level = "debug", skip_all)] - fn get_document_data( + async fn get_document_data( &self, document_id: &str, workspace_id: &str, - ) -> FutureResult, Error> { + ) -> Result, Error> { let try_get_client = self.inner.try_get_client(); let document_id = document_id.to_string(); let workspace_id = workspace_id.to_string(); let cloned_user = self.user.clone(); - FutureResult::new(async move { - let params = QueryCollabParams { - workspace_id: workspace_id.clone(), - inner: QueryCollab::new(document_id.clone(), CollabType::Document), - }; - let doc_state = try_get_client? - .get_collab(params) - .await - .map_err(FlowyError::from)? - .encode_collab - .doc_state - .to_vec(); - check_request_workspace_id_is_match( - &workspace_id, - &cloned_user, - format!("Get {} document", document_id), - )?; - let document = Document::from_doc_state( - CollabOrigin::Empty, - DataSource::DocStateV1(doc_state), - &document_id, - vec![], - )?; - Ok(document.get_document_data().ok()) - }) + let params = QueryCollabParams { + workspace_id: workspace_id.clone(), + inner: QueryCollab::new(document_id.clone(), CollabType::Document), + }; + let doc_state = try_get_client? + .get_collab(params) + .await + .map_err(FlowyError::from)? 
+ .encode_collab + .doc_state + .to_vec(); + check_request_workspace_id_is_match( + &workspace_id, + &cloned_user, + format!("Get {} document", document_id), + )?; + let document = Document::open_with_options( + CollabOrigin::Empty, + DataSource::DocStateV1(doc_state), + &document_id, + vec![], + )?; + Ok(document.get_document_data().ok()) } } diff --git a/frontend/rust-lib/flowy-server/src/af_cloud/impls/folder.rs b/frontend/rust-lib/flowy-server/src/af_cloud/impls/folder.rs index c8296bd01e..ec9bf53d1d 100644 --- a/frontend/rust-lib/flowy-server/src/af_cloud/impls/folder.rs +++ b/frontend/rust-lib/flowy-server/src/af_cloud/impls/folder.rs @@ -18,7 +18,7 @@ use flowy_folder_pub::cloud::{ WorkspaceRecord, }; use flowy_folder_pub::entities::{PublishInfoResponse, PublishPayload}; -use lib_infra::future::FutureResult; +use lib_infra::async_trait::async_trait; use crate::af_cloud::define::ServerUser; use crate::af_cloud::impls::util::check_request_workspace_id_is_match; @@ -29,167 +29,160 @@ pub(crate) struct AFCloudFolderCloudServiceImpl { pub user: Arc, } +#[async_trait] impl FolderCloudService for AFCloudFolderCloudServiceImpl where T: AFServer, { - fn create_workspace(&self, _uid: i64, name: &str) -> FutureResult { + async fn create_workspace(&self, _uid: i64, name: &str) -> Result { let try_get_client = self.inner.try_get_client(); let cloned_name = name.to_string(); - FutureResult::new(async move { - let client = try_get_client?; - let new_workspace = client - .create_workspace(CreateWorkspaceParam { - workspace_name: Some(cloned_name), - }) - .await?; - Ok(Workspace { - id: new_workspace.workspace_id.to_string(), - name: new_workspace.workspace_name, - created_at: new_workspace.created_at.timestamp(), - child_views: RepeatedViewIdentifier::new(vec![]), - created_by: Some(new_workspace.owner_uid), - last_edited_time: new_workspace.created_at.timestamp(), - last_edited_by: Some(new_workspace.owner_uid), + let client = try_get_client?; + let new_workspace = client 
+ .create_workspace(CreateWorkspaceParam { + workspace_name: Some(cloned_name), }) + .await?; + + Ok(Workspace { + id: new_workspace.workspace_id.to_string(), + name: new_workspace.workspace_name, + created_at: new_workspace.created_at.timestamp(), + child_views: RepeatedViewIdentifier::new(vec![]), + created_by: Some(new_workspace.owner_uid), + last_edited_time: new_workspace.created_at.timestamp(), + last_edited_by: Some(new_workspace.owner_uid), }) } - fn open_workspace(&self, workspace_id: &str) -> FutureResult<(), Error> { + async fn open_workspace(&self, workspace_id: &str) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); let try_get_client = self.inner.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let _ = client.open_workspace(&workspace_id).await?; - Ok(()) - }) + + let client = try_get_client?; + let _ = client.open_workspace(&workspace_id).await?; + Ok(()) } - fn get_all_workspace(&self) -> FutureResult, Error> { + async fn get_all_workspace(&self) -> Result, Error> { let try_get_client = self.inner.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let records = client - .get_user_workspace_info() - .await? - .workspaces - .into_iter() - .map(|af_workspace| WorkspaceRecord { - id: af_workspace.workspace_id.to_string(), - name: af_workspace.workspace_name, - created_at: af_workspace.created_at.timestamp(), - }) - .collect::>(); - Ok(records) - }) + + let client = try_get_client?; + let records = client + .get_user_workspace_info() + .await? 
+ .workspaces + .into_iter() + .map(|af_workspace| WorkspaceRecord { + id: af_workspace.workspace_id.to_string(), + name: af_workspace.workspace_name, + created_at: af_workspace.created_at.timestamp(), + }) + .collect::>(); + Ok(records) } + #[instrument(level = "debug", skip_all)] - fn get_folder_data( + async fn get_folder_data( &self, workspace_id: &str, uid: &i64, - ) -> FutureResult, Error> { + ) -> Result, Error> { let uid = *uid; let workspace_id = workspace_id.to_string(); let try_get_client = self.inner.try_get_client(); let cloned_user = self.user.clone(); - FutureResult::new(async move { - let params = QueryCollabParams { - workspace_id: workspace_id.clone(), - inner: QueryCollab::new(workspace_id.clone(), CollabType::Folder), - }; - let doc_state = try_get_client? - .get_collab(params) - .await - .map_err(FlowyError::from)? - .encode_collab - .doc_state - .to_vec(); - check_request_workspace_id_is_match(&workspace_id, &cloned_user, "get folder data")?; - let folder = Folder::from_collab_doc_state( - uid, - CollabOrigin::Empty, - DataSource::DocStateV1(doc_state), - &workspace_id, - vec![], - )?; - Ok(folder.get_folder_data(&workspace_id)) - }) + let params = QueryCollabParams { + workspace_id: workspace_id.clone(), + inner: QueryCollab::new(workspace_id.clone(), CollabType::Folder), + }; + let doc_state = try_get_client? + .get_collab(params) + .await + .map_err(FlowyError::from)? 
+ .encode_collab + .doc_state + .to_vec(); + check_request_workspace_id_is_match(&workspace_id, &cloned_user, "get folder data")?; + let folder = Folder::from_collab_doc_state( + uid, + CollabOrigin::Empty, + DataSource::DocStateV1(doc_state), + &workspace_id, + vec![], + )?; + Ok(folder.get_folder_data(&workspace_id)) } - fn get_folder_snapshots( + async fn get_folder_snapshots( &self, _workspace_id: &str, _limit: usize, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(vec![]) }) + ) -> Result, Error> { + Ok(vec![]) } #[instrument(level = "debug", skip_all)] - fn get_folder_doc_state( + async fn get_folder_doc_state( &self, workspace_id: &str, _uid: i64, collab_type: CollabType, object_id: &str, - ) -> FutureResult, Error> { + ) -> Result, Error> { let object_id = object_id.to_string(); let workspace_id = workspace_id.to_string(); let try_get_client = self.inner.try_get_client(); let cloned_user = self.user.clone(); - FutureResult::new(async move { - let params = QueryCollabParams { - workspace_id: workspace_id.clone(), - inner: QueryCollab::new(object_id, collab_type), - }; - let doc_state = try_get_client? - .get_collab(params) - .await - .map_err(FlowyError::from)? - .encode_collab - .doc_state - .to_vec(); - check_request_workspace_id_is_match(&workspace_id, &cloned_user, "get folder doc state")?; - Ok(doc_state) - }) + let params = QueryCollabParams { + workspace_id: workspace_id.clone(), + inner: QueryCollab::new(object_id, collab_type), + }; + let doc_state = try_get_client? + .get_collab(params) + .await + .map_err(FlowyError::from)? 
+ .encode_collab + .doc_state + .to_vec(); + check_request_workspace_id_is_match(&workspace_id, &cloned_user, "get folder doc state")?; + Ok(doc_state) } - fn batch_create_folder_collab_objects( + async fn batch_create_folder_collab_objects( &self, workspace_id: &str, objects: Vec, - ) -> FutureResult<(), Error> { + ) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); let try_get_client = self.inner.try_get_client(); - FutureResult::new(async move { - let params = objects - .into_iter() - .map(|object| { - CollabParams::new( - object.object_id, - object.collab_type, - object.encoded_collab_v1, - ) - }) - .collect::>(); - try_get_client? - .create_collab_list(&workspace_id, params) - .await - .map_err(FlowyError::from)?; - Ok(()) - }) + let params = objects + .into_iter() + .map(|object| { + CollabParams::new( + object.object_id, + object.collab_type, + object.encoded_collab_v1, + ) + }) + .collect::>(); + try_get_client? + .create_collab_list(&workspace_id, params) + .await + .map_err(FlowyError::from)?; + Ok(()) } fn service_name(&self) -> String { "AppFlowy Cloud".to_string() } - fn publish_view( + async fn publish_view( &self, workspace_id: &str, payload: Vec, - ) -> FutureResult<(), Error> { + ) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); let try_get_client = self.inner.try_get_client(); let params = payload @@ -212,76 +205,66 @@ where }) }) .collect::>(); - FutureResult::new(async move { - try_get_client? - .publish_collabs(&workspace_id, params) - .await - .map_err(FlowyError::from)?; - Ok(()) - }) + try_get_client? 
+ .publish_collabs(&workspace_id, params) + .await + .map_err(FlowyError::from)?; + Ok(()) } - fn unpublish_views(&self, workspace_id: &str, view_ids: Vec) -> FutureResult<(), Error> { + async fn unpublish_views(&self, workspace_id: &str, view_ids: Vec) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); let try_get_client = self.inner.try_get_client(); let view_uuids = view_ids .iter() .map(|id| Uuid::parse_str(id).unwrap_or(Uuid::nil())) .collect::>(); - FutureResult::new(async move { - try_get_client? - .unpublish_collabs(&workspace_id, &view_uuids) - .await - .map_err(FlowyError::from)?; - Ok(()) - }) + try_get_client? + .unpublish_collabs(&workspace_id, &view_uuids) + .await + .map_err(FlowyError::from)?; + Ok(()) } - fn get_publish_info(&self, view_id: &str) -> FutureResult { + async fn get_publish_info(&self, view_id: &str) -> Result { let try_get_client = self.inner.try_get_client(); let view_id = Uuid::parse_str(view_id) .map_err(|_| FlowyError::new(ErrorCode::InvalidParams, "Invalid view id")); - FutureResult::new(async move { - let view_id = view_id?; - let info = try_get_client? - .get_published_collab_info(&view_id) - .await - .map_err(FlowyError::from)?; - Ok(PublishInfoResponse { - view_id: info.view_id.to_string(), - publish_name: info.publish_name, - namespace: info.namespace, - }) + let view_id = view_id?; + let info = try_get_client? + .get_published_collab_info(&view_id) + .await + .map_err(FlowyError::from)?; + Ok(PublishInfoResponse { + view_id: info.view_id.to_string(), + publish_name: info.publish_name, + namespace: info.namespace, }) } - fn set_publish_namespace( + async fn set_publish_namespace( &self, workspace_id: &str, new_namespace: &str, - ) -> FutureResult<(), Error> { + ) -> Result<(), Error> { let workspace_id = workspace_id.to_string(); let namespace = new_namespace.to_string(); let try_get_client = self.inner.try_get_client(); - FutureResult::new(async move { - try_get_client? 
- .set_workspace_publish_namespace(&workspace_id, &namespace) - .await - .map_err(FlowyError::from)?; - Ok(()) - }) + try_get_client? + .set_workspace_publish_namespace(&workspace_id, &namespace) + .await + .map_err(FlowyError::from)?; + Ok(()) } - fn get_publish_namespace(&self, workspace_id: &str) -> FutureResult { + async fn get_publish_namespace(&self, workspace_id: &str) -> Result { let workspace_id = workspace_id.to_string(); let try_get_client = self.inner.try_get_client(); - FutureResult::new(async move { - let namespace = try_get_client? - .get_workspace_publish_namespace(&workspace_id) - .await - .map_err(FlowyError::from)?; - Ok(namespace) - }) + let namespace = try_get_client? + .get_workspace_publish_namespace(&workspace_id) + .await + .map_err(FlowyError::from)?; + Ok(namespace) } } diff --git a/frontend/rust-lib/flowy-server/src/af_cloud/impls/user/cloud_service_impl.rs b/frontend/rust-lib/flowy-server/src/af_cloud/impls/user/cloud_service_impl.rs index e8a14e5ee4..5df9dce1dd 100644 --- a/frontend/rust-lib/flowy-server/src/af_cloud/impls/user/cloud_service_impl.rs +++ b/frontend/rust-lib/flowy-server/src/af_cloud/impls/user/cloud_service_impl.rs @@ -2,6 +2,7 @@ use std::collections::HashMap; use std::sync::Arc; use anyhow::anyhow; +use arc_swap::ArcSwapOption; use client_api::entity::billing_dto::{ RecurringInterval, SetSubscriptionRecurringInterval, SubscriptionCancelRequest, SubscriptionPlan, SubscriptionPlanDetail, WorkspaceSubscriptionStatus, WorkspaceUsageAndLimit, @@ -16,7 +17,6 @@ use client_api::entity::{ use client_api::entity::{QueryCollab, QueryCollabParams}; use client_api::{Client, ClientConfiguration}; use collab_entity::{CollabObject, CollabType}; -use parking_lot::RwLock; use tracing::instrument; use flowy_error::{ErrorCode, FlowyError, FlowyResult}; @@ -25,8 +25,8 @@ use flowy_user_pub::entities::{ AFCloudOAuthParams, AuthResponse, Role, UpdateUserProfileParams, UserCredentials, UserProfile, UserWorkspace, WorkspaceInvitation, 
WorkspaceInvitationStatus, WorkspaceMember, }; +use lib_infra::async_trait::async_trait; use lib_infra::box_any::BoxAny; -use lib_infra::future::FutureResult; use uuid::Uuid; use crate::af_cloud::define::{ServerUser, USER_SIGN_IN_URL}; @@ -41,7 +41,7 @@ use super::dto::{from_af_workspace_invitation_status, to_workspace_invitation_st pub(crate) struct AFCloudUserAuthServiceImpl { server: T, - user_change_recv: RwLock>>, + user_change_recv: ArcSwapOption>, user: Arc, } @@ -53,613 +53,533 @@ impl AFCloudUserAuthServiceImpl { ) -> Self { Self { server, - user_change_recv: RwLock::new(Some(user_change_recv)), + user_change_recv: ArcSwapOption::new(Some(Arc::new(user_change_recv))), user, } } } +#[async_trait] impl UserCloudService for AFCloudUserAuthServiceImpl where T: AFServer, { - fn sign_up(&self, params: BoxAny) -> FutureResult { + async fn sign_up(&self, params: BoxAny) -> Result { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let params = oauth_params_from_box_any(params)?; - let resp = user_sign_up_request(try_get_client?, params).await?; - Ok(resp) - }) + let params = oauth_params_from_box_any(params)?; + let resp = user_sign_up_request(try_get_client?, params).await?; + Ok(resp) } // Zack: Not sure if this is needed anymore since sign_up handles both cases - fn sign_in(&self, params: BoxAny) -> FutureResult { + async fn sign_in(&self, params: BoxAny) -> Result { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let params = oauth_params_from_box_any(params)?; - let resp = user_sign_in_with_url(client, params).await?; - Ok(resp) - }) + let client = try_get_client?; + let params = oauth_params_from_box_any(params)?; + let resp = user_sign_in_with_url(client, params).await?; + Ok(resp) } - fn sign_out(&self, _token: Option) -> FutureResult<(), FlowyError> { + async fn sign_out(&self, _token: Option) -> Result<(), FlowyError> { // Calling the sign_out method 
that will revoke all connected devices' refresh tokens. // So do nothing here. - FutureResult::new(async move { Ok(()) }) + Ok(()) } - fn generate_sign_in_url_with_email(&self, email: &str) -> FutureResult { + async fn generate_sign_in_url_with_email(&self, email: &str) -> Result { let email = email.to_string(); let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let admin_client = get_admin_client(&client).await?; - let action_link = admin_client.generate_sign_in_action_link(&email).await?; - let sign_in_url = client.extract_sign_in_url(&action_link).await?; - Ok(sign_in_url) - }) + let client = try_get_client?; + let admin_client = get_admin_client(&client).await?; + let action_link = admin_client.generate_sign_in_action_link(&email).await?; + let sign_in_url = client.extract_sign_in_url(&action_link).await?; + Ok(sign_in_url) } - fn create_user(&self, email: &str, password: &str) -> FutureResult<(), FlowyError> { + async fn create_user(&self, email: &str, password: &str) -> Result<(), FlowyError> { let password = password.to_string(); let email = email.to_string(); let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let admin_client = get_admin_client(&client).await?; - admin_client - .create_email_verified_user(&email, &password) - .await?; + let client = try_get_client?; + let admin_client = get_admin_client(&client).await?; + admin_client + .create_email_verified_user(&email, &password) + .await?; - Ok(()) - }) + Ok(()) } - fn sign_in_with_password( + async fn sign_in_with_password( &self, email: &str, password: &str, - ) -> FutureResult { + ) -> Result { let password = password.to_string(); let email = email.to_string(); let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - client.sign_in_password(&email, &password).await?; - let profile = 
client.get_profile().await?; - let token = client.get_token()?; - let profile = user_profile_from_af_profile(token, profile)?; - Ok(profile) - }) + let client = try_get_client?; + client.sign_in_password(&email, &password).await?; + let profile = client.get_profile().await?; + let token = client.get_token()?; + let profile = user_profile_from_af_profile(token, profile)?; + Ok(profile) } - fn sign_in_with_magic_link( + async fn sign_in_with_magic_link( &self, email: &str, redirect_to: &str, - ) -> FutureResult<(), FlowyError> { + ) -> Result<(), FlowyError> { let email = email.to_owned(); let redirect_to = redirect_to.to_owned(); let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - client - .sign_in_with_magic_link(&email, Some(redirect_to)) - .await?; - Ok(()) - }) + let client = try_get_client?; + client + .sign_in_with_magic_link(&email, Some(redirect_to)) + .await?; + Ok(()) } - fn generate_oauth_url_with_provider(&self, provider: &str) -> FutureResult { + async fn generate_oauth_url_with_provider(&self, provider: &str) -> Result { let provider = AuthProvider::from(provider); let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let provider = provider.ok_or(anyhow!("invalid provider"))?; - let url = try_get_client? - .generate_oauth_url_with_provider(&provider) - .await?; - Ok(url) - }) + let provider = provider.ok_or(anyhow!("invalid provider"))?; + let url = try_get_client? 
+ .generate_oauth_url_with_provider(&provider) + .await?; + Ok(url) } - fn update_user( + async fn update_user( &self, _credential: UserCredentials, params: UpdateUserProfileParams, - ) -> FutureResult<(), FlowyError> { + ) -> Result<(), FlowyError> { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - client - .update_user(af_update_from_update_params(params)) - .await?; - Ok(()) - }) + let client = try_get_client?; + client + .update_user(af_update_from_update_params(params)) + .await?; + Ok(()) } #[instrument(level = "debug", skip_all)] - fn get_user_profile( + async fn get_user_profile( &self, _credential: UserCredentials, - ) -> FutureResult { + ) -> Result { let try_get_client = self.server.try_get_client(); let cloned_user = self.user.clone(); - FutureResult::new(async move { - let expected_workspace_id = cloned_user.workspace_id()?; - let client = try_get_client?; - let profile = client.get_profile().await?; - let token = client.get_token()?; - let profile = user_profile_from_af_profile(token, profile)?; + let expected_workspace_id = cloned_user.workspace_id()?; + let client = try_get_client?; + let profile = client.get_profile().await?; + let token = client.get_token()?; + let profile = user_profile_from_af_profile(token, profile)?; - // Discard the response if the user has switched to a new workspace. This avoids updating the - // user profile with potentially outdated information when the workspace ID no longer matches. - check_request_workspace_id_is_match( - &expected_workspace_id, - &cloned_user, - "get user profile", - )?; - Ok(profile) - }) + // Discard the response if the user has switched to a new workspace. This avoids updating the + // user profile with potentially outdated information when the workspace ID no longer matches. 
+ check_request_workspace_id_is_match(&expected_workspace_id, &cloned_user, "get user profile")?; + Ok(profile) } - fn open_workspace(&self, workspace_id: &str) -> FutureResult { + async fn open_workspace(&self, workspace_id: &str) -> Result { let try_get_client = self.server.try_get_client(); let workspace_id = workspace_id.to_string(); - FutureResult::new(async move { - let client = try_get_client?; - let af_workspace = client.open_workspace(&workspace_id).await?; - Ok(to_user_workspace(af_workspace)) - }) + let client = try_get_client?; + let af_workspace = client.open_workspace(&workspace_id).await?; + Ok(to_user_workspace(af_workspace)) } - fn get_all_workspace(&self, _uid: i64) -> FutureResult, FlowyError> { + async fn get_all_workspace(&self, _uid: i64) -> Result, FlowyError> { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let workspaces = try_get_client?.get_workspaces().await?; - to_user_workspaces(workspaces) - }) + let workspaces = try_get_client?.get_workspaces().await?; + to_user_workspaces(workspaces) } - fn invite_workspace_member( - &self, - invitee_email: String, - workspace_id: String, - role: Role, - ) -> FutureResult<(), FlowyError> { - let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - try_get_client? - .invite_workspace_members( - &workspace_id, - vec![WorkspaceMemberInvitation { - email: invitee_email, - role: to_af_role(role), - }], - ) - .await?; - Ok(()) - }) - } - - fn list_workspace_invitations( - &self, - filter: Option, - ) -> FutureResult, FlowyError> { - let try_get_client = self.server.try_get_client(); - let filter = filter.map(to_workspace_invitation_status); - - FutureResult::new(async move { - let r = try_get_client? - .list_workspace_invitations(filter) - .await? 
- .into_iter() - .map(to_workspace_invitation) - .collect(); - Ok(r) - }) - } - - fn accept_workspace_invitations(&self, invite_id: String) -> FutureResult<(), FlowyError> { - let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - try_get_client? - .accept_workspace_invitation(&invite_id) - .await?; - Ok(()) - }) - } - - fn remove_workspace_member( - &self, - user_email: String, - workspace_id: String, - ) -> FutureResult<(), FlowyError> { - let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - try_get_client? - .remove_workspace_members(workspace_id, vec![user_email]) - .await?; - Ok(()) - }) - } - - fn update_workspace_member( - &self, - user_email: String, - workspace_id: String, - role: Role, - ) -> FutureResult<(), FlowyError> { - let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let changeset = WorkspaceMemberChangeset::new(user_email).with_role(to_af_role(role)); - try_get_client? - .update_workspace_member(workspace_id, changeset) - .await?; - Ok(()) - }) - } - - fn get_workspace_members( - &self, - workspace_id: String, - ) -> FutureResult, FlowyError> { - let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let members = try_get_client? - .get_workspace_members(&workspace_id) - .await? 
- .into_iter() - .map(from_af_workspace_member) - .collect(); - Ok(members) - }) - } - - fn get_workspace_member( - &self, - workspace_id: String, - uid: i64, - ) -> FutureResult { - let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let query = QueryWorkspaceMember { - workspace_id: workspace_id.clone(), - uid, - }; - let member = client.get_workspace_member(query).await?; - Ok(from_af_workspace_member(member)) - }) - } - - #[instrument(level = "debug", skip_all)] - fn get_user_awareness_doc_state( - &self, - _uid: i64, - workspace_id: &str, - object_id: &str, - ) -> FutureResult, FlowyError> { - let workspace_id = workspace_id.to_string(); - let object_id = object_id.to_string(); - let try_get_client = self.server.try_get_client(); - let cloned_user = self.user.clone(); - FutureResult::new(async move { - let params = QueryCollabParams { - workspace_id: workspace_id.clone(), - inner: QueryCollab::new(object_id, CollabType::UserAwareness), - }; - let resp = try_get_client?.get_collab(params).await?; - check_request_workspace_id_is_match( - &workspace_id, - &cloned_user, - "get user awareness object", - )?; - Ok(resp.encode_collab.doc_state.to_vec()) - }) - } - - fn subscribe_user_update(&self) -> Option { - self.user_change_recv.write().take() - } - - fn reset_workspace(&self, _collab_object: CollabObject) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) - } - - fn create_collab_object( - &self, - collab_object: &CollabObject, - data: Vec, - ) -> FutureResult<(), FlowyError> { - let try_get_client = self.server.try_get_client(); - let collab_object = collab_object.clone(); - FutureResult::new(async move { - let client = try_get_client?; - let params = CreateCollabParams { - workspace_id: collab_object.workspace_id, - object_id: collab_object.object_id, - collab_type: collab_object.collab_type, - encoded_collab_v1: data, - }; - client.create_collab(params).await?; - Ok(()) - }) 
- } - - fn batch_create_collab_object( - &self, - workspace_id: &str, - objects: Vec, - ) -> FutureResult<(), FlowyError> { - let workspace_id = workspace_id.to_string(); - let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let params = objects - .into_iter() - .map(|object| { - CollabParams::new( - object.object_id, - u8::from(object.collab_type).into(), - object.encoded_collab, - ) - }) - .collect::>(); - try_get_client? - .create_collab_list(&workspace_id, params) - .await - .map_err(FlowyError::from)?; - Ok(()) - }) - } - - fn create_workspace(&self, workspace_name: &str) -> FutureResult { + async fn create_workspace(&self, workspace_name: &str) -> Result { let try_get_client = self.server.try_get_client(); let workspace_name_owned = workspace_name.to_owned(); - FutureResult::new(async move { - let client = try_get_client?; - let new_workspace = client - .create_workspace(CreateWorkspaceParam { - workspace_name: Some(workspace_name_owned), - }) - .await?; - Ok(to_user_workspace(new_workspace)) - }) + let client = try_get_client?; + let new_workspace = client + .create_workspace(CreateWorkspaceParam { + workspace_name: Some(workspace_name_owned), + }) + .await?; + Ok(to_user_workspace(new_workspace)) } - fn delete_workspace(&self, workspace_id: &str) -> FutureResult<(), FlowyError> { - let try_get_client = self.server.try_get_client(); - let workspace_id_owned = workspace_id.to_owned(); - FutureResult::new(async move { - let client = try_get_client?; - client.delete_workspace(&workspace_id_owned).await?; - Ok(()) - }) - } - - fn patch_workspace( + async fn patch_workspace( &self, workspace_id: &str, new_workspace_name: Option<&str>, new_workspace_icon: Option<&str>, - ) -> FutureResult<(), FlowyError> { + ) -> Result<(), FlowyError> { let try_get_client = self.server.try_get_client(); let owned_workspace_id = workspace_id.to_owned(); let owned_workspace_name = new_workspace_name.map(|s| s.to_owned()); let owned_workspace_icon = 
new_workspace_icon.map(|s| s.to_owned()); - FutureResult::new(async move { - let workspace_id: Uuid = owned_workspace_id - .parse() - .map_err(|_| ErrorCode::InvalidParams)?; - let client = try_get_client?; - client - .patch_workspace(PatchWorkspaceParam { - workspace_id, - workspace_name: owned_workspace_name, - workspace_icon: owned_workspace_icon, - }) - .await?; - Ok(()) - }) + let workspace_id: Uuid = owned_workspace_id + .parse() + .map_err(|_| ErrorCode::InvalidParams)?; + let client = try_get_client?; + client + .patch_workspace(PatchWorkspaceParam { + workspace_id, + workspace_name: owned_workspace_name, + workspace_icon: owned_workspace_icon, + }) + .await?; + Ok(()) } - fn leave_workspace(&self, workspace_id: &str) -> FutureResult<(), FlowyError> { + async fn delete_workspace(&self, workspace_id: &str) -> Result<(), FlowyError> { + let try_get_client = self.server.try_get_client(); + let workspace_id_owned = workspace_id.to_owned(); + let client = try_get_client?; + client.delete_workspace(&workspace_id_owned).await?; + Ok(()) + } + + async fn invite_workspace_member( + &self, + invitee_email: String, + workspace_id: String, + role: Role, + ) -> Result<(), FlowyError> { + let try_get_client = self.server.try_get_client(); + try_get_client? + .invite_workspace_members( + &workspace_id, + vec![WorkspaceMemberInvitation { + email: invitee_email, + role: to_af_role(role), + }], + ) + .await?; + Ok(()) + } + + async fn list_workspace_invitations( + &self, + filter: Option, + ) -> Result, FlowyError> { + let try_get_client = self.server.try_get_client(); + let filter = filter.map(to_workspace_invitation_status); + + let r = try_get_client? + .list_workspace_invitations(filter) + .await? + .into_iter() + .map(to_workspace_invitation) + .collect(); + Ok(r) + } + + async fn accept_workspace_invitations(&self, invite_id: String) -> Result<(), FlowyError> { + let try_get_client = self.server.try_get_client(); + try_get_client? 
+ .accept_workspace_invitation(&invite_id) + .await?; + Ok(()) + } + + async fn remove_workspace_member( + &self, + user_email: String, + workspace_id: String, + ) -> Result<(), FlowyError> { + let try_get_client = self.server.try_get_client(); + try_get_client? + .remove_workspace_members(workspace_id, vec![user_email]) + .await?; + Ok(()) + } + + async fn update_workspace_member( + &self, + user_email: String, + workspace_id: String, + role: Role, + ) -> Result<(), FlowyError> { + let try_get_client = self.server.try_get_client(); + let changeset = WorkspaceMemberChangeset::new(user_email).with_role(to_af_role(role)); + try_get_client? + .update_workspace_member(workspace_id, changeset) + .await?; + Ok(()) + } + + async fn get_workspace_members( + &self, + workspace_id: String, + ) -> Result, FlowyError> { + let try_get_client = self.server.try_get_client(); + let members = try_get_client? + .get_workspace_members(&workspace_id) + .await? + .into_iter() + .map(from_af_workspace_member) + .collect(); + Ok(members) + } + + async fn get_workspace_member( + &self, + workspace_id: String, + uid: i64, + ) -> Result { + let try_get_client = self.server.try_get_client(); + let client = try_get_client?; + let query = QueryWorkspaceMember { + workspace_id: workspace_id.clone(), + uid, + }; + let member = client.get_workspace_member(query).await?; + Ok(from_af_workspace_member(member)) + } + + #[instrument(level = "debug", skip_all)] + async fn get_user_awareness_doc_state( + &self, + _uid: i64, + workspace_id: &str, + object_id: &str, + ) -> Result, FlowyError> { + let workspace_id = workspace_id.to_string(); + let object_id = object_id.to_string(); + let try_get_client = self.server.try_get_client(); + let cloned_user = self.user.clone(); + let params = QueryCollabParams { + workspace_id: workspace_id.clone(), + inner: QueryCollab::new(object_id, CollabType::UserAwareness), + }; + let resp = try_get_client?.get_collab(params).await?; + 
check_request_workspace_id_is_match(&workspace_id, &cloned_user, "get user awareness object")?; + Ok(resp.encode_collab.doc_state.to_vec()) + } + + fn subscribe_user_update(&self) -> Option { + let rx = self.user_change_recv.swap(None)?; + Arc::into_inner(rx) + } + + async fn reset_workspace(&self, _collab_object: CollabObject) -> Result<(), FlowyError> { + Ok(()) + } + + async fn create_collab_object( + &self, + collab_object: &CollabObject, + data: Vec, + ) -> Result<(), FlowyError> { + let try_get_client = self.server.try_get_client(); + let collab_object = collab_object.clone(); + let client = try_get_client?; + let params = CreateCollabParams { + workspace_id: collab_object.workspace_id, + object_id: collab_object.object_id, + collab_type: collab_object.collab_type, + encoded_collab_v1: data, + }; + client.create_collab(params).await?; + Ok(()) + } + + async fn batch_create_collab_object( + &self, + workspace_id: &str, + objects: Vec, + ) -> Result<(), FlowyError> { + let workspace_id = workspace_id.to_string(); + let try_get_client = self.server.try_get_client(); + let params = objects + .into_iter() + .map(|object| { + CollabParams::new( + object.object_id, + u8::from(object.collab_type).into(), + object.encoded_collab, + ) + }) + .collect::>(); + try_get_client? 
+ .create_collab_list(&workspace_id, params) + .await + .map_err(FlowyError::from)?; + Ok(()) + } + + async fn leave_workspace(&self, workspace_id: &str) -> Result<(), FlowyError> { let try_get_client = self.server.try_get_client(); let workspace_id = workspace_id.to_string(); - FutureResult::new(async move { - let client = try_get_client?; - client.leave_workspace(&workspace_id).await?; - Ok(()) - }) + let client = try_get_client?; + client.leave_workspace(&workspace_id).await?; + Ok(()) } - fn subscribe_workspace( + async fn subscribe_workspace( &self, workspace_id: String, recurring_interval: RecurringInterval, subscription_plan: SubscriptionPlan, success_url: String, - ) -> FutureResult { + ) -> Result { let try_get_client = self.server.try_get_client(); let workspace_id = workspace_id.to_string(); - FutureResult::new(async move { - let client = try_get_client?; - let payment_link = client - .create_subscription( - &workspace_id, - recurring_interval, - subscription_plan, - &success_url, - ) - .await?; - Ok(payment_link) - }) + let client = try_get_client?; + let payment_link = client + .create_subscription( + &workspace_id, + recurring_interval, + subscription_plan, + &success_url, + ) + .await?; + Ok(payment_link) } - fn get_workspace_member_info( + async fn get_workspace_member_info( &self, workspace_id: &str, uid: i64, - ) -> FutureResult { + ) -> Result { let try_get_client = self.server.try_get_client(); let workspace_id = workspace_id.to_string(); - FutureResult::new(async move { - let client = try_get_client?; - let params = QueryWorkspaceMember { - workspace_id: workspace_id.to_string(), - uid, - }; - let member = client.get_workspace_member(params).await?; - let role = match member.role { - AFRole::Owner => Role::Owner, - AFRole::Member => Role::Member, - AFRole::Guest => Role::Guest, - }; - Ok(WorkspaceMember { - email: member.email, - role, - name: member.name, - avatar_url: member.avatar_url, - }) + let client = try_get_client?; + let params = 
QueryWorkspaceMember { + workspace_id: workspace_id.to_string(), + uid, + }; + let member = client.get_workspace_member(params).await?; + let role = match member.role { + AFRole::Owner => Role::Owner, + AFRole::Member => Role::Member, + AFRole::Guest => Role::Guest, + }; + Ok(WorkspaceMember { + email: member.email, + role, + name: member.name, + avatar_url: member.avatar_url, }) } - fn get_workspace_subscriptions( + async fn get_workspace_subscriptions( &self, - ) -> FutureResult, FlowyError> { + ) -> Result, FlowyError> { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let workspace_subscriptions = client.list_subscription().await?; - Ok(workspace_subscriptions) - }) + let client = try_get_client?; + let workspace_subscriptions = client.list_subscription().await?; + Ok(workspace_subscriptions) } - fn get_workspace_subscription_one( + async fn get_workspace_subscription_one( &self, workspace_id: String, - ) -> FutureResult, FlowyError> { + ) -> Result, FlowyError> { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let workspace_subscriptions = client.get_workspace_subscriptions(&workspace_id).await?; - Ok(workspace_subscriptions) - }) + let client = try_get_client?; + let workspace_subscriptions = client.get_workspace_subscriptions(&workspace_id).await?; + Ok(workspace_subscriptions) } - fn cancel_workspace_subscription( + async fn cancel_workspace_subscription( &self, workspace_id: String, plan: SubscriptionPlan, reason: Option, - ) -> FutureResult<(), FlowyError> { + ) -> Result<(), FlowyError> { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - client - .cancel_subscription(&SubscriptionCancelRequest { - workspace_id, - plan, - sync: true, - reason, - }) - .await?; - Ok(()) - }) + let client = try_get_client?; + client + 
.cancel_subscription(&SubscriptionCancelRequest { + workspace_id, + plan, + sync: true, + reason, + }) + .await?; + Ok(()) } - fn get_workspace_plan( + async fn get_workspace_plan( &self, workspace_id: String, - ) -> FutureResult, FlowyError> { + ) -> Result, FlowyError> { let workspace_id = workspace_id.to_string(); let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let plans = client - .get_active_workspace_subscriptions(&workspace_id) - .await?; - Ok(plans) - }) + let client = try_get_client?; + let plans = client + .get_active_workspace_subscriptions(&workspace_id) + .await?; + Ok(plans) } - fn get_workspace_usage( + async fn get_workspace_usage( &self, workspace_id: String, - ) -> FutureResult { + ) -> Result { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let usage = client.get_workspace_usage_and_limit(&workspace_id).await?; - Ok(usage) - }) + let client = try_get_client?; + let usage = client.get_workspace_usage_and_limit(&workspace_id).await?; + Ok(usage) } - fn get_billing_portal_url(&self) -> FutureResult { + async fn get_billing_portal_url(&self) -> Result { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let url = client.get_portal_session_link().await?; - Ok(url) - }) + let client = try_get_client?; + let url = client.get_portal_session_link().await?; + Ok(url) } - fn update_workspace_subscription_payment_period( + async fn update_workspace_subscription_payment_period( &self, workspace_id: String, plan: SubscriptionPlan, recurring_interval: RecurringInterval, - ) -> FutureResult<(), FlowyError> { + ) -> Result<(), FlowyError> { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - client - .set_subscription_recurring_interval(&SetSubscriptionRecurringInterval { - workspace_id, - 
plan, - recurring_interval, - }) - .await?; - Ok(()) - }) + let client = try_get_client?; + client + .set_subscription_recurring_interval(&SetSubscriptionRecurringInterval { + workspace_id, + plan, + recurring_interval, + }) + .await?; + Ok(()) } - fn get_subscription_plan_details(&self) -> FutureResult, FlowyError> { + async fn get_subscription_plan_details(&self) -> Result, FlowyError> { let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let plan_details = client.get_subscription_plan_details().await?; - Ok(plan_details) - }) + let client = try_get_client?; + let plan_details = client.get_subscription_plan_details().await?; + Ok(plan_details) } - fn get_workspace_setting( + async fn get_workspace_setting( &self, workspace_id: &str, - ) -> FutureResult { + ) -> Result { let workspace_id = workspace_id.to_string(); let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let settings = client.get_workspace_settings(&workspace_id).await?; - Ok(settings) - }) + let client = try_get_client?; + let settings = client.get_workspace_settings(&workspace_id).await?; + Ok(settings) } - fn update_workspace_setting( + async fn update_workspace_setting( &self, workspace_id: &str, workspace_settings: AFWorkspaceSettingsChange, - ) -> FutureResult { + ) -> Result { let workspace_id = workspace_id.to_string(); let try_get_client = self.server.try_get_client(); - FutureResult::new(async move { - let client = try_get_client?; - let settings = client - .update_workspace_settings(&workspace_id, &workspace_settings) - .await?; - Ok(settings) - }) + let client = try_get_client?; + let settings = client + .update_workspace_settings(&workspace_id, &workspace_settings) + .await?; + Ok(settings) } } diff --git a/frontend/rust-lib/flowy-server/src/lib.rs b/frontend/rust-lib/flowy-server/src/lib.rs index 704e9e0e49..33f4b0c0d8 100644 --- 
a/frontend/rust-lib/flowy-server/src/lib.rs +++ b/frontend/rust-lib/flowy-server/src/lib.rs @@ -5,8 +5,5 @@ pub mod local_server; mod response; mod server; -#[cfg(feature = "enable_supabase")] -pub mod supabase; - mod default_impl; pub mod util; diff --git a/frontend/rust-lib/flowy-server/src/local_server/impls/database.rs b/frontend/rust-lib/flowy-server/src/local_server/impls/database.rs index 7195430a8f..6d2ad4deab 100644 --- a/frontend/rust-lib/flowy-server/src/local_server/impls/database.rs +++ b/frontend/rust-lib/flowy-server/src/local_server/impls/database.rs @@ -1,79 +1,64 @@ use anyhow::Error; +use collab::core::transaction::DocTransactionExtension; +use collab::entity::EncodedCollab; use collab::preclude::Collab; use collab_entity::define::{DATABASE, DATABASE_ROW_DATA, WORKSPACE_DATABASES}; use collab_entity::CollabType; -use yrs::MapPrelim; +use yrs::{ArrayPrelim, Map, MapPrelim}; -use flowy_database_pub::cloud::{CollabDocStateByOid, DatabaseCloudService, DatabaseSnapshot}; +use flowy_database_pub::cloud::{DatabaseCloudService, DatabaseSnapshot, EncodeCollabByOid}; use lib_infra::async_trait::async_trait; -use lib_infra::future::FutureResult; pub(crate) struct LocalServerDatabaseCloudServiceImpl(); #[async_trait] impl DatabaseCloudService for LocalServerDatabaseCloudServiceImpl { - fn get_database_object_doc_state( + async fn get_database_encode_collab( &self, object_id: &str, collab_type: CollabType, _workspace_id: &str, - ) -> FutureResult>, Error> { + ) -> Result, Error> { let object_id = object_id.to_string(); // create the minimal required data for the given collab type - FutureResult::new(async move { - let data = match collab_type { - CollabType::Database => { - let collab = Collab::new(1, object_id, collab_type, vec![], false); - collab.with_origin_transact_mut(|txn| { - collab.insert_map_with_txn(txn, DATABASE); - }); - collab - .encode_collab_v1(|_| Ok::<(), Error>(()))? 
- .doc_state - .to_vec() - }, - CollabType::WorkspaceDatabase => { - let collab = Collab::new(1, object_id, collab_type, vec![], false); - collab.with_origin_transact_mut(|txn| { - collab.create_array_with_txn::(txn, WORKSPACE_DATABASES, vec![]); - }); - collab - .encode_collab_v1(|_| Ok::<(), Error>(()))? - .doc_state - .to_vec() - }, - CollabType::DatabaseRow => { - let collab = Collab::new(1, object_id, collab_type, vec![], false); - collab.with_origin_transact_mut(|txn| { - collab.insert_map_with_txn(txn, DATABASE_ROW_DATA); - }); - collab - .encode_collab_v1(|_| Ok::<(), Error>(()))? - .doc_state - .to_vec() - }, - _ => vec![], - }; - Ok(Some(data)) - }) + let mut collab = Collab::new(1, object_id, collab_type.clone(), vec![], false); + let mut txn = collab.context.transact_mut(); + match collab_type { + CollabType::Database => { + collab.data.insert(&mut txn, DATABASE, MapPrelim::default()); + }, + CollabType::WorkspaceDatabase => { + collab + .data + .insert(&mut txn, WORKSPACE_DATABASES, ArrayPrelim::default()); + }, + CollabType::DatabaseRow => { + collab + .data + .insert(&mut txn, DATABASE_ROW_DATA, MapPrelim::default()); + }, + _ => { /* do nothing */ }, + }; + + Ok(Some(txn.get_encoded_collab_v1())) } - fn batch_get_database_object_doc_state( + async fn batch_get_database_encode_collab( &self, _object_ids: Vec, _object_ty: CollabType, _workspace_id: &str, - ) -> FutureResult { - FutureResult::new(async move { Ok(CollabDocStateByOid::default()) }) + ) -> Result { + Ok(EncodeCollabByOid::default()) } - fn get_database_collab_object_snapshots( + async fn get_database_collab_object_snapshots( &self, _object_id: &str, _limit: usize, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(vec![]) }) + ) -> Result, Error> { + Ok(vec![]) } } diff --git a/frontend/rust-lib/flowy-server/src/local_server/impls/document.rs b/frontend/rust-lib/flowy-server/src/local_server/impls/document.rs index bc712d03d0..2a69a361f8 100644 --- 
a/frontend/rust-lib/flowy-server/src/local_server/impls/document.rs +++ b/frontend/rust-lib/flowy-server/src/local_server/impls/document.rs @@ -2,39 +2,39 @@ use anyhow::Error; use flowy_document_pub::cloud::*; use flowy_error::{ErrorCode, FlowyError}; -use lib_infra::future::FutureResult; +use lib_infra::async_trait::async_trait; pub(crate) struct LocalServerDocumentCloudServiceImpl(); +#[async_trait] impl DocumentCloudService for LocalServerDocumentCloudServiceImpl { - fn get_document_doc_state( + async fn get_document_doc_state( &self, document_id: &str, _workspace_id: &str, - ) -> FutureResult, FlowyError> { + ) -> Result, FlowyError> { let document_id = document_id.to_string(); - FutureResult::new(async move { - Err(FlowyError::new( - ErrorCode::RecordNotFound, - format!("Document {} not found", document_id), - )) - }) + + Err(FlowyError::new( + ErrorCode::RecordNotFound, + format!("Document {} not found", document_id), + )) } - fn get_document_snapshots( + async fn get_document_snapshots( &self, _document_id: &str, _limit: usize, _workspace_id: &str, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(vec![]) }) + ) -> Result, Error> { + Ok(vec![]) } - fn get_document_data( + async fn get_document_data( &self, _document_id: &str, _workspace_id: &str, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(None) }) + ) -> Result, Error> { + Ok(None) } } diff --git a/frontend/rust-lib/flowy-server/src/local_server/impls/folder.rs b/frontend/rust-lib/flowy-server/src/local_server/impls/folder.rs index 3451212f6f..9c2802b46a 100644 --- a/frontend/rust-lib/flowy-server/src/local_server/impls/folder.rs +++ b/frontend/rust-lib/flowy-server/src/local_server/impls/folder.rs @@ -3,123 +3,113 @@ use std::sync::Arc; use anyhow::{anyhow, Error}; use collab_entity::CollabType; +use crate::local_server::LocalServerDB; use flowy_folder_pub::cloud::{ gen_workspace_id, FolderCloudService, FolderCollabParams, FolderData, FolderSnapshot, Workspace, 
WorkspaceRecord, }; use flowy_folder_pub::entities::{PublishInfoResponse, PublishPayload}; -use lib_infra::future::FutureResult; - -use crate::local_server::LocalServerDB; +use lib_infra::async_trait::async_trait; pub(crate) struct LocalServerFolderCloudServiceImpl { #[allow(dead_code)] pub db: Arc, } +#[async_trait] impl FolderCloudService for LocalServerFolderCloudServiceImpl { - fn create_workspace(&self, uid: i64, name: &str) -> FutureResult { + async fn create_workspace(&self, uid: i64, name: &str) -> Result { let name = name.to_string(); - FutureResult::new(async move { - Ok(Workspace::new( - gen_workspace_id().to_string(), - name.to_string(), - uid, - )) - }) + Ok(Workspace::new( + gen_workspace_id().to_string(), + name.to_string(), + uid, + )) } - fn open_workspace(&self, _workspace_id: &str) -> FutureResult<(), Error> { - FutureResult::new(async { Ok(()) }) + async fn open_workspace(&self, _workspace_id: &str) -> Result<(), Error> { + Ok(()) } - fn get_all_workspace(&self) -> FutureResult, Error> { - FutureResult::new(async { Ok(vec![]) }) + async fn get_all_workspace(&self) -> Result, Error> { + Ok(vec![]) } - fn get_folder_data( + async fn get_folder_data( &self, _workspace_id: &str, _uid: &i64, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(None) }) + ) -> Result, Error> { + Ok(None) } - fn get_folder_snapshots( + async fn get_folder_snapshots( &self, _workspace_id: &str, _limit: usize, - ) -> FutureResult, Error> { - FutureResult::new(async move { Ok(vec![]) }) + ) -> Result, Error> { + Ok(vec![]) } - fn get_folder_doc_state( + async fn get_folder_doc_state( &self, _workspace_id: &str, _uid: i64, _collab_type: CollabType, _object_id: &str, - ) -> FutureResult, Error> { - FutureResult::new(async { - Err(anyhow!( - "Local server doesn't support get collab doc state from remote" - )) - }) + ) -> Result, Error> { + Err(anyhow!( + "Local server doesn't support get collab doc state from remote" + )) } - fn 
batch_create_folder_collab_objects( + async fn batch_create_folder_collab_objects( &self, _workspace_id: &str, _objects: Vec, - ) -> FutureResult<(), Error> { - FutureResult::new(async { Err(anyhow!("Local server doesn't support create collab")) }) + ) -> Result<(), Error> { + Err(anyhow!("Local server doesn't support create collab")) } fn service_name(&self) -> String { "Local".to_string() } - fn publish_view( + async fn publish_view( &self, _workspace_id: &str, _payload: Vec, - ) -> FutureResult<(), Error> { - FutureResult::new(async { Err(anyhow!("Local server doesn't support publish view")) }) + ) -> Result<(), Error> { + Err(anyhow!("Local server doesn't support publish view")) } - fn unpublish_views( + async fn unpublish_views( &self, _workspace_id: &str, _view_ids: Vec, - ) -> FutureResult<(), Error> { - FutureResult::new(async { Err(anyhow!("Local server doesn't support unpublish views")) }) + ) -> Result<(), Error> { + Err(anyhow!("Local server doesn't support unpublish views")) } - fn get_publish_info(&self, _view_id: &str) -> FutureResult { - FutureResult::new(async move { - Err(anyhow!( - "Local server doesn't support get publish info from remote" - )) - }) + async fn get_publish_info(&self, _view_id: &str) -> Result { + Err(anyhow!( + "Local server doesn't support get publish info from remote" + )) } - fn set_publish_namespace( + async fn set_publish_namespace( &self, _workspace_id: &str, _new_namespace: &str, - ) -> FutureResult<(), Error> { - FutureResult::new(async { - Err(anyhow!( - "Local server doesn't support set publish namespace" - )) - }) + ) -> Result<(), Error> { + Err(anyhow!( + "Local server doesn't support set publish namespace" + )) } - fn get_publish_namespace(&self, _workspace_id: &str) -> FutureResult { - FutureResult::new(async { - Err(anyhow!( - "Local server doesn't support get publish namespace" - )) - }) + async fn get_publish_namespace(&self, _workspace_id: &str) -> Result { + Err(anyhow!( + "Local server doesn't support get 
publish namespace" + )) } } diff --git a/frontend/rust-lib/flowy-server/src/local_server/impls/user.rs b/frontend/rust-lib/flowy-server/src/local_server/impls/user.rs index d5fa1524b6..092eb946ef 100644 --- a/frontend/rust-lib/flowy-server/src/local_server/impls/user.rs +++ b/frontend/rust-lib/flowy-server/src/local_server/impls/user.rs @@ -2,21 +2,22 @@ use std::sync::Arc; use collab_entity::CollabObject; use lazy_static::lazy_static; -use parking_lot::Mutex; +use tokio::sync::Mutex; use uuid::Uuid; use flowy_error::FlowyError; use flowy_user_pub::cloud::{UserCloudService, UserCollabParams}; use flowy_user_pub::entities::*; use flowy_user_pub::DEFAULT_USER_NAME; +use lib_infra::async_trait::async_trait; use lib_infra::box_any::BoxAny; -use lib_infra::future::FutureResult; use lib_infra::util::timestamp; use crate::local_server::uid::UserIDGenerator; use crate::local_server::LocalServerDB; lazy_static! { + //FIXME: seriously, userID generation should work using lock-free algorithm static ref ID_GEN: Mutex = Mutex::new(UserIDGenerator::new(1)); } @@ -25,114 +26,101 @@ pub(crate) struct LocalServerUserAuthServiceImpl { pub db: Arc, } +#[async_trait] impl UserCloudService for LocalServerUserAuthServiceImpl { - fn sign_up(&self, params: BoxAny) -> FutureResult { - FutureResult::new(async move { - let params = params.unbox_or_error::()?; - let uid = ID_GEN.lock().next_id(); - let workspace_id = uuid::Uuid::new_v4().to_string(); - let user_workspace = UserWorkspace::new(&workspace_id, uid); - let user_name = if params.name.is_empty() { - DEFAULT_USER_NAME() - } else { - params.name.clone() - }; - Ok(AuthResponse { - user_id: uid, - user_uuid: Uuid::new_v4(), - name: user_name, - latest_workspace: user_workspace.clone(), - user_workspaces: vec![user_workspace], - is_new_user: true, - email: Some(params.email), - token: None, - encryption_type: EncryptionType::NoEncryption, - updated_at: timestamp(), - metadata: None, - }) + async fn sign_up(&self, params: BoxAny) -> 
Result { + let params = params.unbox_or_error::()?; + let uid = ID_GEN.lock().await.next_id(); + let workspace_id = uuid::Uuid::new_v4().to_string(); + let user_workspace = UserWorkspace::new(&workspace_id, uid); + let user_name = if params.name.is_empty() { + DEFAULT_USER_NAME() + } else { + params.name.clone() + }; + Ok(AuthResponse { + user_id: uid, + user_uuid: Uuid::new_v4(), + name: user_name, + latest_workspace: user_workspace.clone(), + user_workspaces: vec![user_workspace], + is_new_user: true, + email: Some(params.email), + token: None, + encryption_type: EncryptionType::NoEncryption, + updated_at: timestamp(), + metadata: None, }) } - fn sign_in(&self, params: BoxAny) -> FutureResult { + async fn sign_in(&self, params: BoxAny) -> Result { let db = self.db.clone(); - FutureResult::new(async move { - let params: SignInParams = params.unbox_or_error::()?; - let uid = ID_GEN.lock().next_id(); + let params: SignInParams = params.unbox_or_error::()?; + let uid = ID_GEN.lock().await.next_id(); - let user_workspace = db - .get_user_workspace(uid)? - .unwrap_or_else(make_user_workspace); - Ok(AuthResponse { - user_id: uid, - user_uuid: Uuid::new_v4(), - name: params.name, - latest_workspace: user_workspace.clone(), - user_workspaces: vec![user_workspace], - is_new_user: false, - email: Some(params.email), - token: None, - encryption_type: EncryptionType::NoEncryption, - updated_at: timestamp(), - metadata: None, - }) + let user_workspace = db + .get_user_workspace(uid)? 
+ .unwrap_or_else(make_user_workspace); + Ok(AuthResponse { + user_id: uid, + user_uuid: Uuid::new_v4(), + name: params.name, + latest_workspace: user_workspace.clone(), + user_workspaces: vec![user_workspace], + is_new_user: false, + email: Some(params.email), + token: None, + encryption_type: EncryptionType::NoEncryption, + updated_at: timestamp(), + metadata: None, }) } - fn sign_out(&self, _token: Option) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) + async fn sign_out(&self, _token: Option) -> Result<(), FlowyError> { + Ok(()) } - fn generate_sign_in_url_with_email(&self, _email: &str) -> FutureResult { - FutureResult::new(async { - Err( - FlowyError::local_version_not_support() - .with_context("Not support generate sign in url with email"), - ) - }) + async fn generate_sign_in_url_with_email(&self, _email: &str) -> Result { + Err( + FlowyError::local_version_not_support() + .with_context("Not support generate sign in url with email"), + ) } - fn create_user(&self, _email: &str, _password: &str) -> FutureResult<(), FlowyError> { - FutureResult::new(async { - Err(FlowyError::local_version_not_support().with_context("Not support create user")) - }) + async fn create_user(&self, _email: &str, _password: &str) -> Result<(), FlowyError> { + Err(FlowyError::local_version_not_support().with_context("Not support create user")) } - fn sign_in_with_password( + async fn sign_in_with_password( &self, _email: &str, _password: &str, - ) -> FutureResult { - FutureResult::new(async { - Err(FlowyError::local_version_not_support().with_context("Not support")) - }) + ) -> Result { + Err(FlowyError::local_version_not_support().with_context("Not support")) } - fn sign_in_with_magic_link( + async fn sign_in_with_magic_link( &self, _email: &str, _redirect_to: &str, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { - Err(FlowyError::local_version_not_support().with_context("Not support")) - }) + ) -> Result<(), FlowyError> { + 
Err(FlowyError::local_version_not_support().with_context("Not support")) } - fn generate_oauth_url_with_provider(&self, _provider: &str) -> FutureResult { - FutureResult::new(async { - Err(FlowyError::internal().with_context("Can't oauth url when using offline mode")) - }) + async fn generate_oauth_url_with_provider(&self, _provider: &str) -> Result { + Err(FlowyError::internal().with_context("Can't oauth url when using offline mode")) } - fn update_user( + async fn update_user( &self, _credential: UserCredentials, _params: UpdateUserProfileParams, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) + ) -> Result<(), FlowyError> { + Ok(()) } - fn get_user_profile(&self, credential: UserCredentials) -> FutureResult { - let result = match credential.uid { + async fn get_user_profile(&self, credential: UserCredentials) -> Result { + match credential.uid { None => Err(FlowyError::record_not_found()), Some(uid) => { self.db.get_user_profile(uid).map(|mut profile| { @@ -141,88 +129,77 @@ impl UserCloudService for LocalServerUserAuthServiceImpl { profile }) }, - }; - FutureResult::new(async { result }) + } } - fn open_workspace(&self, _workspace_id: &str) -> FutureResult { - FutureResult::new(async { - Err( - FlowyError::local_version_not_support() - .with_context("local server doesn't support open workspace"), - ) - }) + async fn open_workspace(&self, _workspace_id: &str) -> Result { + Err( + FlowyError::local_version_not_support() + .with_context("local server doesn't support open workspace"), + ) } - fn get_all_workspace(&self, _uid: i64) -> FutureResult, FlowyError> { - FutureResult::new(async { Ok(vec![]) }) + async fn get_all_workspace(&self, _uid: i64) -> Result, FlowyError> { + Ok(vec![]) } - fn get_user_awareness_doc_state( + async fn get_user_awareness_doc_state( &self, _uid: i64, _workspace_id: &str, _object_id: &str, - ) -> FutureResult, FlowyError> { + ) -> Result, FlowyError> { // must return record not found error - 
FutureResult::new(async { Err(FlowyError::record_not_found()) }) + Err(FlowyError::record_not_found()) } - fn reset_workspace(&self, _collab_object: CollabObject) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) + async fn reset_workspace(&self, _collab_object: CollabObject) -> Result<(), FlowyError> { + Ok(()) } - fn create_collab_object( + async fn create_collab_object( &self, _collab_object: &CollabObject, _data: Vec, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) + ) -> Result<(), FlowyError> { + Ok(()) } - fn batch_create_collab_object( + async fn batch_create_collab_object( &self, _workspace_id: &str, _objects: Vec, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { - Err( - FlowyError::local_version_not_support() - .with_context("local server doesn't support batch create collab object"), - ) - }) + ) -> Result<(), FlowyError> { + Err( + FlowyError::local_version_not_support() + .with_context("local server doesn't support batch create collab object"), + ) } - fn create_workspace(&self, _workspace_name: &str) -> FutureResult { - FutureResult::new(async { - Err( - FlowyError::local_version_not_support() - .with_context("local server doesn't support multiple workspaces"), - ) - }) + async fn create_workspace(&self, _workspace_name: &str) -> Result { + Err( + FlowyError::local_version_not_support() + .with_context("local server doesn't support multiple workspaces"), + ) } - fn delete_workspace(&self, _workspace_id: &str) -> FutureResult<(), FlowyError> { - FutureResult::new(async { - Err( - FlowyError::local_version_not_support() - .with_context("local server doesn't support multiple workspaces"), - ) - }) + async fn delete_workspace(&self, _workspace_id: &str) -> Result<(), FlowyError> { + Err( + FlowyError::local_version_not_support() + .with_context("local server doesn't support multiple workspaces"), + ) } - fn patch_workspace( + async fn patch_workspace( &self, _workspace_id: &str, 
_new_workspace_name: Option<&str>, _new_workspace_icon: Option<&str>, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { - Err( - FlowyError::local_version_not_support() - .with_context("local server doesn't support multiple workspaces"), - ) - }) + ) -> Result<(), FlowyError> { + Err( + FlowyError::local_version_not_support() + .with_context("local server doesn't support multiple workspaces"), + ) } } diff --git a/frontend/rust-lib/flowy-server/src/local_server/server.rs b/frontend/rust-lib/flowy-server/src/local_server/server.rs index e0ab174f75..cb8b545c53 100644 --- a/frontend/rust-lib/flowy-server/src/local_server/server.rs +++ b/frontend/rust-lib/flowy-server/src/local_server/server.rs @@ -1,7 +1,6 @@ use flowy_search_pub::cloud::SearchCloudService; use std::sync::Arc; -use parking_lot::RwLock; use tokio::sync::mpsc; use flowy_database_pub::cloud::{DatabaseAIService, DatabaseCloudService}; @@ -28,7 +27,7 @@ pub trait LocalServerDB: Send + Sync + 'static { pub struct LocalServer { local_db: Arc, - stop_tx: RwLock>>, + stop_tx: Option>, } impl LocalServer { @@ -40,7 +39,7 @@ impl LocalServer { } pub async fn stop(&self) { - let sender = self.stop_tx.read().clone(); + let sender = self.stop_tx.clone(); if let Some(stop_tx) = sender { let _ = stop_tx.send(()).await; } diff --git a/frontend/rust-lib/flowy-server/src/server.rs b/frontend/rust-lib/flowy-server/src/server.rs index 2c4ee66b03..ee07eefa5a 100644 --- a/frontend/rust-lib/flowy-server/src/server.rs +++ b/frontend/rust-lib/flowy-server/src/server.rs @@ -5,9 +5,9 @@ use flowy_search_pub::cloud::SearchCloudService; use std::sync::Arc; use anyhow::Error; +use arc_swap::ArcSwapOption; use client_api::collab_sync::ServerCollabMessage; use flowy_ai_pub::cloud::ChatCloudService; -use parking_lot::RwLock; use tokio_stream::wrappers::WatchStream; #[cfg(feature = "enable_supabase")] use {collab_entity::CollabObject, collab_plugins::cloud_storage::RemoteCollabStorage}; @@ -154,23 +154,23 @@ pub trait 
AppFlowyServer: Send + Sync + 'static { } pub struct EncryptionImpl { - secret: RwLock>, + secret: ArcSwapOption, } impl EncryptionImpl { pub fn new(secret: Option) -> Self { Self { - secret: RwLock::new(secret), + secret: ArcSwapOption::from(secret.map(Arc::new)), } } } impl AppFlowyEncryption for EncryptionImpl { fn get_secret(&self) -> Option { - self.secret.read().clone() + self.secret.load().as_ref().map(|s| s.to_string()) } fn set_secret(&self, secret: String) { - *self.secret.write() = Some(secret); + self.secret.store(Some(secret.into())); } } diff --git a/frontend/rust-lib/flowy-server/src/supabase/api/collab_storage.rs b/frontend/rust-lib/flowy-server/src/supabase/api/collab_storage.rs index c442e686ea..bb5705cbc8 100644 --- a/frontend/rust-lib/flowy-server/src/supabase/api/collab_storage.rs +++ b/frontend/rust-lib/flowy-server/src/supabase/api/collab_storage.rs @@ -2,6 +2,7 @@ use std::str::FromStr; use std::sync::{Arc, Weak}; use anyhow::Error; +use arc_swap::ArcSwapOption; use chrono::{DateTime, Utc}; use client_api::collab_sync::MsgId; use collab::core::collab::DataSource; @@ -10,7 +11,6 @@ use collab_entity::CollabObject; use collab_plugins::cloud_storage::{ RemoteCollabSnapshot, RemoteCollabState, RemoteCollabStorage, RemoteUpdateReceiver, }; -use parking_lot::Mutex; use tokio::task::spawn_blocking; use lib_infra::async_trait::async_trait; @@ -28,7 +28,7 @@ use crate::AppFlowyEncryption; pub struct SupabaseCollabStorageImpl { server: T, - rx: Mutex>, + rx: ArcSwapOption, encryption: Weak, } @@ -40,7 +40,7 @@ impl SupabaseCollabStorageImpl { ) -> Self { Self { server, - rx: Mutex::new(rx), + rx: ArcSwapOption::new(rx.map(Arc::new)), encryption, } } @@ -186,11 +186,14 @@ where } fn subscribe_remote_updates(&self, _object: &CollabObject) -> Option { - let rx = self.rx.lock().take(); - if rx.is_none() { - tracing::warn!("The receiver is already taken"); + let rx = self.rx.swap(None); + match rx { + Some(rx) => Arc::into_inner(rx), + None => { + 
tracing::warn!("The receiver is already taken"); + None + }, } - rx } } diff --git a/frontend/rust-lib/flowy-server/src/supabase/api/document.rs b/frontend/rust-lib/flowy-server/src/supabase/api/document.rs index a0e5087938..66edb7ed7c 100644 --- a/frontend/rust-lib/flowy-server/src/supabase/api/document.rs +++ b/frontend/rust-lib/flowy-server/src/supabase/api/document.rs @@ -94,7 +94,7 @@ where let action = FetchObjectUpdateAction::new(document_id.clone(), CollabType::Document, postgrest); let doc_state = action.run_with_fix_interval(5, 10).await?; - let document = Document::from_doc_state( + let document = Document::open_with_options( CollabOrigin::Empty, DataSource::DocStateV1(doc_state), &document_id, diff --git a/frontend/rust-lib/flowy-server/src/supabase/api/postgres_server.rs b/frontend/rust-lib/flowy-server/src/supabase/api/postgres_server.rs index 8db0910896..9ab3379486 100644 --- a/frontend/rust-lib/flowy-server/src/supabase/api/postgres_server.rs +++ b/frontend/rust-lib/flowy-server/src/supabase/api/postgres_server.rs @@ -2,7 +2,7 @@ use std::ops::Deref; use std::sync::{Arc, Weak}; use anyhow::Error; -use parking_lot::RwLock; +use arc_swap::ArcSwapOption; use postgrest::Postgrest; use flowy_error::{ErrorCode, FlowyError}; @@ -77,11 +77,11 @@ where } #[derive(Clone)] -pub struct SupabaseServerServiceImpl(pub Arc>>>); +pub struct SupabaseServerServiceImpl(pub Arc>); impl SupabaseServerServiceImpl { pub fn new(postgrest: Arc) -> Self { - Self(Arc::new(RwLock::new(Some(postgrest)))) + Self(Arc::new(ArcSwapOption::from(Some(postgrest)))) } } @@ -89,7 +89,7 @@ impl SupabaseServerService for SupabaseServerServiceImpl { fn get_postgrest(&self) -> Option> { self .0 - .read() + .load() .as_ref() .map(|server| server.postgrest.clone()) } @@ -97,7 +97,7 @@ impl SupabaseServerService for SupabaseServerServiceImpl { fn try_get_postgrest(&self) -> Result, Error> { self .0 - .read() + .load() .as_ref() .map(|server| server.postgrest.clone()) .ok_or_else(|| { diff --git 
a/frontend/rust-lib/flowy-server/src/supabase/api/user.rs b/frontend/rust-lib/flowy-server/src/supabase/api/user.rs index b537a5689a..b8c55cf535 100644 --- a/frontend/rust-lib/flowy-server/src/supabase/api/user.rs +++ b/frontend/rust-lib/flowy-server/src/supabase/api/user.rs @@ -6,11 +6,10 @@ use std::sync::{Arc, Weak}; use std::time::Duration; use anyhow::Error; -use collab::core::collab::MutexCollab; +use arc_swap::ArcSwapOption; use collab::core::origin::CollabOrigin; use collab::preclude::Collab; use collab_entity::{CollabObject, CollabType}; -use parking_lot::RwLock; use serde_json::Value; use tokio::sync::oneshot::channel; use tokio_retry::strategy::FixedInterval; @@ -44,7 +43,7 @@ use crate::AppFlowyEncryption; pub struct SupabaseUserServiceImpl { server: T, realtime_event_handlers: Vec>, - user_update_rx: RwLock>, + user_update_rx: ArcSwapOption, } impl SupabaseUserServiceImpl { @@ -56,7 +55,7 @@ impl SupabaseUserServiceImpl { Self { server, realtime_event_handlers, - user_update_rx: RwLock::new(user_update_rx), + user_update_rx: ArcSwapOption::from(user_update_rx.map(Arc::new)), } } } @@ -306,7 +305,8 @@ where } fn subscribe_user_update(&self) -> Option { - self.user_update_rx.write().take() + let rx = self.user_update_rx.swap(None)?; + Arc::into_inner(rx) } fn reset_workspace(&self, collab_object: CollabObject) -> FutureResult<(), FlowyError> { @@ -647,7 +647,7 @@ impl RealtimeEventHandler for RealtimeCollabUpdateHandler { serde_json::from_value::(event.new.clone()) { if let Some(sender_by_oid) = self.sender_by_oid.upgrade() { - if let Some(sender) = sender_by_oid.read().get(collab_update.oid.as_str()) { + if let Some(sender) = sender_by_oid.get(collab_update.oid.as_str()) { tracing::trace!( "current device: {}, event device: {}", self.device_id, @@ -688,15 +688,16 @@ impl RealtimeEventHandler for RealtimeCollabUpdateHandler { fn default_workspace_doc_state(collab_object: &CollabObject) -> Vec { let workspace_id = collab_object.object_id.clone(); - let 
collab = Arc::new(MutexCollab::new(Collab::new_with_origin( - CollabOrigin::Empty, - &collab_object.object_id, - vec![], - false, - ))); + let collab = + Collab::new_with_origin(CollabOrigin::Empty, &collab_object.object_id, vec![], false); let workspace = Workspace::new(workspace_id, "My workspace".to_string(), collab_object.uid); - let folder = Folder::create(collab_object.uid, collab, None, FolderData::new(workspace)); - folder.encode_collab_v1().unwrap().doc_state.to_vec() + let folder = Folder::open_with( + collab_object.uid, + collab, + None, + Some(FolderData::new(workspace)), + ); + folder.encode_collab().unwrap().doc_state.to_vec() } fn oauth_params_from_box_any(any: BoxAny) -> Result { diff --git a/frontend/rust-lib/flowy-server/src/supabase/file_storage/plan.rs b/frontend/rust-lib/flowy-server/src/supabase/file_storage/plan.rs new file mode 100644 index 0000000000..39a33c8853 --- /dev/null +++ b/frontend/rust-lib/flowy-server/src/supabase/file_storage/plan.rs @@ -0,0 +1,37 @@ +use std::sync::Weak; + +use flowy_error::FlowyError; +use flowy_storage_pub::cloud::{FileStoragePlan, StorageObject}; +use lib_infra::future::FutureResult; + +use crate::supabase::api::RESTfulPostgresServer; + +#[derive(Default)] +pub struct FileStoragePlanImpl { + #[allow(dead_code)] + uid: Weak>, + #[allow(dead_code)] + postgrest: Option>, +} + +impl FileStoragePlanImpl { + pub fn new(uid: Weak>, postgrest: Option>) -> Self { + Self { uid, postgrest } + } +} + +impl FileStoragePlan for FileStoragePlanImpl { + fn storage_size(&self) -> FutureResult { + // 1 GB + FutureResult::new(async { Ok(1024 * 1024 * 1024) }) + } + + fn maximum_file_size(&self) -> FutureResult { + // 5 MB + FutureResult::new(async { Ok(5 * 1024 * 1024) }) + } + + fn check_upload_object(&self, _object: &StorageObject) -> FutureResult<(), FlowyError> { + FutureResult::new(async { Ok(()) }) + } +} diff --git a/frontend/rust-lib/flowy-server/src/supabase/server.rs 
b/frontend/rust-lib/flowy-server/src/supabase/server.rs index b02d7a9030..00dd46e8ba 100644 --- a/frontend/rust-lib/flowy-server/src/supabase/server.rs +++ b/frontend/rust-lib/flowy-server/src/supabase/server.rs @@ -1,10 +1,10 @@ +use arc_swap::ArcSwapOption; use flowy_search_pub::cloud::SearchCloudService; -use std::collections::HashMap; use std::sync::{Arc, Weak}; use collab_entity::CollabObject; use collab_plugins::cloud_storage::{RemoteCollabStorage, RemoteUpdateSender}; -use parking_lot::RwLock; +use dashmap::DashMap; use flowy_database_pub::cloud::{DatabaseAIService, DatabaseCloudService}; use flowy_document_pub::cloud::DocumentCloudService; @@ -55,7 +55,7 @@ impl PgPoolMode { } } -pub type CollabUpdateSenderByOid = RwLock>; +pub type CollabUpdateSenderByOid = DashMap; /// Supabase server is used to provide the implementation of the [AppFlowyServer] trait. /// It contains the configuration of the supabase server and the postgres server. pub struct SupabaseServer { @@ -63,15 +63,15 @@ pub struct SupabaseServer { config: SupabaseConfiguration, device_id: String, #[allow(dead_code)] - uid: Arc>>, + uid: Arc>, collab_update_sender: Arc, - restful_postgres: Arc>>>, + restful_postgres: Arc>, encryption: Weak, } impl SupabaseServer { pub fn new( - uid: Arc>>, + uid: Arc>, config: SupabaseConfiguration, enable_sync: bool, device_id: String, @@ -90,7 +90,7 @@ impl SupabaseServer { config, device_id, collab_update_sender, - restful_postgres: Arc::new(RwLock::new(restful_postgres)), + restful_postgres: Arc::new(ArcSwapOption::from(restful_postgres)), encryption, uid, } @@ -102,12 +102,18 @@ impl AppFlowyServer for SupabaseServer { tracing::info!("{} supabase sync: {}", uid, enable); if enable { - if self.restful_postgres.read().is_none() { - let postgres = RESTfulPostgresServer::new(self.config.clone(), self.encryption.clone()); - *self.restful_postgres.write() = Some(Arc::new(postgres)); - } + self.restful_postgres.rcu(|old| match old { + Some(existing) => 
Some(existing.clone()), + None => { + let postgres = Arc::new(RESTfulPostgresServer::new( + self.config.clone(), + self.encryption.clone(), + )); + Some(postgres) + }, + }); } else { - *self.restful_postgres.write() = None; + self.restful_postgres.store(None); } } @@ -158,7 +164,6 @@ impl AppFlowyServer for SupabaseServer { let (tx, rx) = tokio::sync::mpsc::unbounded_channel(); self .collab_update_sender - .write() .insert(collab_object.object_id.clone(), tx); Some(Arc::new(SupabaseCollabStorageImpl::new( diff --git a/frontend/rust-lib/flowy-server/tests/af_cloud_test/util.rs b/frontend/rust-lib/flowy-server/tests/af_cloud_test/util.rs index 71dacfab04..ecf34ec31d 100644 --- a/frontend/rust-lib/flowy-server/tests/af_cloud_test/util.rs +++ b/frontend/rust-lib/flowy-server/tests/af_cloud_test/util.rs @@ -8,7 +8,6 @@ use uuid::Uuid; use flowy_server::af_cloud::define::ServerUser; use flowy_server::af_cloud::AppFlowyCloudServer; -use flowy_server::supabase::define::{USER_DEVICE_ID, USER_SIGN_IN_URL}; use flowy_server_pub::af_cloud_config::AFCloudConfiguration; use crate::setup_log; @@ -82,10 +81,10 @@ pub async fn af_cloud_sign_up_param( ) -> HashMap { let mut params = HashMap::new(); params.insert( - USER_SIGN_IN_URL.to_string(), + "sign_in_url".to_string(), generate_sign_in_url(email, config).await, ); - params.insert(USER_DEVICE_ID.to_string(), Uuid::new_v4().to_string()); + params.insert("device_id".to_string(), Uuid::new_v4().to_string()); params } diff --git a/frontend/rust-lib/flowy-sqlite/Cargo.toml b/frontend/rust-lib/flowy-sqlite/Cargo.toml index e49452df75..0e85aebee5 100644 --- a/frontend/rust-lib/flowy-sqlite/Cargo.toml +++ b/frontend/rust-lib/flowy-sqlite/Cargo.toml @@ -13,7 +13,6 @@ tracing.workspace = true serde.workspace = true serde_json.workspace = true anyhow.workspace = true -parking_lot.workspace = true r2d2 = "0.8.10" libsqlite3-sys = { version = "0.27.0", features = ["bundled"] } diff --git a/frontend/rust-lib/flowy-sqlite/src/kv/kv.rs 
b/frontend/rust-lib/flowy-sqlite/src/kv/kv.rs index da35facaf2..799f5b0666 100644 --- a/frontend/rust-lib/flowy-sqlite/src/kv/kv.rs +++ b/frontend/rust-lib/flowy-sqlite/src/kv/kv.rs @@ -46,8 +46,8 @@ impl KVStorePreferences { } /// Set a object that implements [Serialize] trait of a key - pub fn set_object(&self, key: &str, value: T) -> Result<(), anyhow::Error> { - let value = serde_json::to_string(&value)?; + pub fn set_object(&self, key: &str, value: &T) -> Result<(), anyhow::Error> { + let value = serde_json::to_string(value)?; self.set_key_value(key, Some(value))?; Ok(()) } @@ -175,7 +175,7 @@ mod tests { name: "nathan".to_string(), age: 30, }; - store.set_object("1", person.clone()).unwrap(); + store.set_object("1", &person.clone()).unwrap(); assert_eq!(store.get_object::("1").unwrap(), person); } } diff --git a/frontend/rust-lib/flowy-user-pub/src/cloud.rs b/frontend/rust-lib/flowy-user-pub/src/cloud.rs index 117efac414..f20d66edbe 100644 --- a/frontend/rust-lib/flowy-user-pub/src/cloud.rs +++ b/frontend/rust-lib/flowy-user-pub/src/cloud.rs @@ -7,8 +7,8 @@ use client_api::entity::billing_dto::WorkspaceUsageAndLimit; pub use client_api::entity::{AFWorkspaceSettings, AFWorkspaceSettingsChange}; use collab_entity::{CollabObject, CollabType}; use flowy_error::{internal_error, ErrorCode, FlowyError}; +use lib_infra::async_trait::async_trait; use lib_infra::box_any::BoxAny; -use lib_infra::future::FutureResult; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::collections::HashMap; @@ -119,130 +119,131 @@ pub trait UserCloudServiceProvider: Send + Sync { /// Provide the generic interface for the user cloud service /// The user cloud service is responsible for the user authentication and user profile management #[allow(unused_variables)] +#[async_trait] pub trait UserCloudService: Send + Sync + 'static { /// Sign up a new account. /// The type of the params is defined the this trait's implementation. 
/// Use the `unbox_or_error` of the [BoxAny] to get the params. - fn sign_up(&self, params: BoxAny) -> FutureResult; + async fn sign_up(&self, params: BoxAny) -> Result; /// Sign in an account /// The type of the params is defined the this trait's implementation. - fn sign_in(&self, params: BoxAny) -> FutureResult; + async fn sign_in(&self, params: BoxAny) -> Result; /// Sign out an account - fn sign_out(&self, token: Option) -> FutureResult<(), FlowyError>; + async fn sign_out(&self, token: Option) -> Result<(), FlowyError>; /// Generate a sign in url for the user with the given email /// Currently, only use the admin client for testing - fn generate_sign_in_url_with_email(&self, email: &str) -> FutureResult; + async fn generate_sign_in_url_with_email(&self, email: &str) -> Result; - fn create_user(&self, email: &str, password: &str) -> FutureResult<(), FlowyError>; + async fn create_user(&self, email: &str, password: &str) -> Result<(), FlowyError>; - fn sign_in_with_password( + async fn sign_in_with_password( &self, email: &str, password: &str, - ) -> FutureResult; + ) -> Result; - fn sign_in_with_magic_link(&self, email: &str, redirect_to: &str) - -> FutureResult<(), FlowyError>; + async fn sign_in_with_magic_link(&self, email: &str, redirect_to: &str) + -> Result<(), FlowyError>; /// When the user opens the OAuth URL, it redirects to the corresponding provider's OAuth web page. /// After the user is authenticated, the browser will open a deep link to the AppFlowy app (iOS, macOS, etc.), /// which will call [Client::sign_in_with_url]generate_sign_in_url_with_email to sign in. /// /// For example, the OAuth URL on Google looks like `https://appflowy.io/authorize?provider=google`. 
- fn generate_oauth_url_with_provider(&self, provider: &str) -> FutureResult; + async fn generate_oauth_url_with_provider(&self, provider: &str) -> Result; /// Using the user's token to update the user information - fn update_user( + async fn update_user( &self, credential: UserCredentials, params: UpdateUserProfileParams, - ) -> FutureResult<(), FlowyError>; + ) -> Result<(), FlowyError>; /// Get the user information using the user's token or uid /// return None if the user is not found - fn get_user_profile(&self, credential: UserCredentials) -> FutureResult; + async fn get_user_profile(&self, credential: UserCredentials) -> Result; - fn open_workspace(&self, workspace_id: &str) -> FutureResult; + async fn open_workspace(&self, workspace_id: &str) -> Result; /// Return the all the workspaces of the user - fn get_all_workspace(&self, uid: i64) -> FutureResult, FlowyError>; + async fn get_all_workspace(&self, uid: i64) -> Result, FlowyError>; /// Creates a new workspace for the user. /// Returns the new workspace if successful - fn create_workspace(&self, workspace_name: &str) -> FutureResult; + async fn create_workspace(&self, workspace_name: &str) -> Result; // Updates the workspace name and icon - fn patch_workspace( + async fn patch_workspace( &self, workspace_id: &str, new_workspace_name: Option<&str>, new_workspace_icon: Option<&str>, - ) -> FutureResult<(), FlowyError>; + ) -> Result<(), FlowyError>; /// Deletes a workspace owned by the user. 
- fn delete_workspace(&self, workspace_id: &str) -> FutureResult<(), FlowyError>; + async fn delete_workspace(&self, workspace_id: &str) -> Result<(), FlowyError>; - fn invite_workspace_member( + async fn invite_workspace_member( &self, invitee_email: String, workspace_id: String, role: Role, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) + ) -> Result<(), FlowyError> { + Ok(()) } - fn list_workspace_invitations( + async fn list_workspace_invitations( &self, filter: Option, - ) -> FutureResult, FlowyError> { - FutureResult::new(async { Ok(vec![]) }) + ) -> Result, FlowyError> { + Ok(vec![]) } - fn accept_workspace_invitations(&self, invite_id: String) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) + async fn accept_workspace_invitations(&self, invite_id: String) -> Result<(), FlowyError> { + Ok(()) } - fn remove_workspace_member( + async fn remove_workspace_member( &self, user_email: String, workspace_id: String, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) + ) -> Result<(), FlowyError> { + Ok(()) } - fn update_workspace_member( + async fn update_workspace_member( &self, user_email: String, workspace_id: String, role: Role, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) + ) -> Result<(), FlowyError> { + Ok(()) } - fn get_workspace_members( + async fn get_workspace_members( &self, workspace_id: String, - ) -> FutureResult, FlowyError> { - FutureResult::new(async { Ok(vec![]) }) + ) -> Result, FlowyError> { + Ok(vec![]) } - fn get_workspace_member( + async fn get_workspace_member( &self, workspace_id: String, uid: i64, - ) -> FutureResult { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result { + Err(FlowyError::not_support()) } - fn get_user_awareness_doc_state( + async fn get_user_awareness_doc_state( &self, uid: i64, workspace_id: &str, object_id: &str, - ) -> FutureResult, FlowyError>; + ) -> Result, FlowyError>; fn 
receive_realtime_event(&self, _json: Value) {} @@ -250,110 +251,110 @@ pub trait UserCloudService: Send + Sync + 'static { None } - fn reset_workspace(&self, collab_object: CollabObject) -> FutureResult<(), FlowyError>; + async fn reset_workspace(&self, collab_object: CollabObject) -> Result<(), FlowyError>; - fn create_collab_object( + async fn create_collab_object( &self, collab_object: &CollabObject, data: Vec, - ) -> FutureResult<(), FlowyError>; + ) -> Result<(), FlowyError>; - fn batch_create_collab_object( + async fn batch_create_collab_object( &self, workspace_id: &str, objects: Vec, - ) -> FutureResult<(), FlowyError>; + ) -> Result<(), FlowyError>; - fn leave_workspace(&self, workspace_id: &str) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Ok(()) }) + async fn leave_workspace(&self, workspace_id: &str) -> Result<(), FlowyError> { + Ok(()) } - fn subscribe_workspace( + async fn subscribe_workspace( &self, workspace_id: String, recurring_interval: RecurringInterval, workspace_subscription_plan: SubscriptionPlan, success_url: String, - ) -> FutureResult { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result { + Err(FlowyError::not_support()) } - fn get_workspace_member_info( + async fn get_workspace_member_info( &self, workspace_id: &str, uid: i64, - ) -> FutureResult { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result { + Err(FlowyError::not_support()) } /// Get all subscriptions for all workspaces for a user (email) - fn get_workspace_subscriptions( + async fn get_workspace_subscriptions( &self, - ) -> FutureResult, FlowyError> { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result, FlowyError> { + Err(FlowyError::not_support()) } /// Get the workspace subscriptions for a workspace - fn get_workspace_subscription_one( + async fn get_workspace_subscription_one( &self, workspace_id: String, - ) -> FutureResult, FlowyError> { - FutureResult::new(async { 
Err(FlowyError::not_support()) }) + ) -> Result, FlowyError> { + Err(FlowyError::not_support()) } - fn cancel_workspace_subscription( + async fn cancel_workspace_subscription( &self, workspace_id: String, plan: SubscriptionPlan, reason: Option, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result<(), FlowyError> { + Err(FlowyError::not_support()) } - fn get_workspace_plan( + async fn get_workspace_plan( &self, workspace_id: String, - ) -> FutureResult, FlowyError> { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result, FlowyError> { + Err(FlowyError::not_support()) } - fn get_workspace_usage( + async fn get_workspace_usage( &self, workspace_id: String, - ) -> FutureResult { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result { + Err(FlowyError::not_support()) } - fn get_billing_portal_url(&self) -> FutureResult { - FutureResult::new(async { Err(FlowyError::not_support()) }) + async fn get_billing_portal_url(&self) -> Result { + Err(FlowyError::not_support()) } - fn update_workspace_subscription_payment_period( + async fn update_workspace_subscription_payment_period( &self, workspace_id: String, plan: SubscriptionPlan, recurring_interval: RecurringInterval, - ) -> FutureResult<(), FlowyError> { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result<(), FlowyError> { + Err(FlowyError::not_support()) } - fn get_subscription_plan_details(&self) -> FutureResult, FlowyError> { - FutureResult::new(async { Err(FlowyError::not_support()) }) + async fn get_subscription_plan_details(&self) -> Result, FlowyError> { + Err(FlowyError::not_support()) } - fn get_workspace_setting( + async fn get_workspace_setting( &self, workspace_id: &str, - ) -> FutureResult { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result { + Err(FlowyError::not_support()) } - fn update_workspace_setting( + async fn update_workspace_setting( &self, 
workspace_id: &str, workspace_settings: AFWorkspaceSettingsChange, - ) -> FutureResult { - FutureResult::new(async { Err(FlowyError::not_support()) }) + ) -> Result { + Err(FlowyError::not_support()) } } diff --git a/frontend/rust-lib/flowy-user-pub/src/entities.rs b/frontend/rust-lib/flowy-user-pub/src/entities.rs index 95a38ab3c0..4b2ab3fdd0 100644 --- a/frontend/rust-lib/flowy-user-pub/src/entities.rs +++ b/frontend/rust-lib/flowy-user-pub/src/entities.rs @@ -346,8 +346,6 @@ pub enum Authenticator { /// Currently not supported. It will be supported in the future when the /// [AppFlowy-Server](https://github.com/AppFlowy-IO/AppFlowy-Server) ready. AppFlowyCloud = 1, - /// It uses Supabase as the backend. - Supabase = 2, } impl Default for Authenticator { @@ -371,7 +369,6 @@ impl From for Authenticator { match value { 0 => Authenticator::Local, 1 => Authenticator::AppFlowyCloud, - 2 => Authenticator::Supabase, _ => Authenticator::Local, } } diff --git a/frontend/rust-lib/flowy-user/Cargo.toml b/frontend/rust-lib/flowy-user/Cargo.toml index f2eb89dc4c..3894ce0ee6 100644 --- a/frontend/rust-lib/flowy-user/Cargo.toml +++ b/frontend/rust-lib/flowy-user/Cargo.toml @@ -26,9 +26,11 @@ collab-plugins = { workspace = true } flowy-user-pub = { workspace = true } client-api = { workspace = true } anyhow.workspace = true +arc-swap.workspace = true +dashmap.workspace = true tracing.workspace = true bytes.workspace = true -serde.workspace = true +serde = { workspace = true, features = ["rc"] } serde_json.workspace = true serde_repr.workspace = true protobuf.workspace = true @@ -36,7 +38,6 @@ lazy_static = "1.4.0" diesel.workspace = true diesel_derives = { version = "2.1.0", features = ["sqlite", "r2d2"] } once_cell = "1.17.1" -parking_lot.workspace = true strum = "0.25" strum_macros = "0.25.2" tokio = { workspace = true, features = ["rt"] } diff --git a/frontend/rust-lib/flowy-user/src/anon_user/migrate_anon_user_collab.rs 
b/frontend/rust-lib/flowy-user/src/anon_user/migrate_anon_user_collab.rs index a7adcbe803..ae7d5329bf 100644 --- a/frontend/rust-lib/flowy-user/src/anon_user/migrate_anon_user_collab.rs +++ b/frontend/rust-lib/flowy-user/src/anon_user/migrate_anon_user_collab.rs @@ -3,7 +3,7 @@ use std::ops::{Deref, DerefMut}; use std::sync::Arc; use anyhow::anyhow; -use collab::core::collab::{DataSource, MutexCollab}; +use collab::core::collab::DataSource; use collab::core::origin::{CollabClient, CollabOrigin}; use collab::preclude::Collab; use collab_database::database::{ @@ -13,7 +13,6 @@ use collab_database::rows::{database_row_document_id_from_row_id, mut_row_with_c use collab_database::workspace_database::DatabaseMetaList; use collab_folder::{Folder, UserId}; use collab_plugins::local_storage::kv::KVTransactionDB; -use parking_lot::{Mutex, RwLock}; use tracing::info; use collab_integrate::{CollabKVAction, CollabKVDB, PersistenceError}; @@ -34,16 +33,12 @@ pub fn migration_anon_user_on_sign_up( new_collab_db .with_write_txn(|new_collab_w_txn| { let old_collab_r_txn = old_collab_db.read_txn(); - let old_to_new_id_map = Arc::new(Mutex::new(OldToNewIdMap::new())); + let mut old_to_new_id_map = OldToNewIdMap::new(); - migrate_user_awareness( - old_to_new_id_map.lock().deref_mut(), - old_user, - new_user_session, - )?; + migrate_user_awareness(&mut old_to_new_id_map, old_user, new_user_session)?; migrate_database_with_views_object( - &mut old_to_new_id_map.lock(), + &mut old_to_new_id_map, old_user, &old_collab_r_txn, new_user_session, @@ -62,20 +57,20 @@ pub fn migration_anon_user_on_sign_up( }); info!("migrate collab objects: {:?}", object_ids.len()); - let collab_by_oid = make_collab_by_oid(old_user, &old_collab_r_txn, &object_ids); + let mut collab_by_oid = make_collab_by_oid(old_user, &old_collab_r_txn, &object_ids); migrate_databases( - &old_to_new_id_map, + &mut old_to_new_id_map, new_user_session, new_collab_w_txn, &mut object_ids, - &collab_by_oid, + &mut collab_by_oid, 
)?; // Migrates the folder, replacing all existing view IDs with new ones. // This function handles the process of migrating folder data between two users. As a part of this migration, // all existing view IDs associated with the old user will be replaced by new IDs relevant to the new user. migrate_workspace_folder( - &mut old_to_new_id_map.lock(), + &mut old_to_new_id_map, old_user, &old_collab_r_txn, new_user_session, @@ -85,7 +80,7 @@ pub fn migration_anon_user_on_sign_up( // Migrate other collab objects for object_id in &object_ids { if let Some(collab) = collab_by_oid.get(object_id) { - let new_object_id = old_to_new_id_map.lock().exchange_new_id(object_id); + let new_object_id = old_to_new_id_map.exchange_new_id(object_id); tracing::debug!("migrate from: {}, to: {}", object_id, new_object_id,); migrate_collab_object( collab, @@ -147,27 +142,26 @@ where PersistenceError: From, PersistenceError: From, { - let database_with_views_collab = Collab::new( + let mut database_with_views_collab = Collab::new( old_user.session.user_id, &old_user.session.user_workspace.database_indexer_id, "phantom", vec![], false, ); - database_with_views_collab.with_origin_transact_mut(|txn| { - old_collab_r_txn.load_doc_with_txn( - old_user.session.user_id, - &old_user.session.user_workspace.database_indexer_id, - txn, - ) - })?; + old_collab_r_txn.load_doc_with_txn( + old_user.session.user_id, + &old_user.session.user_workspace.database_indexer_id, + &mut database_with_views_collab.transact_mut(), + )?; let new_uid = new_user_session.user_id; let new_object_id = &new_user_session.user_workspace.database_indexer_id; - let array = DatabaseMetaList::from_collab(&database_with_views_collab); - for database_meta in array.get_all_database_meta() { - array.update_database(&database_meta.database_id, |update| { + let array = DatabaseMetaList::new(&mut database_with_views_collab); + let mut txn = database_with_views_collab.transact_mut(); + for database_meta in 
array.get_all_database_meta(&txn) { + array.update_database(&mut txn, &database_meta.database_id, |update| { let new_linked_views = update .linked_views .iter() @@ -178,7 +172,6 @@ where }) } - let txn = database_with_views_collab.transact(); if let Err(err) = new_collab_w_txn.create_new_doc(new_uid, new_object_id, &txn) { tracing::error!("🔴migrate database storage failed: {:?}", err); } @@ -216,17 +209,15 @@ where let new_uid = new_user_session.user_id; let new_workspace_id = &new_user_session.user_workspace.id; - let old_folder_collab = Collab::new(old_uid, old_workspace_id, "phantom", vec![], false); - old_folder_collab.with_origin_transact_mut(|txn| { - old_collab_r_txn.load_doc_with_txn(old_uid, old_workspace_id, txn) - })?; + let mut old_folder_collab = Collab::new(old_uid, old_workspace_id, "phantom", vec![], false); + old_collab_r_txn.load_doc_with_txn( + old_uid, + old_workspace_id, + &mut old_folder_collab.transact_mut(), + )?; let old_user_id = UserId::from(old_uid); - let old_folder = Folder::open( - old_user_id.clone(), - Arc::new(MutexCollab::new(old_folder_collab)), - None, - ) - .map_err(|err| PersistenceError::InvalidData(err.to_string()))?; + let old_folder = Folder::open(old_user_id.clone(), old_folder_collab, None) + .map_err(|err| PersistenceError::InvalidData(err.to_string()))?; let mut folder_data = old_folder .get_folder_data(old_workspace_id) @@ -310,14 +301,12 @@ where let new_folder_collab = Collab::new_with_source(origin, new_workspace_id, DataSource::Disk, vec![], false) .map_err(|err| PersistenceError::Internal(err.into()))?; - let mutex_collab = Arc::new(MutexCollab::new(new_folder_collab)); let new_user_id = UserId::from(new_uid); info!("migrated folder: {:?}", folder_data); - let _ = Folder::create(new_user_id, mutex_collab.clone(), None, folder_data); + let folder = Folder::open_with(new_user_id, new_folder_collab, None, Some(folder_data)); { - let mutex_collab = mutex_collab.lock(); - let txn = mutex_collab.transact(); + let txn = 
folder.transact(); if let Err(err) = new_collab_w_txn.create_new_doc(new_uid, new_workspace_id, &txn) { tracing::error!("🔴migrate folder failed: {:?}", err); } @@ -338,11 +327,11 @@ fn migrate_user_awareness( } fn migrate_databases<'a, W>( - old_to_new_id_map: &Arc>, + old_to_new_id_map: &mut OldToNewIdMap, new_user_session: &Session, new_collab_w_txn: &'a W, object_ids: &mut Vec, - collab_by_oid: &HashMap, + collab_by_oid: &mut HashMap, ) -> Result<(), PersistenceError> where W: CollabKVAction<'a>, @@ -350,28 +339,23 @@ where { // Migrate databases let mut database_object_ids = vec![]; - let imported_database_row_object_ids: RwLock>> = - RwLock::new(HashMap::new()); + let mut imported_database_row_object_ids: HashMap> = HashMap::new(); - for object_id in &mut *object_ids { - if let Some(collab) = collab_by_oid.get(object_id) { + for object_id in object_ids.iter() { + if let Some(collab) = collab_by_oid.get_mut(object_id) { if !is_database_collab(collab) { continue; } database_object_ids.push(object_id.clone()); reset_inline_view_id(collab, |old_inline_view_id| { - old_to_new_id_map - .lock() - .exchange_new_id(&old_inline_view_id) + old_to_new_id_map.exchange_new_id(&old_inline_view_id) }); mut_database_views_with_collab(collab, |database_view| { let old_database_id = database_view.database_id.clone(); - let new_view_id = old_to_new_id_map.lock().exchange_new_id(&database_view.id); - let new_database_id = old_to_new_id_map - .lock() - .exchange_new_id(&database_view.database_id); + let new_view_id = old_to_new_id_map.exchange_new_id(&database_view.id); + let new_database_id = old_to_new_id_map.exchange_new_id(&database_view.database_id); tracing::trace!( "migrate database view id from: {}, to: {}", @@ -389,7 +373,7 @@ where database_view.row_orders.iter_mut().for_each(|row_order| { let old_row_id = String::from(row_order.id.clone()); let old_row_document_id = database_row_document_id_from_row_id(&old_row_id); - let new_row_id = 
old_to_new_id_map.lock().exchange_new_id(&old_row_id); + let new_row_id = old_to_new_id_map.exchange_new_id(&old_row_id); let new_row_document_id = database_row_document_id_from_row_id(&new_row_id); tracing::debug!("migrate row id: {} to {}", row_order.id, new_row_id); tracing::debug!( @@ -397,20 +381,17 @@ where old_row_document_id, new_row_document_id ); - old_to_new_id_map - .lock() - .insert(old_row_document_id, new_row_document_id); + old_to_new_id_map.insert(old_row_document_id, new_row_document_id); row_order.id = RowId::from(new_row_id); imported_database_row_object_ids - .write() .entry(old_database_id.clone()) .or_default() .insert(old_row_id); }); }); - let new_object_id = old_to_new_id_map.lock().exchange_new_id(object_id); + let new_object_id = old_to_new_id_map.exchange_new_id(object_id); tracing::debug!( "migrate database from: {}, to: {}", object_id, @@ -425,7 +406,6 @@ where } } - let imported_database_row_object_ids = imported_database_row_object_ids.read(); // remove the database object ids from the object ids object_ids.retain(|id| !database_object_ids.contains(id)); @@ -436,11 +416,11 @@ where .flatten() .any(|row_id| row_id == id) }); - for (database_id, imported_row_ids) in &*imported_database_row_object_ids { + for (database_id, imported_row_ids) in imported_database_row_object_ids { for imported_row_id in imported_row_ids { - if let Some(imported_collab) = collab_by_oid.get(imported_row_id) { - let new_database_id = old_to_new_id_map.lock().exchange_new_id(database_id); - let new_row_id = old_to_new_id_map.lock().exchange_new_id(imported_row_id); + if let Some(imported_collab) = collab_by_oid.get_mut(&imported_row_id) { + let new_database_id = old_to_new_id_map.exchange_new_id(&database_id); + let new_row_id = old_to_new_id_map.exchange_new_id(&imported_row_id); info!( "import database row from: {}, to: {}", imported_row_id, new_row_id, @@ -458,11 +438,9 @@ where // imported_collab_by_oid contains all the collab object ids, including the 
row document collab object ids. // So, if the id exist in the imported_collab_by_oid, it means the row document collab object is exist. - let imported_row_document_id = database_row_document_id_from_row_id(imported_row_id); + let imported_row_document_id = database_row_document_id_from_row_id(&imported_row_id); if collab_by_oid.get(&imported_row_document_id).is_some() { - let _ = old_to_new_id_map - .lock() - .exchange_new_id(&imported_row_document_id); + let _ = old_to_new_id_map.exchange_new_id(&imported_row_document_id); } } } @@ -481,21 +459,21 @@ where { let mut collab_by_oid = HashMap::new(); for object_id in object_ids { - let collab = Collab::new( + let mut collab = Collab::new( old_user.session.user_id, object_id, "migrate_device", vec![], false, ); - match collab.with_origin_transact_mut(|txn| { - old_collab_r_txn.load_doc_with_txn(old_user.session.user_id, &object_id, txn) - }) { + let mut txn = collab.transact_mut(); + match old_collab_r_txn.load_doc_with_txn(old_user.session.user_id, &object_id, &mut txn) { Ok(_) => { + drop(txn); collab_by_oid.insert(object_id.clone(), collab); }, Err(err) => tracing::error!("🔴Initialize migration collab failed: {:?} ", err), - } + }; } collab_by_oid diff --git a/frontend/rust-lib/flowy-user/src/anon_user/mod.rs b/frontend/rust-lib/flowy-user/src/anon_user/mod.rs index 974850755f..8a65b6fa94 100644 --- a/frontend/rust-lib/flowy-user/src/anon_user/mod.rs +++ b/frontend/rust-lib/flowy-user/src/anon_user/mod.rs @@ -1,5 +1,3 @@ -pub use migrate_anon_user_collab::*; -pub use sync_supabase_user_collab::*; +//pub use migrate_anon_user_collab::*; -mod migrate_anon_user_collab; -mod sync_supabase_user_collab; +//mod migrate_anon_user_collab; diff --git a/frontend/rust-lib/flowy-user/src/anon_user/sync_supabase_user_collab.rs b/frontend/rust-lib/flowy-user/src/anon_user/sync_supabase_user_collab.rs index cee388e77b..7e95d9887c 100644 --- a/frontend/rust-lib/flowy-user/src/anon_user/sync_supabase_user_collab.rs +++ 
b/frontend/rust-lib/flowy-user/src/anon_user/sync_supabase_user_collab.rs @@ -1,18 +1,13 @@ -use std::future::Future; -use std::ops::Deref; -use std::pin::Pin; use std::sync::Arc; use anyhow::{anyhow, Error}; -use collab::core::collab::MutexCollab; -use collab::preclude::Collab; +use collab::preclude::{Collab, ReadTxn, StateVector}; use collab_database::database::get_database_row_ids; use collab_database::rows::database_row_document_id_from_row_id; -use collab_database::workspace_database::{get_all_database_meta, DatabaseMeta}; +use collab_database::workspace_database::{DatabaseMeta, DatabaseMetaList}; use collab_entity::{CollabObject, CollabType}; use collab_folder::{Folder, View, ViewLayout}; use collab_plugins::local_storage::kv::KVTransactionDB; -use parking_lot::Mutex; use collab_integrate::{CollabKVAction, CollabKVDB, PersistenceError}; use flowy_error::FlowyResult; @@ -28,16 +23,14 @@ pub async fn sync_supabase_user_data_to_cloud( ) -> FlowyResult<()> { let workspace_id = new_user_session.user_workspace.id.clone(); let uid = new_user_session.user_id; - let folder = Arc::new( - sync_folder( - uid, - &workspace_id, - device_id, - collab_db, - user_service.clone(), - ) - .await?, - ); + let folder = sync_folder( + uid, + &workspace_id, + device_id, + collab_db, + user_service.clone(), + ) + .await?; let database_records = sync_database_views( uid, @@ -49,12 +42,12 @@ pub async fn sync_supabase_user_data_to_cloud( ) .await; - let views = folder.lock().get_views_belong_to(&workspace_id); + let views = folder.get_views_belong_to(&workspace_id); for view in views { let view_id = view.id.clone(); if let Err(err) = sync_view( uid, - folder.clone(), + &folder, database_records.clone(), workspace_id.to_string(), device_id.to_string(), @@ -72,135 +65,132 @@ pub async fn sync_supabase_user_data_to_cloud( } #[allow(clippy::too_many_arguments)] -fn sync_view( +async fn sync_view( uid: i64, - folder: Arc, + folder: &Folder, database_metas: Vec>, workspace_id: String, 
device_id: String, view: Arc, collab_db: Arc, user_service: Arc, -) -> Pin> + Send + Sync>> { - Box::pin(async move { - let collab_type = collab_type_from_view_layout(&view.layout); - let object_id = object_id_from_view(&view, &database_metas)?; - tracing::debug!( - "sync view: {:?}:{} with object_id: {}", - view.layout, - view.id, - object_id - ); +) -> Result<(), Error> { + let collab_type = collab_type_from_view_layout(&view.layout); + let object_id = object_id_from_view(&view, &database_metas)?; + tracing::debug!( + "sync view: {:?}:{} with object_id: {}", + view.layout, + view.id, + object_id + ); - let collab_object = CollabObject::new( - uid, - object_id, - collab_type, - workspace_id.to_string(), - device_id.clone(), - ); + let collab_object = CollabObject::new( + uid, + object_id, + collab_type, + workspace_id.to_string(), + device_id.clone(), + ); - match view.layout { - ViewLayout::Document => { - let doc_state = get_collab_doc_state(uid, &collab_object, &collab_db)?; + match view.layout { + ViewLayout::Document => { + let doc_state = get_collab_doc_state(uid, &collab_object, &collab_db)?; + tracing::info!( + "sync object: {} with update: {}", + collab_object, + doc_state.len() + ); + user_service + .create_collab_object(&collab_object, doc_state) + .await?; + }, + ViewLayout::Grid | ViewLayout::Board | ViewLayout::Calendar => { + let (database_doc_state, row_ids) = get_database_doc_state(uid, &collab_object, &collab_db)?; + tracing::info!( + "sync object: {} with update: {}", + collab_object, + database_doc_state.len() + ); + user_service + .create_collab_object(&collab_object, database_doc_state) + .await?; + + // sync database's row + for row_id in row_ids { + tracing::debug!("sync row: {}", row_id); + let document_id = database_row_document_id_from_row_id(&row_id); + + let database_row_collab_object = CollabObject::new( + uid, + row_id, + CollabType::DatabaseRow, + workspace_id.to_string(), + device_id.clone(), + ); + let database_row_doc_state = + 
get_collab_doc_state(uid, &database_row_collab_object, &collab_db)?; tracing::info!( "sync object: {} with update: {}", - collab_object, - doc_state.len() + database_row_collab_object, + database_row_doc_state.len() ); - user_service - .create_collab_object(&collab_object, doc_state) - .await?; - }, - ViewLayout::Grid | ViewLayout::Board | ViewLayout::Calendar => { - let (database_doc_state, row_ids) = - get_database_doc_state(uid, &collab_object, &collab_db)?; - tracing::info!( - "sync object: {} with update: {}", - collab_object, - database_doc_state.len() + + let _ = user_service + .create_collab_object(&database_row_collab_object, database_row_doc_state) + .await; + + let database_row_document = CollabObject::new( + uid, + document_id, + CollabType::Document, + workspace_id.to_string(), + device_id.to_string(), ); - user_service - .create_collab_object(&collab_object, database_doc_state) - .await?; - - // sync database's row - for row_id in row_ids { - tracing::debug!("sync row: {}", row_id); - let document_id = database_row_document_id_from_row_id(&row_id); - - let database_row_collab_object = CollabObject::new( - uid, - row_id, - CollabType::DatabaseRow, - workspace_id.to_string(), - device_id.clone(), - ); - let database_row_doc_state = - get_collab_doc_state(uid, &database_row_collab_object, &collab_db)?; + // sync document in the row if exist + if let Ok(document_doc_state) = + get_collab_doc_state(uid, &database_row_document, &collab_db) + { tracing::info!( - "sync object: {} with update: {}", - database_row_collab_object, - database_row_doc_state.len() + "sync database row document: {} with update: {}", + database_row_document, + document_doc_state.len() ); - let _ = user_service - .create_collab_object(&database_row_collab_object, database_row_doc_state) + .create_collab_object(&database_row_document, document_doc_state) .await; - - let database_row_document = CollabObject::new( - uid, - document_id, - CollabType::Document, - workspace_id.to_string(), - 
device_id.to_string(), - ); - // sync document in the row if exist - if let Ok(document_doc_state) = - get_collab_doc_state(uid, &database_row_document, &collab_db) - { - tracing::info!( - "sync database row document: {} with update: {}", - database_row_document, - document_doc_state.len() - ); - let _ = user_service - .create_collab_object(&database_row_document, document_doc_state) - .await; - } } - }, - ViewLayout::Chat => {}, - } - - tokio::task::yield_now().await; - - let child_views = folder.lock().views.get_views_belong_to(&view.id); - for child_view in child_views { - let cloned_child_view = child_view.clone(); - if let Err(err) = Box::pin(sync_view( - uid, - folder.clone(), - database_metas.clone(), - workspace_id.clone(), - device_id.to_string(), - child_view, - collab_db.clone(), - user_service.clone(), - )) - .await - { - tracing::error!( - "🔴sync {:?}:{} failed: {:?}", - cloned_child_view.layout, - cloned_child_view.id, - err - ) } - tokio::task::yield_now().await; + }, + ViewLayout::Chat => {}, + } + + tokio::task::yield_now().await; + + let child_views = folder.get_views_belong_to(&view.id); + for child_view in child_views { + let cloned_child_view = child_view.clone(); + if let Err(err) = Box::pin(sync_view( + uid, + folder, + database_metas.clone(), + workspace_id.clone(), + device_id.to_string(), + child_view, + collab_db.clone(), + user_service.clone(), + )) + .await + { + tracing::error!( + "🔴sync {:?}:{} failed: {:?}", + cloned_child_view.layout, + cloned_child_view.id, + err + ) } - Ok(()) - }) + tokio::task::yield_now().await; + } + Ok(()) } fn get_collab_doc_state( @@ -208,12 +198,12 @@ fn get_collab_doc_state( collab_object: &CollabObject, collab_db: &Arc, ) -> Result, PersistenceError> { - let collab = Collab::new(uid, &collab_object.object_id, "phantom", vec![], false); - let _ = collab.with_origin_transact_mut(|txn| { - collab_db - .read_txn() - .load_doc_with_txn(uid, &collab_object.object_id, txn) - })?; + let mut collab = 
Collab::new(uid, &collab_object.object_id, "phantom", vec![], false); + collab_db.read_txn().load_doc_with_txn( + uid, + &collab_object.object_id, + &mut collab.transact_mut(), + )?; let doc_state = collab .encode_collab_v1(|_| Ok::<(), PersistenceError>(()))? .doc_state; @@ -229,12 +219,12 @@ fn get_database_doc_state( collab_object: &CollabObject, collab_db: &Arc, ) -> Result<(Vec, Vec), PersistenceError> { - let collab = Collab::new(uid, &collab_object.object_id, "phantom", vec![], false); - let _ = collab.with_origin_transact_mut(|txn| { - collab_db - .read_txn() - .load_doc_with_txn(uid, &collab_object.object_id, txn) - })?; + let mut collab = Collab::new(uid, &collab_object.object_id, "phantom", vec![], false); + collab_db.read_txn().load_doc_with_txn( + uid, + &collab_object.object_id, + &mut collab.transact_mut(), + )?; let row_ids = get_database_row_ids(&collab).unwrap_or_default(); let doc_state = collab @@ -253,22 +243,17 @@ async fn sync_folder( device_id: &str, collab_db: &Arc, user_service: Arc, -) -> Result { +) -> Result { let (folder, update) = { - let collab = Collab::new(uid, workspace_id, "phantom", vec![], false); + let mut collab = Collab::new(uid, workspace_id, "phantom", vec![], false); // Use the temporary result to short the lifetime of the TransactionMut - collab.with_origin_transact_mut(|txn| { - collab_db - .read_txn() - .load_doc_with_txn(uid, workspace_id, txn) - })?; + collab_db + .read_txn() + .load_doc_with_txn(uid, workspace_id, &mut collab.transact_mut())?; let doc_state = collab .encode_collab_v1(|_| Ok::<(), PersistenceError>(()))? 
.doc_state; - ( - MutexFolder::new(Folder::open(uid, Arc::new(MutexCollab::new(collab)), None)?), - doc_state, - ) + (Folder::open(uid, collab, None)?, doc_state) }; let collab_object = CollabObject::new( @@ -311,49 +296,38 @@ async fn sync_database_views( // Use the temporary result to short the lifetime of the TransactionMut let result = { - let collab = Collab::new(uid, database_views_aggregate_id, "phantom", vec![], false); - collab - .with_origin_transact_mut(|txn| { - collab_db - .read_txn() - .load_doc_with_txn(uid, database_views_aggregate_id, txn) - }) + let mut collab = Collab::new(uid, database_views_aggregate_id, "phantom", vec![], false); + let meta_list = DatabaseMetaList::new(&mut collab); + let mut txn = collab.transact_mut(); + collab_db + .read_txn() + .load_doc_with_txn(uid, database_views_aggregate_id, &mut txn) .map(|_| { - ( - get_all_database_meta(&collab), - collab - .encode_collab_v1(|_| Ok::<(), PersistenceError>(())) - .unwrap() - .doc_state, - ) + let records = meta_list.get_all_database_meta(&txn); + let doc_state = txn.encode_state_as_update_v2(&StateVector::default()); + (records, doc_state) }) }; - - if let Ok((records, doc_state)) = result { - let _ = user_service - .create_collab_object(&collab_object, doc_state.to_vec()) - .await; - records.into_iter().map(Arc::new).collect() - } else { - vec![] + match result { + Ok((records, doc_state)) => { + if let Err(e) = user_service + .create_collab_object(&collab_object, doc_state) + .await + { + tracing::error!( + "sync database views failed to create collab object: {:?}", + e + ); + } + records.into_iter().map(Arc::new).collect() + }, + Err(e) => { + tracing::error!("load doc {} failed: {:?}", database_views_aggregate_id, e); + vec![] + }, } } -struct MutexFolder(Mutex); -impl MutexFolder { - pub fn new(folder: Folder) -> Self { - Self(Mutex::new(folder)) - } -} -impl Deref for MutexFolder { - type Target = Mutex; - fn deref(&self) -> &Self::Target { - &self.0 - } -} -unsafe impl Sync 
for MutexFolder {} -unsafe impl Send for MutexFolder {} - fn collab_type_from_view_layout(view_layout: &ViewLayout) -> CollabType { match view_layout { ViewLayout::Document => CollabType::Document, diff --git a/frontend/rust-lib/flowy-user/src/entities/auth.rs b/frontend/rust-lib/flowy-user/src/entities/auth.rs index edad70387b..dbfd9b811a 100644 --- a/frontend/rust-lib/flowy-user/src/entities/auth.rs +++ b/frontend/rust-lib/flowy-user/src/entities/auth.rs @@ -181,17 +181,16 @@ pub struct OauthProviderDataPB { pub oauth_url: String, } +#[repr(u8)] #[derive(ProtoBuf_Enum, Eq, PartialEq, Debug, Clone)] pub enum AuthenticatorPB { Local = 0, - Supabase = 1, AppFlowyCloud = 2, } impl From for AuthenticatorPB { fn from(auth_type: Authenticator) -> Self { match auth_type { - Authenticator::Supabase => AuthenticatorPB::Supabase, Authenticator::Local => AuthenticatorPB::Local, Authenticator::AppFlowyCloud => AuthenticatorPB::AppFlowyCloud, } @@ -201,7 +200,6 @@ impl From for AuthenticatorPB { impl From for Authenticator { fn from(pb: AuthenticatorPB) -> Self { match pb { - AuthenticatorPB::Supabase => Authenticator::Supabase, AuthenticatorPB::Local => Authenticator::Local, AuthenticatorPB::AppFlowyCloud => Authenticator::AppFlowyCloud, } diff --git a/frontend/rust-lib/flowy-user/src/event_handler.rs b/frontend/rust-lib/flowy-user/src/event_handler.rs index 2ecd11608c..c2e0c0d917 100644 --- a/frontend/rust-lib/flowy-user/src/event_handler.rs +++ b/frontend/rust-lib/flowy-user/src/event_handler.rs @@ -161,7 +161,7 @@ pub async fn set_appearance_setting( if setting.theme.is_empty() { setting.theme = APPEARANCE_DEFAULT_THEME.to_string(); } - store_preferences.set_object(APPEARANCE_SETTING_CACHE_KEY, setting)?; + store_preferences.set_object(APPEARANCE_SETTING_CACHE_KEY, &setting)?; Ok(()) } @@ -198,7 +198,7 @@ pub async fn set_date_time_settings( setting.timezone_id = "".to_string(); } - store_preferences.set_object(DATE_TIME_SETTINGS_CACHE_KEY, setting)?; + 
store_preferences.set_object(DATE_TIME_SETTINGS_CACHE_KEY, &setting)?; Ok(()) } @@ -234,7 +234,7 @@ pub async fn set_notification_settings( ) -> Result<(), FlowyError> { let store_preferences = upgrade_store_preferences(store_preferences)?; let setting = data.into_inner(); - store_preferences.set_object(NOTIFICATION_SETTINGS_CACHE_KEY, setting)?; + store_preferences.set_object(NOTIFICATION_SETTINGS_CACHE_KEY, &setting)?; Ok(()) } @@ -374,7 +374,7 @@ pub async fn set_encrypt_secret_handler( EncryptionType::SelfEncryption(data.encryption_sign), ) .await?; - save_cloud_config(data.user_id, &store_preferences, config)?; + save_cloud_config(data.user_id, &store_preferences, &config)?; }, } @@ -448,7 +448,7 @@ pub async fn set_cloud_config_handler( } } - save_cloud_config(session.user_id, &store_preferences, config.clone())?; + save_cloud_config(session.user_id, &store_preferences, &config)?; let payload = CloudSettingPB { enable_sync: config.enable_sync, diff --git a/frontend/rust-lib/flowy-user/src/migrations/document_empty_content.rs b/frontend/rust-lib/flowy-user/src/migrations/document_empty_content.rs index cf59bac68c..8b0b0694b5 100644 --- a/frontend/rust-lib/flowy-user/src/migrations/document_empty_content.rs +++ b/frontend/rust-lib/flowy-user/src/migrations/document_empty_content.rs @@ -1,6 +1,5 @@ use std::sync::Arc; -use collab::core::collab::MutexCollab; use collab::core::origin::{CollabClient, CollabOrigin}; use collab::preclude::Collab; use collab_document::document::Document; @@ -53,8 +52,8 @@ impl UserDataMigration for HistoricalEmptyDocumentMigration { let folder = Folder::open(session.user_id, folder_collab, None) .map_err(|err| PersistenceError::Internal(err.into()))?; - if let Ok(workspace_id) = folder.try_get_workspace_id() { - let migration_views = folder.views.get_views_belong_to(&workspace_id); + if let Some(workspace_id) = folder.get_workspace_id() { + let migration_views = folder.get_views_belong_to(&workspace_id); // For historical reasons, the 
first level documents are empty. So migrate them by inserting // the default document data. for view in migration_views { @@ -87,17 +86,9 @@ where { // If the document is not exist, we don't need to migrate it. if load_collab(user_id, write_txn, &view.id).is_err() { - let collab = Arc::new(MutexCollab::new(Collab::new_with_origin( - origin.clone(), - &view.id, - vec![], - false, - ))); - let document = Document::create_with_data(collab, default_document_data(&view.id))?; - let encode = document - .get_collab() - .lock() - .encode_collab_v1(|_| Ok::<(), PersistenceError>(()))?; + let collab = Collab::new_with_origin(origin.clone(), &view.id, vec![], false); + let document = Document::open_with(collab, Some(default_document_data(&view.id)))?; + let encode = document.encode_collab_v1(|_| Ok::<(), PersistenceError>(()))?; write_txn.flush_doc_with(user_id, &view.id, &encode.doc_state, &encode.state_vector)?; event!( tracing::Level::INFO, diff --git a/frontend/rust-lib/flowy-user/src/migrations/mod.rs b/frontend/rust-lib/flowy-user/src/migrations/mod.rs index d5f83d47c9..bb43426e8a 100644 --- a/frontend/rust-lib/flowy-user/src/migrations/mod.rs +++ b/frontend/rust-lib/flowy-user/src/migrations/mod.rs @@ -1,4 +1,5 @@ use flowy_user_pub::session::Session; +use std::sync::Arc; pub mod document_empty_content; pub mod migration; @@ -9,5 +10,5 @@ pub mod workspace_trash_v1; #[derive(Clone, Debug)] pub struct AnonUser { - pub session: Session, + pub session: Arc, } diff --git a/frontend/rust-lib/flowy-user/src/migrations/util.rs b/frontend/rust-lib/flowy-user/src/migrations/util.rs index f0c4c3f7f7..f432ce05a6 100644 --- a/frontend/rust-lib/flowy-user/src/migrations/util.rs +++ b/frontend/rust-lib/flowy-user/src/migrations/util.rs @@ -1,21 +1,14 @@ -use std::sync::Arc; - -use collab::core::collab::MutexCollab; use collab::preclude::Collab; use collab_integrate::{CollabKVAction, PersistenceError}; use flowy_error::FlowyResult; -pub(crate) fn load_collab<'a, R>( - uid: i64, - 
collab_r_txn: &R, - object_id: &str, -) -> FlowyResult> +pub(crate) fn load_collab<'a, R>(uid: i64, collab_r_txn: &R, object_id: &str) -> FlowyResult where R: CollabKVAction<'a>, PersistenceError: From, { - let collab = Collab::new(uid, object_id, "phantom", vec![], false); - collab.with_origin_transact_mut(|txn| collab_r_txn.load_doc_with_txn(uid, &object_id, txn))?; - Ok(Arc::new(MutexCollab::new(collab))) + let mut collab = Collab::new(uid, object_id, "phantom", vec![], false); + collab_r_txn.load_doc_with_txn(uid, &object_id, &mut collab.transact_mut())?; + Ok(collab) } diff --git a/frontend/rust-lib/flowy-user/src/migrations/workspace_and_favorite_v1.rs b/frontend/rust-lib/flowy-user/src/migrations/workspace_and_favorite_v1.rs index b6d5e3e8ff..e15bc5109c 100644 --- a/frontend/rust-lib/flowy-user/src/migrations/workspace_and_favorite_v1.rs +++ b/frontend/rust-lib/flowy-user/src/migrations/workspace_and_favorite_v1.rs @@ -36,9 +36,11 @@ impl UserDataMigration for FavoriteV1AndWorkspaceArrayMigration { ) -> FlowyResult<()> { collab_db.with_write_txn(|write_txn| { if let Ok(collab) = load_collab(session.user_id, write_txn, &session.user_workspace.id) { - let folder = Folder::open(session.user_id, collab, None) + let mut folder = Folder::open(session.user_id, collab, None) .map_err(|err| PersistenceError::Internal(err.into()))?; - folder.migrate_workspace_to_view(); + folder + .body + .migrate_workspace_to_view(&mut folder.collab.transact_mut()); let favorite_view_ids = folder .get_favorite_v1() @@ -51,7 +53,7 @@ impl UserDataMigration for FavoriteV1AndWorkspaceArrayMigration { } let encode = folder - .encode_collab_v1() + .encode_collab() .map_err(|err| PersistenceError::Internal(err.into()))?; write_txn.flush_doc_with( session.user_id, diff --git a/frontend/rust-lib/flowy-user/src/migrations/workspace_trash_v1.rs b/frontend/rust-lib/flowy-user/src/migrations/workspace_trash_v1.rs index e15f2597b4..168c7e2510 100644 --- 
a/frontend/rust-lib/flowy-user/src/migrations/workspace_trash_v1.rs +++ b/frontend/rust-lib/flowy-user/src/migrations/workspace_trash_v1.rs @@ -34,7 +34,7 @@ impl UserDataMigration for WorkspaceTrashMapToSectionMigration { ) -> FlowyResult<()> { collab_db.with_write_txn(|write_txn| { if let Ok(collab) = load_collab(session.user_id, write_txn, &session.user_workspace.id) { - let folder = Folder::open(session.user_id, collab, None) + let mut folder = Folder::open(session.user_id, collab, None) .map_err(|err| PersistenceError::Internal(err.into()))?; let trash_ids = folder .get_trash_v1() @@ -47,7 +47,7 @@ impl UserDataMigration for WorkspaceTrashMapToSectionMigration { } let encode = folder - .encode_collab_v1() + .encode_collab() .map_err(|err| PersistenceError::Internal(err.into()))?; write_txn.flush_doc_with( session.user_id, diff --git a/frontend/rust-lib/flowy-user/src/services/authenticate_user.rs b/frontend/rust-lib/flowy-user/src/services/authenticate_user.rs index 1df4fda3e2..a0d507a347 100644 --- a/frontend/rust-lib/flowy-user/src/services/authenticate_user.rs +++ b/frontend/rust-lib/flowy-user/src/services/authenticate_user.rs @@ -4,6 +4,9 @@ use crate::services::entities::{UserConfig, UserPaths}; use crate::services::sqlite_sql::user_sql::vacuum_database; use collab_integrate::CollabKVDB; +use arc_swap::ArcSwapOption; +use collab_plugins::local_storage::kv::doc::CollabKVAction; +use collab_plugins::local_storage::kv::KVTransactionDB; use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult}; use flowy_sqlite::kv::KVStorePreferences; use flowy_sqlite::DBConnection; @@ -20,22 +23,22 @@ pub struct AuthenticateUser { pub(crate) database: Arc, pub(crate) user_paths: UserPaths, store_preferences: Arc, - session: Arc>>, + session: ArcSwapOption, } impl AuthenticateUser { pub fn new(user_config: UserConfig, store_preferences: Arc) -> Self { let user_paths = UserPaths::new(user_config.storage_path.clone()); let database = 
Arc::new(UserDB::new(user_paths.clone())); - let session = Arc::new(parking_lot::RwLock::new(None)); - *session.write() = - migrate_session_with_user_uuid(&user_config.session_cache_key, &store_preferences); + let session = + migrate_session_with_user_uuid(&user_config.session_cache_key, &store_preferences) + .map(Arc::new); Self { user_config, database, user_paths, store_preferences, - session, + session: ArcSwapOption::from(session), } } @@ -67,7 +70,7 @@ impl AuthenticateUser { pub fn workspace_id(&self) -> FlowyResult { let session = self.get_session()?; - Ok(session.user_workspace.id) + Ok(session.user_workspace.id.clone()) } pub fn workspace_database_object_id(&self) -> FlowyResult { @@ -107,49 +110,57 @@ impl AuthenticateUser { Ok(()) } - pub fn set_session(&self, session: Option) -> Result<(), FlowyError> { - match &session { + pub fn is_collab_on_disk(&self, uid: i64, object_id: &str) -> FlowyResult { + let collab_db = self.database.get_collab_db(uid)?; + let read_txn = collab_db.read_txn(); + Ok(read_txn.is_exist(uid, &object_id)) + } + + pub fn set_session(&self, session: Option>) -> Result<(), FlowyError> { + match session { None => { - let removed_session = self.session.write().take(); - info!("remove session: {:?}", removed_session); + let previous = self.session.swap(session); + info!("remove session: {:?}", previous); self .store_preferences .remove(self.user_config.session_cache_key.as_ref()); - Ok(()) }, Some(session) => { + self.session.swap(Some(session.clone())); info!("Set current session: {:?}", session); - self.session.write().replace(session.clone()); self .store_preferences - .set_object(&self.user_config.session_cache_key, session.clone()) + .set_object(&self.user_config.session_cache_key, &session) .map_err(internal_error)?; - Ok(()) }, } + Ok(()) } pub fn set_user_workspace(&self, user_workspace: UserWorkspace) -> FlowyResult<()> { - let mut session = self.get_session()?; - session.user_workspace = user_workspace; - 
self.set_session(Some(session)) + let session = self.get_session()?; + self.set_session(Some(Arc::new(Session { + user_id: session.user_id, + user_uuid: session.user_uuid, + user_workspace, + }))) } - pub fn get_session(&self) -> FlowyResult { - if let Some(session) = (self.session.read()).clone() { + pub fn get_session(&self) -> FlowyResult> { + if let Some(session) = self.session.load_full() { return Ok(session); } match self .store_preferences - .get_object::(&self.user_config.session_cache_key) + .get_object::>(&self.user_config.session_cache_key) { None => Err(FlowyError::new( ErrorCode::RecordNotFound, "User is not logged in", )), Some(session) => { - self.session.write().replace(session.clone()); + self.session.store(Some(session.clone())); Ok(session) }, } diff --git a/frontend/rust-lib/flowy-user/src/services/cloud_config.rs b/frontend/rust-lib/flowy-user/src/services/cloud_config.rs index 62ab5a5e72..d4b4afc7a8 100644 --- a/frontend/rust-lib/flowy-user/src/services/cloud_config.rs +++ b/frontend/rust-lib/flowy-user/src/services/cloud_config.rs @@ -10,14 +10,14 @@ const CLOUD_CONFIG_KEY: &str = "af_user_cloud_config"; fn generate_cloud_config(uid: i64, store_preference: &Arc) -> UserCloudConfig { let config = UserCloudConfig::new(generate_encryption_secret()); let key = cache_key_for_cloud_config(uid); - store_preference.set_object(&key, config.clone()).unwrap(); + store_preference.set_object(&key, &config).unwrap(); config } pub fn save_cloud_config( uid: i64, store_preference: &Arc, - config: UserCloudConfig, + config: &UserCloudConfig, ) -> FlowyResult<()> { tracing::info!("save user:{} cloud config: {}", uid, config); let key = cache_key_for_cloud_config(uid); diff --git a/frontend/rust-lib/flowy-user/src/services/data_import/appflowy_data_import.rs b/frontend/rust-lib/flowy-user/src/services/data_import/appflowy_data_import.rs index 6f7271658e..f7579d73e6 100644 --- a/frontend/rust-lib/flowy-user/src/services/data_import/appflowy_data_import.rs +++ 
b/frontend/rust-lib/flowy-user/src/services/data_import/appflowy_data_import.rs @@ -6,11 +6,10 @@ use crate::services::entities::UserPaths; use crate::services::sqlite_sql::user_sql::select_user_profile; use crate::user_manager::run_collab_data_migration; use anyhow::anyhow; -use collab::core::collab::{DataSource, MutexCollab}; +use collab::core::collab::DataSource; use collab::core::origin::CollabOrigin; -use collab::core::transaction::DocTransactionExtension; use collab::preclude::updates::decoder::Decode; -use collab::preclude::{Collab, Doc, Transact, Update}; +use collab::preclude::{Collab, Doc, ReadTxn, StateVector, Transact, Update}; use collab_database::database::{ is_database_collab, mut_database_views_with_collab, reset_inline_view_id, }; @@ -22,6 +21,7 @@ use collab_folder::{Folder, UserId, View, ViewIdentifier, ViewLayout}; use collab_integrate::{CollabKVAction, CollabKVDB, PersistenceError}; use collab_plugins::local_storage::kv::KVTransactionDB; +use collab::preclude::updates::encoder::Encode; use flowy_error::FlowyError; use flowy_folder_pub::cloud::gen_view_id; use flowy_folder_pub::entities::{AppFlowyData, ImportData}; @@ -30,7 +30,6 @@ use flowy_sqlite::kv::KVStorePreferences; use flowy_user_pub::cloud::{UserCloudService, UserCollabParams}; use flowy_user_pub::entities::{user_awareness_object_id, Authenticator}; use flowy_user_pub::session::Session; -use parking_lot::{Mutex, RwLock}; use std::collections::{HashMap, HashSet}; use std::ops::{Deref, DerefMut}; use std::path::Path; @@ -129,9 +128,9 @@ pub(crate) fn generate_import_data( let imported_container_view_name = imported_folder.container_name.clone(); let mut database_view_ids_by_database_id: HashMap> = HashMap::new(); - let row_object_ids = Mutex::new(HashSet::new()); - let document_object_ids = Mutex::new(HashSet::new()); - let database_object_ids = Mutex::new(HashSet::new()); + let mut row_object_ids = HashSet::new(); + let mut document_object_ids = HashSet::new(); + let mut 
database_object_ids = HashSet::new(); // All the imported views will be attached to the container view. If the container view name is not provided, // the container view will be the workspace, which mean the root of the workspace. @@ -146,7 +145,7 @@ pub(crate) fn generate_import_data( let views = collab_db.with_write_txn(|collab_write_txn| { let imported_collab_read_txn = imported_collab_db.read_txn(); // use the old_to_new_id_map to keep track of the other collab object id and the new collab object id - let old_to_new_id_map = Arc::new(Mutex::new(OldToNewIdMap::new())); + let mut old_to_new_id_map = OldToNewIdMap::new(); // 1. Get all the imported collab object ids let mut all_imported_object_ids = imported_collab_read_txn @@ -171,17 +170,17 @@ pub(crate) fn generate_import_data( ImportedSource::ExternalFolder => { // 2. mapping the database indexer ids mapping_database_indexer_ids( - &mut old_to_new_id_map.lock(), + &mut old_to_new_id_map, &imported_session, &imported_collab_read_txn, &mut database_view_ids_by_database_id, - &database_object_ids, + &mut database_object_ids, )?; }, ImportedSource::AnonUser => { // 2. migrate the database with views object migrate_database_with_views_object( - &mut old_to_new_id_map.lock(), + &mut old_to_new_id_map, &imported_session, &imported_collab_read_txn, current_session, @@ -200,7 +199,7 @@ pub(crate) fn generate_import_data( all_imported_object_ids.retain(|id| !database_view_ids.contains(id)); // 3. load imported collab objects data. 
- let imported_collab_by_oid = load_collab_by_object_ids( + let mut imported_collab_by_oid = load_collab_by_object_ids( imported_session.user_id, &imported_collab_read_txn, &all_imported_object_ids, @@ -208,19 +207,19 @@ pub(crate) fn generate_import_data( // import the database migrate_databases( - &old_to_new_id_map, + &mut old_to_new_id_map, current_session, collab_write_txn, &mut all_imported_object_ids, - &imported_collab_by_oid, - &row_object_ids, + &mut imported_collab_by_oid, + &mut row_object_ids, )?; // the object ids now only contains the document collab object ids for object_id in &all_imported_object_ids { if let Some(imported_collab) = imported_collab_by_oid.get(object_id) { - let new_object_id = old_to_new_id_map.lock().exchange_new_id(object_id); - document_object_ids.lock().insert(new_object_id.clone()); + let new_object_id = old_to_new_id_map.exchange_new_id(object_id); + document_object_ids.insert(new_object_id.clone()); debug!("import from: {}, to: {}", object_id, new_object_id,); write_collab_object( imported_collab, @@ -236,7 +235,7 @@ pub(crate) fn generate_import_data( // structure is correctly maintained. 
let (mut child_views, orphan_views) = mapping_folder_views( &import_container_view_id, - &mut old_to_new_id_map.lock(), + &mut old_to_new_id_map, &imported_session, &imported_collab_read_txn, )?; @@ -251,7 +250,7 @@ pub(crate) fn generate_import_data( // create a new view with given name and then attach views to it attach_to_new_view( current_session, - &document_object_ids, + &mut document_object_ids, &import_container_view_id, collab_write_txn, child_views, @@ -274,16 +273,16 @@ pub(crate) fn generate_import_data( database_view_ids_by_database_id, }, AppFlowyData::CollabObject { - row_object_ids: row_object_ids.into_inner().into_iter().collect(), - database_object_ids: database_object_ids.into_inner().into_iter().collect(), - document_object_ids: document_object_ids.into_inner().into_iter().collect(), + row_object_ids: row_object_ids.into_iter().collect(), + database_object_ids: database_object_ids.into_iter().collect(), + document_object_ids: document_object_ids.into_iter().collect(), }, ], }) } fn attach_to_new_view<'a, W>( current_session: &Session, - document_object_ids: &Mutex>, + document_object_ids: &mut HashSet, import_container_view_id: &str, collab_write_txn: &'a W, child_views: Vec, @@ -315,9 +314,7 @@ where collab_write_txn, )?; - document_object_ids - .lock() - .insert(import_container_view_id.to_string()); + document_object_ids.insert(import_container_view_id.to_string()); let mut import_container_views = vec![ViewBuilder::new( current_session.user_id, current_session.user_workspace.id.clone(), @@ -337,29 +334,27 @@ fn mapping_database_indexer_ids<'a, W>( imported_session: &Session, imported_collab_read_txn: &W, database_view_ids_by_database_id: &mut HashMap>, - database_object_ids: &Mutex>, + database_object_ids: &mut HashSet, ) -> Result<(), PersistenceError> where W: CollabKVAction<'a>, PersistenceError: From, { - let imported_database_indexer = Collab::new( + let mut imported_database_indexer = Collab::new( imported_session.user_id, 
&imported_session.user_workspace.database_indexer_id, "import_device", vec![], false, ); - imported_database_indexer.with_origin_transact_mut(|txn| { - imported_collab_read_txn.load_doc_with_txn( - imported_session.user_id, - &imported_session.user_workspace.database_indexer_id, - txn, - ) - })?; + imported_collab_read_txn.load_doc_with_txn( + imported_session.user_id, + &imported_session.user_workspace.database_indexer_id, + &mut imported_database_indexer.transact_mut(), + )?; - let array = DatabaseMetaList::from_collab(&imported_database_indexer); - for database_meta_list in array.get_all_database_meta() { + let array = DatabaseMetaList::new(&mut imported_database_indexer); + for database_meta_list in array.get_all_database_meta(&imported_database_indexer.transact()) { database_view_ids_by_database_id.insert( old_to_new_id_map.exchange_new_id(&database_meta_list.database_id), database_meta_list @@ -369,7 +364,7 @@ where .collect(), ); } - database_object_ids.lock().extend( + database_object_ids.extend( database_view_ids_by_database_id .keys() .cloned() @@ -392,27 +387,26 @@ where PersistenceError: From, PersistenceError: From, { - let database_with_views_collab = Collab::new( + let mut database_with_views_collab = Collab::new( old_user_session.user_id, &old_user_session.user_workspace.database_indexer_id, "migrate_device", vec![], false, ); - database_with_views_collab.with_origin_transact_mut(|txn| { - old_collab_r_txn.load_doc_with_txn( - old_user_session.user_id, - &old_user_session.user_workspace.database_indexer_id, - txn, - ) - })?; + old_collab_r_txn.load_doc_with_txn( + old_user_session.user_id, + &old_user_session.user_workspace.database_indexer_id, + &mut database_with_views_collab.transact_mut(), + )?; let new_uid = new_user_session.user_id; let new_object_id = &new_user_session.user_workspace.database_indexer_id; - let array = DatabaseMetaList::from_collab(&database_with_views_collab); - for database_meta in array.get_all_database_meta() { - 
array.update_database(&database_meta.database_id, |update| { + let array = DatabaseMetaList::new(&mut database_with_views_collab); + let mut txn = database_with_views_collab.transact_mut(); + for database_meta in array.get_all_database_meta(&txn) { + array.update_database(&mut txn, &database_meta.database_id, |update| { let new_linked_views = update .linked_views .iter() @@ -423,7 +417,6 @@ where }) } - let txn = database_with_views_collab.transact(); if let Err(err) = new_collab_w_txn.create_new_doc(new_uid, new_object_id, &txn) { error!("🔴migrate database storage failed: {:?}", err); } @@ -432,61 +425,53 @@ where } fn migrate_databases<'a, W>( - old_to_new_id_map: &Arc>, + old_to_new_id_map: &mut OldToNewIdMap, session: &Session, collab_write_txn: &'a W, imported_object_ids: &mut Vec, - imported_collab_by_oid: &HashMap, - row_object_ids: &Mutex>, + imported_collab_by_oid: &mut HashMap, + row_object_ids: &mut HashSet, ) -> Result<(), PersistenceError> where W: CollabKVAction<'a>, PersistenceError: From, { // Migrate databases - let row_document_object_ids = Mutex::new(HashSet::new()); + let mut row_document_object_ids = HashSet::new(); let mut database_object_ids = vec![]; - let imported_database_row_object_ids: RwLock>> = - RwLock::new(HashMap::new()); + let mut imported_database_row_object_ids: HashMap> = HashMap::new(); - for object_id in &mut *imported_object_ids { - if let Some(database_collab) = imported_collab_by_oid.get(object_id) { + for object_id in imported_object_ids.iter() { + if let Some(database_collab) = imported_collab_by_oid.get_mut(object_id) { if !is_database_collab(database_collab) { continue; } database_object_ids.push(object_id.clone()); reset_inline_view_id(database_collab, |old_inline_view_id| { - old_to_new_id_map - .lock() - .exchange_new_id(&old_inline_view_id) + old_to_new_id_map.exchange_new_id(&old_inline_view_id) }); mut_database_views_with_collab(database_collab, |database_view| { - let new_view_id = 
old_to_new_id_map.lock().exchange_new_id(&database_view.id); + let new_view_id = old_to_new_id_map.exchange_new_id(&database_view.id); let old_database_id = database_view.database_id.clone(); - let new_database_id = old_to_new_id_map - .lock() - .exchange_new_id(&database_view.database_id); + let new_database_id = old_to_new_id_map.exchange_new_id(&database_view.database_id); database_view.id = new_view_id; database_view.database_id = new_database_id; database_view.row_orders.iter_mut().for_each(|row_order| { let old_row_id = String::from(row_order.id.clone()); let old_row_document_id = database_row_document_id_from_row_id(&old_row_id); - let new_row_id = old_to_new_id_map.lock().exchange_new_id(&old_row_id); + let new_row_id = old_to_new_id_map.exchange_new_id(&old_row_id); // The row document might not exist in the database row. But by querying the old_row_document_id, // we can know the document of the row is exist or not. let new_row_document_id = database_row_document_id_from_row_id(&new_row_id); - old_to_new_id_map - .lock() - .insert(old_row_document_id.clone(), new_row_document_id); + old_to_new_id_map.insert(old_row_document_id.clone(), new_row_document_id); row_order.id = RowId::from(new_row_id); imported_database_row_object_ids - .write() .entry(old_database_id.clone()) .or_default() .insert(old_row_id); @@ -498,10 +483,10 @@ where .iter() .map(|order| order.id.clone().into_inner()) .collect::>(); - row_object_ids.lock().extend(new_row_ids); + row_object_ids.extend(new_row_ids); }); - let new_object_id = old_to_new_id_map.lock().exchange_new_id(object_id); + let new_object_id = old_to_new_id_map.exchange_new_id(object_id); debug!( "migrate database from: {}, to: {}", object_id, new_object_id, @@ -514,7 +499,6 @@ where ); } } - let imported_database_row_object_ids = imported_database_row_object_ids.read(); // remove the database object ids from the object ids imported_object_ids.retain(|id| !database_object_ids.contains(id)); @@ -527,11 +511,11 @@ where 
.any(|row_id| row_id == id) }); - for (database_id, imported_row_ids) in &*imported_database_row_object_ids { + for (database_id, imported_row_ids) in imported_database_row_object_ids { for imported_row_id in imported_row_ids { - if let Some(imported_collab) = imported_collab_by_oid.get(imported_row_id) { - let new_database_id = old_to_new_id_map.lock().exchange_new_id(database_id); - let new_row_id = old_to_new_id_map.lock().exchange_new_id(imported_row_id); + if let Some(imported_collab) = imported_collab_by_oid.get_mut(&imported_row_id) { + let new_database_id = old_to_new_id_map.exchange_new_id(&database_id); + let new_row_id = old_to_new_id_map.exchange_new_id(&imported_row_id); info!( "import database row from: {}, to: {}", imported_row_id, new_row_id, @@ -550,25 +534,20 @@ where // imported_collab_by_oid contains all the collab object ids, including the row document collab object ids. // So, if the id exist in the imported_collab_by_oid, it means the row document collab object is exist. 
- let imported_row_document_id = database_row_document_id_from_row_id(imported_row_id); + let imported_row_document_id = database_row_document_id_from_row_id(&imported_row_id); if imported_collab_by_oid .get(&imported_row_document_id) .is_some() { - let new_row_document_id = old_to_new_id_map - .lock() - .exchange_new_id(&imported_row_document_id); - row_document_object_ids.lock().insert(new_row_document_id); + let new_row_document_id = old_to_new_id_map.exchange_new_id(&imported_row_document_id); + row_document_object_ids.insert(new_row_document_id); } } } debug!( "import row document ids: {:?}", - row_document_object_ids - .lock() - .iter() - .collect::>() + row_document_object_ids.iter().collect::>() ); Ok(()) @@ -588,18 +567,17 @@ where drop(txn); } - let encoded_collab = doc.get_encoded_collab_v1(); + let txn = doc.transact(); + let state_vector = txn.state_vector(); + let doc_state = txn.encode_state_as_update_v1(&StateVector::default()); info!( "import collab:{} with len: {}", new_object_id, - encoded_collab.doc_state.len() + doc_state.len() ); - if let Err(err) = w_txn.flush_doc( - new_uid, - &new_object_id, - encoded_collab.state_vector.to_vec(), - encoded_collab.doc_state.to_vec(), - ) { + if let Err(err) = + w_txn.flush_doc(new_uid, &new_object_id, state_vector.encode_v1(), doc_state) + { error!("import collab:{} failed: {:?}", new_object_id, err); } } @@ -639,27 +617,21 @@ where W: CollabKVAction<'a>, PersistenceError: From, { - let imported_folder_collab = Collab::new( + let mut imported_folder_collab = Collab::new( imported_session.user_id, &imported_session.user_workspace.id, "migrate_device", vec![], false, ); - imported_folder_collab.with_origin_transact_mut(|txn| { - imported_collab_read_txn.load_doc_with_txn( - imported_session.user_id, - &imported_session.user_workspace.id, - txn, - ) - })?; + imported_collab_read_txn.load_doc_with_txn( + imported_session.user_id, + &imported_session.user_workspace.id, + &mut 
imported_folder_collab.transact_mut(), + )?; let other_user_id = UserId::from(imported_session.user_id); - let imported_folder = Folder::open( - other_user_id, - Arc::new(MutexCollab::new(imported_folder_collab)), - None, - ) - .map_err(|err| PersistenceError::InvalidData(err.to_string()))?; + let imported_folder = Folder::open(other_user_id, imported_folder_collab, None) + .map_err(|err| PersistenceError::InvalidData(err.to_string()))?; let imported_folder_data = imported_folder .get_folder_data(&imported_session.user_workspace.id) diff --git a/frontend/rust-lib/flowy-user/src/services/data_import/importer.rs b/frontend/rust-lib/flowy-user/src/services/data_import/importer.rs index 47d7167fb4..be2712827f 100644 --- a/frontend/rust-lib/flowy-user/src/services/data_import/importer.rs +++ b/frontend/rust-lib/flowy-user/src/services/data_import/importer.rs @@ -39,8 +39,7 @@ where R: CollabKVAction<'a>, PersistenceError: From, { - let collab = Collab::new(uid, object_id, "phantom", vec![], false); - collab - .with_origin_transact_mut(|txn| collab_read_txn.load_doc_with_txn(uid, object_id, txn)) - .map(|_| collab) + let mut collab = Collab::new(uid, object_id, "phantom", vec![], false); + collab_read_txn.load_doc_with_txn(uid, object_id, &mut collab.transact_mut())?; + Ok(collab) } diff --git a/frontend/rust-lib/flowy-user/src/services/db.rs b/frontend/rust-lib/flowy-user/src/services/db.rs index 3305fca41a..f16d242a96 100644 --- a/frontend/rust-lib/flowy-user/src/services/db.rs +++ b/frontend/rust-lib/flowy-user/src/services/db.rs @@ -1,9 +1,11 @@ use std::path::{Path, PathBuf}; -use std::{collections::HashMap, fs, io, sync::Arc, time::Duration}; +use std::{fs, io, sync::Arc}; use chrono::{Days, Local}; use collab_integrate::{CollabKVAction, CollabKVDB, PersistenceError}; use collab_plugins::local_storage::kv::KVTransactionDB; +use dashmap::mapref::entry::Entry; +use dashmap::DashMap; use flowy_error::FlowyError; use flowy_sqlite::schema::user_workspace_table; use 
flowy_sqlite::ConnectionPool; @@ -15,7 +17,6 @@ use flowy_sqlite::{ use flowy_user_pub::entities::{UserProfile, UserWorkspace}; use lib_dispatch::prelude::af_spawn; use lib_infra::file_util::{unzip_and_replace, zip_folder}; -use parking_lot::RwLock; use tracing::{error, event, info, instrument}; use crate::services::sqlite_sql::user_sql::UserTable; @@ -29,8 +30,8 @@ pub trait UserDBPath: Send + Sync + 'static { pub struct UserDB { paths: Box, - sqlite_map: RwLock>, - collab_db_map: RwLock>>, + sqlite_map: DashMap, + collab_db_map: DashMap>, } impl UserDB { @@ -112,18 +113,14 @@ impl UserDB { /// Close the database connection for the user. pub(crate) fn close(&self, user_id: i64) -> Result<(), FlowyError> { - if let Some(mut sqlite_dbs) = self.sqlite_map.try_write_for(Duration::from_millis(300)) { - if sqlite_dbs.remove(&user_id).is_some() { - tracing::trace!("close sqlite db for user {}", user_id); - } + if self.sqlite_map.remove(&user_id).is_some() { + tracing::trace!("close sqlite db for user {}", user_id); } - if let Some(mut collab_dbs) = self.collab_db_map.try_write_for(Duration::from_millis(300)) { - if let Some(db) = collab_dbs.remove(&user_id) { - tracing::trace!("close collab db for user {}", user_id); - let _ = db.flush(); - drop(db); - } + if let Some((_, db)) = self.collab_db_map.remove(&user_id) { + tracing::trace!("close collab db for user {}", user_id); + let _ = db.flush(); + drop(db); } Ok(()) } @@ -148,18 +145,18 @@ impl UserDB { db_path: impl AsRef, user_id: i64, ) -> Result, FlowyError> { - if let Some(database) = self.sqlite_map.read().get(&user_id) { - return Ok(database.get_pool()); + match self.sqlite_map.entry(user_id) { + Entry::Occupied(e) => Ok(e.get().get_pool()), + Entry::Vacant(e) => { + tracing::debug!("open sqlite db {} at path: {:?}", user_id, db_path.as_ref()); + let db = flowy_sqlite::init(&db_path).map_err(|e| { + FlowyError::internal().with_context(format!("open user db failed, {:?}", e)) + })?; + let pool = db.get_pool(); + 
e.insert(db); + Ok(pool) + }, } - - let mut write_guard = self.sqlite_map.write(); - tracing::debug!("open sqlite db {} at path: {:?}", user_id, db_path.as_ref()); - let db = flowy_sqlite::init(&db_path) - .map_err(|e| FlowyError::internal().with_context(format!("open user db failed, {:?}", e)))?; - let pool = db.get_pool(); - write_guard.insert(user_id.to_owned(), db); - drop(write_guard); - Ok(pool) } pub fn get_user_profile( @@ -195,28 +192,27 @@ impl UserDB { collab_db_path: impl AsRef, uid: i64, ) -> Result, PersistenceError> { - if let Some(collab_db) = self.collab_db_map.read().get(&uid) { - return Ok(collab_db.clone()); - } + match self.collab_db_map.entry(uid) { + Entry::Occupied(e) => Ok(e.get().clone()), + Entry::Vacant(e) => { + info!( + "open collab db for user {} at path: {:?}", + uid, + collab_db_path.as_ref() + ); + let db = match CollabKVDB::open(&collab_db_path) { + Ok(db) => Ok(db), + Err(err) => { + error!("open collab db error, {:?}", err); + Err(err) + }, + }?; - let mut write_guard = self.collab_db_map.write(); - info!( - "open collab db for user {} at path: {:?}", - uid, - collab_db_path.as_ref() - ); - let db = match CollabKVDB::open(&collab_db_path) { - Ok(db) => Ok(db), - Err(err) => { - error!("open collab db error, {:?}", err); - Err(err) + let db = Arc::new(db); + e.insert(db.clone()); + Ok(db) }, - }?; - - let db = Arc::new(db); - write_guard.insert(uid.to_owned(), db.clone()); - drop(write_guard); - Ok(db) + } } } diff --git a/frontend/rust-lib/flowy-user/src/user_manager/manager.rs b/frontend/rust-lib/flowy-user/src/user_manager/manager.rs index 8d8cb151fd..78443360aa 100644 --- a/frontend/rust-lib/flowy-user/src/user_manager/manager.rs +++ b/frontend/rust-lib/flowy-user/src/user_manager/manager.rs @@ -1,8 +1,10 @@ use collab_integrate::collab_builder::AppFlowyCollabBuilder; use collab_integrate::CollabKVDB; -use collab_user::core::MutexUserAwareness; use flowy_error::{internal_error, ErrorCode, FlowyResult}; +use 
arc_swap::ArcSwapOption; +use collab_user::core::UserAwareness; +use dashmap::DashMap; use flowy_server_pub::AuthenticatorType; use flowy_sqlite::kv::KVStorePreferences; use flowy_sqlite::schema::user_table; @@ -14,7 +16,7 @@ use flowy_user_pub::workspace_service::UserWorkspaceService; use semver::Version; use serde_json::Value; use std::string::ToString; -use std::sync::atomic::{AtomicBool, AtomicI64, Ordering}; +use std::sync::atomic::{AtomicI64, Ordering}; use std::sync::{Arc, Weak}; use tokio::sync::{Mutex, RwLock}; use tokio_stream::StreamExt; @@ -23,7 +25,6 @@ use tracing::{debug, error, event, info, instrument, trace, warn}; use lib_dispatch::prelude::af_spawn; use lib_infra::box_any::BoxAny; -use crate::anon_user::{migration_anon_user_on_sign_up, sync_supabase_user_data_to_cloud}; use crate::entities::{AuthStateChangedPB, AuthStatePB, UserProfilePB, UserSettingPB}; use crate::event_map::{DefaultUserStatusCallback, UserStatusCallback}; use crate::migrations::document_empty_content::HistoricalEmptyDocumentMigration; @@ -49,7 +50,7 @@ use super::manager_user_workspace::save_user_workspace; pub struct UserManager { pub(crate) cloud_services: Arc, pub(crate) store_preferences: Arc, - pub(crate) user_awareness: Arc>>, + pub(crate) user_awareness: Arc>>, pub(crate) user_status_callback: RwLock>, pub(crate) collab_builder: Weak, pub(crate) collab_interact: RwLock>, @@ -57,7 +58,7 @@ pub struct UserManager { auth_process: Mutex>, pub(crate) authenticate_user: Arc, refresh_user_profile_since: AtomicI64, - pub(crate) is_loading_awareness: Arc, + pub(crate) is_loading_awareness: Arc>, } impl UserManager { @@ -75,7 +76,7 @@ impl UserManager { let user_manager = Arc::new(Self { cloud_services, store_preferences, - user_awareness: Arc::new(Default::default()), + user_awareness: Default::default(), user_status_callback, collab_builder, collab_interact: RwLock::new(Arc::new(DefaultCollabInteract)), @@ -83,7 +84,7 @@ impl UserManager { authenticate_user, 
refresh_user_profile_since, user_workspace_service, - is_loading_awareness: Arc::new(AtomicBool::new(false)), + is_loading_awareness: Arc::new(Default::default()), }); let weak_user_manager = Arc::downgrade(&user_manager); @@ -267,8 +268,10 @@ impl UserManager { } self.authenticate_user.vacuum_database_if_need(); let cloud_config = get_cloud_config(session.user_id, &self.store_preferences); - // Init the user awareness - self.initialize_user_awareness(&session).await; + // Init the user awareness. here we ignore the error + let _ = self + .initial_user_awareness(&session, &user.authenticator) + .await; user_status_callback .did_init( @@ -283,7 +286,7 @@ impl UserManager { Ok(()) } - pub fn get_session(&self) -> FlowyResult { + pub fn get_session(&self) -> FlowyResult> { self.authenticate_user.get_session() } @@ -338,7 +341,9 @@ impl UserManager { .save_auth_data(&response, &authenticator, &session) .await?; - let _ = self.initialize_user_awareness(&session).await; + let _ = self + .initial_user_awareness(&session, &user_profile.authenticator) + .await; self .user_status_callback .read() @@ -426,7 +431,9 @@ impl UserManager { self .save_auth_data(&response, authenticator, &new_session) .await?; - let _ = self.try_initial_user_awareness(&new_session).await; + let _ = self + .initial_user_awareness(&new_session, &new_user_profile.authenticator) + .await; self .user_status_callback .read() @@ -617,7 +624,8 @@ impl UserManager { } pub fn workspace_id(&self) -> Result { - Ok(self.get_session()?.user_workspace.id) + let session = self.get_session()?; + Ok(session.user_workspace.id.clone()) } pub fn token(&self) -> Result, FlowyError> { @@ -714,7 +722,7 @@ impl UserManager { let uid = user_profile.uid; if authenticator.is_local() { event!(tracing::Level::DEBUG, "Save new anon user: {:?}", uid); - self.set_anon_user(session.clone()); + self.set_anon_user(session); } save_all_user_workspaces(uid, self.db_connection(uid)?, response.user_workspaces())?; @@ -723,7 +731,9 @@ 
impl UserManager { authenticator ); - self.authenticate_user.set_session(Some(session.clone()))?; + self + .authenticate_user + .set_session(Some(session.clone().into()))?; self .save_user(uid, (user_profile, authenticator.clone()).into()) .await?; @@ -753,38 +763,18 @@ impl UserManager { async fn migrate_anon_user_data_to_cloud( &self, old_user: &AnonUser, - new_user_session: &Session, + _new_user_session: &Session, authenticator: &Authenticator, ) -> Result<(), FlowyError> { let old_collab_db = self .authenticate_user .database .get_collab_db(old_user.session.user_id)?; - let new_collab_db = self - .authenticate_user - .database - .get_collab_db(new_user_session.user_id)?; - match authenticator { - Authenticator::Supabase => { - migration_anon_user_on_sign_up(old_user, &old_collab_db, new_user_session, &new_collab_db)?; - if let Err(err) = sync_supabase_user_data_to_cloud( - self.cloud_services.get_user_service()?, - &self.authenticate_user.user_config.device_id, - new_user_session, - &new_collab_db, - ) - .await - { - error!("Sync user data to cloud failed: {:?}", err); - } - }, - Authenticator::AppFlowyCloud => { - self - .migration_anon_user_on_appflowy_cloud_sign_up(old_user, &old_collab_db) - .await?; - }, - _ => {}, + if authenticator == &Authenticator::AppFlowyCloud { + self + .migration_anon_user_on_appflowy_cloud_sign_up(old_user, &old_collab_db) + .await?; } // Save the old user workspace setting. 
@@ -803,7 +793,6 @@ impl UserManager { fn current_authenticator() -> Authenticator { match AuthenticatorType::from_env() { AuthenticatorType::Local => Authenticator::Local, - AuthenticatorType::Supabase => Authenticator::Supabase, AuthenticatorType::AppFlowyCloud => Authenticator::AppFlowyCloud, } } diff --git a/frontend/rust-lib/flowy-user/src/user_manager/manager_history_user.rs b/frontend/rust-lib/flowy-user/src/user_manager/manager_history_user.rs index 251a77bd98..8d20bae427 100644 --- a/frontend/rust-lib/flowy-user/src/user_manager/manager_history_user.rs +++ b/frontend/rust-lib/flowy-user/src/user_manager/manager_history_user.rs @@ -1,3 +1,4 @@ +use std::sync::Arc; use tracing::instrument; use crate::entities::UserProfilePB; @@ -33,7 +34,7 @@ impl UserManager { } } - pub fn set_anon_user(&self, session: Session) { + pub fn set_anon_user(&self, session: &Session) { let _ = self.store_preferences.set_object(ANON_USER, session); } @@ -63,7 +64,7 @@ impl UserManager { pub async fn open_anon_user(&self) -> FlowyResult<()> { let anon_session = self .store_preferences - .get_object::(ANON_USER) + .get_object::>(ANON_USER) .ok_or(FlowyError::new( ErrorCode::RecordNotFound, "Anon user not found", diff --git a/frontend/rust-lib/flowy-user/src/user_manager/manager_user_awareness.rs b/frontend/rust-lib/flowy-user/src/user_manager/manager_user_awareness.rs index ec6dab5499..224c91467a 100644 --- a/frontend/rust-lib/flowy-user/src/user_manager/manager_user_awareness.rs +++ b/frontend/rust-lib/flowy-user/src/user_manager/manager_user_awareness.rs @@ -1,17 +1,20 @@ -use std::sync::atomic::Ordering; use std::sync::{Arc, Weak}; use anyhow::Context; -use collab::core::collab::{DataSource, MutexCollab}; +use collab::core::collab::DataSource; use collab_entity::reminder::Reminder; use collab_entity::CollabType; -use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig}; -use collab_user::core::{MutexUserAwareness, UserAwareness}; -use tracing::{debug, 
error, info, instrument, trace}; +use collab_integrate::collab_builder::{ + AppFlowyCollabBuilder, CollabBuilderConfig, KVDBCollabPersistenceImpl, +}; +use collab_user::core::{UserAwareness, UserAwarenessNotifier}; +use dashmap::try_result::TryResult; +use tokio::sync::RwLock; +use tracing::{error, info, instrument, trace}; use collab_integrate::CollabKVDB; use flowy_error::{ErrorCode, FlowyError, FlowyResult}; -use flowy_user_pub::entities::user_awareness_object_id; +use flowy_user_pub::entities::{user_awareness_object_id, Authenticator}; use crate::entities::ReminderPB; use crate::user_manager::UserManager; @@ -34,10 +37,10 @@ impl UserManager { pub async fn add_reminder(&self, reminder_pb: ReminderPB) -> FlowyResult<()> { let reminder = Reminder::from(reminder_pb); self - .with_awareness((), |user_awareness| { + .mut_awareness(|user_awareness| { user_awareness.add_reminder(reminder.clone()); }) - .await; + .await?; self .collab_interact .read() @@ -51,10 +54,10 @@ impl UserManager { /// pub async fn remove_reminder(&self, reminder_id: &str) -> FlowyResult<()> { self - .with_awareness((), |user_awareness| { + .mut_awareness(|user_awareness| { user_awareness.remove_reminder(reminder_id); }) - .await; + .await?; self .collab_interact .read() @@ -69,12 +72,12 @@ impl UserManager { pub async fn update_reminder(&self, reminder_pb: ReminderPB) -> FlowyResult<()> { let reminder = Reminder::from(reminder_pb); self - .with_awareness((), |user_awareness| { + .mut_awareness(|user_awareness| { user_awareness.update_reminder(&reminder.id, |new_reminder| { new_reminder.clone_from(&reminder) }); }) - .await; + .await?; self .collab_interact .read() @@ -95,117 +98,203 @@ impl UserManager { /// - Returns a vector of `Reminder` objects containing all reminders for the user. 
/// pub async fn get_all_reminders(&self) -> Vec { - self - .with_awareness(vec![], |user_awareness| user_awareness.get_all_reminders()) - .await + let reminders = self + .mut_awareness(|user_awareness| user_awareness.get_all_reminders()) + .await; + reminders.unwrap_or_default() } - pub async fn initialize_user_awareness(&self, session: &Session) { - match self.try_initial_user_awareness(session).await { - Ok(_) => {}, - Err(e) => error!("Failed to initialize user awareness: {:?}", e), - } - } - - /// Initializes the user's awareness based on the specified data source. - /// - /// This asynchronous function attempts to initialize the user's awareness from either a local or remote data source. - /// Depending on the chosen source, it will either construct the user awareness from an empty dataset or fetch it - /// from a remote service. Once obtained, the user's awareness is stored in a shared mutex-protected structure. - /// - /// # Parameters - /// - `session`: The current user's session data. - /// - `source`: The source from which the user's awareness data should be obtained, either local or remote. - /// - /// # Returns - /// - Returns `Ok(())` if the user's awareness is successfully initialized. - /// - May return errors of type `FlowyError` if any issues arise during the initialization. + /// Init UserAwareness for user + /// 1. check if user awareness exists on disk. If yes init awareness from disk + /// 2. If not, init awareness from server. 
#[instrument(level = "info", skip(self, session), err)] - pub(crate) async fn try_initial_user_awareness(&self, session: &Session) -> FlowyResult<()> { - if self.is_loading_awareness.load(Ordering::SeqCst) { - return Ok(()); - } - self.is_loading_awareness.store(true, Ordering::SeqCst); - - if let Some(old_user_awareness) = self.user_awareness.lock().await.take() { - debug!("Closing old user awareness"); - old_user_awareness.lock().close(); - drop(old_user_awareness); - } - + pub(crate) async fn initial_user_awareness( + &self, + session: &Session, + authenticator: &Authenticator, + ) -> FlowyResult<()> { + let authenticator = authenticator.clone(); let object_id = user_awareness_object_id(&session.user_uuid, &session.user_workspace.id).to_string(); - trace!("Initializing user awareness {}", object_id); - let collab_db = self.get_collab_db(session.user_id)?; - let weak_cloud_services = Arc::downgrade(&self.cloud_services); - let weak_user_awareness = Arc::downgrade(&self.user_awareness); - let weak_builder = self.collab_builder.clone(); - let cloned_is_loading = self.is_loading_awareness.clone(); - let session = session.clone(); - let workspace_id = session.user_workspace.id.clone(); - tokio::spawn(async move { - if cloned_is_loading.load(Ordering::SeqCst) { - return Ok(()); + + // Try to acquire mutable access to `is_loading_awareness`. 
+ // Thread-safety is ensured by DashMap + let should_init = match self.is_loading_awareness.try_get_mut(&object_id) { + TryResult::Present(mut is_loading) => { + if *is_loading { + false + } else { + *is_loading = true; + true + } + }, + TryResult::Absent => true, + TryResult::Locked => { + return Err(FlowyError::new( + ErrorCode::Internal, + format!( + "Failed to lock is_loading_awareness for object: {}", + object_id + ), + )); + }, + }; + + if should_init { + if let Some(old_user_awareness) = self.user_awareness.swap(None) { + info!("Closing previous user awareness"); + old_user_awareness.read().await.close(); // Ensure that old awareness is closed } - if let (Some(cloud_services), Some(user_awareness)) = - (weak_cloud_services.upgrade(), weak_user_awareness.upgrade()) - { + let is_exist_on_disk = self + .authenticate_user + .is_collab_on_disk(session.user_id, &object_id)?; + if authenticator.is_local() || is_exist_on_disk { + trace!( + "Initializing new user awareness from disk:{}, {:?}", + object_id, + authenticator + ); + let collab_db = self.get_collab_db(session.user_id)?; + let doc_state = + KVDBCollabPersistenceImpl::new(collab_db.clone(), session.user_id).into_data_source(); + let awareness = Self::collab_for_user_awareness( + &self.collab_builder.clone(), + &session.user_workspace.id, + session.user_id, + &object_id, + collab_db, + doc_state, + None, + )?; + info!("User awareness initialized successfully"); + self.user_awareness.store(Some(awareness)); + if let Some(mut is_loading) = self.is_loading_awareness.get_mut(&object_id) { + *is_loading = false; + } + } else { + info!( + "Initializing new user awareness from server:{}, {:?}", + object_id, authenticator + ); + self.load_awareness_from_server(session, object_id, authenticator.clone())?; + } + } else { + return Err(FlowyError::new( + ErrorCode::Internal, + format!( + "User awareness is already being loaded for object: {}", + object_id + ), + )); + } + + Ok(()) + } + + /// Initialize UserAwareness 
from server. + /// It will spawn a task in the background in order to no block the caller. This functions is + /// designed to be thread safe. + fn load_awareness_from_server( + &self, + session: &Session, + object_id: String, + authenticator: Authenticator, + ) -> FlowyResult<()> { + // Clone necessary data + let session = session.clone(); + let collab_db = self.get_collab_db(session.user_id)?; + let weak_builder = self.collab_builder.clone(); + let user_awareness = Arc::downgrade(&self.user_awareness); + let cloud_services = self.cloud_services.clone(); + let authenticate_user = self.authenticate_user.clone(); + let is_loading_awareness = self.is_loading_awareness.clone(); + + // Spawn an async task to fetch or create user awareness + tokio::spawn(async move { + let set_is_loading_false = || { + if let Some(mut is_loading) = is_loading_awareness.get_mut(&object_id) { + *is_loading = false; + } + }; + + let create_awareness = if authenticator.is_local() { + let doc_state = + KVDBCollabPersistenceImpl::new(collab_db.clone(), session.user_id).into_data_source(); + Self::collab_for_user_awareness( + &weak_builder, + &session.user_workspace.id, + session.user_id, + &object_id, + collab_db, + doc_state, + None, + ) + } else { let result = cloud_services .get_user_service()? 
.get_user_awareness_doc_state(session.user_id, &session.user_workspace.id, &object_id) .await; - let mut lock_awareness = user_awareness - .try_lock() - .map_err(|err| FlowyError::internal().with_context(err))?; - if lock_awareness.is_some() { - return Ok(()); - } - - let awareness = match result { + match result { Ok(data) => { - trace!("Get user awareness collab from remote: {}", data.len()); - let collab = Self::collab_for_user_awareness( - &workspace_id, + trace!("Fetched user awareness collab from remote: {}", data.len()); + Self::collab_for_user_awareness( &weak_builder, + &session.user_workspace.id, session.user_id, &object_id, collab_db, DataSource::DocStateV1(data), + None, ) - .await?; - MutexUserAwareness::new(UserAwareness::create(collab, None)) }, Err(err) => { if err.is_record_not_found() { info!("User awareness not found, creating new"); - let collab = Self::collab_for_user_awareness( - &workspace_id, + let doc_state = KVDBCollabPersistenceImpl::new(collab_db.clone(), session.user_id) + .into_data_source(); + Self::collab_for_user_awareness( &weak_builder, + &session.user_workspace.id, session.user_id, &object_id, collab_db, - DataSource::Disk, + doc_state, + None, ) - .await?; - MutexUserAwareness::new(UserAwareness::create(collab, None)) } else { - error!("Failed to fetch user awareness: {:?}", err); - return Err(err); + Err(err) } }, - }; + } + }; - trace!("User awareness initialized"); - lock_awareness.replace(awareness); + match create_awareness { + Ok(new_user_awareness) => { + // Validate session before storing the awareness + if let Ok(current_session) = authenticate_user.get_session() { + if current_session.user_workspace.id == session.user_workspace.id { + if let Some(user_awareness) = user_awareness.upgrade() { + info!("User awareness initialized successfully"); + user_awareness.store(Some(new_user_awareness)); + } else { + error!("Failed to upgrade user awareness"); + } + } else { + info!("User awareness is outdated, ignoring"); + } + } + 
set_is_loading_false(); + Ok(()) + }, + Err(err) => { + error!("Error while creating user awareness: {:?}", err); + set_is_loading_false(); + Err(err) + }, } - Ok(()) }); - - // mark the user awareness as not loading - self.is_loading_awareness.store(false, Ordering::SeqCst); - Ok(()) } @@ -214,29 +303,29 @@ impl UserManager { /// This function constructs a collaboration instance based on the given session and raw data, /// using a collaboration builder. This instance is specifically geared towards handling /// user awareness. - async fn collab_for_user_awareness( - workspace_id: &str, + fn collab_for_user_awareness( collab_builder: &Weak, + workspace_id: &str, uid: i64, object_id: &str, collab_db: Weak, doc_state: DataSource, - ) -> Result, FlowyError> { + notifier: Option, + ) -> Result>, FlowyError> { let collab_builder = collab_builder.upgrade().ok_or(FlowyError::new( ErrorCode::Internal, "Unexpected error: collab builder is not available", ))?; + let collab_object = + collab_builder.collab_object(workspace_id, uid, object_id, CollabType::UserAwareness)?; let collab = collab_builder - .build( - workspace_id, - uid, - object_id, - CollabType::UserAwareness, + .create_user_awareness( + collab_object, doc_state, collab_db, CollabBuilderConfig::default().sync_enable(true), + notifier, ) - .await .context("Build collab for user awareness failed")?; Ok(collab) } @@ -252,26 +341,39 @@ impl UserManager { /// # Parameters /// - `default_value`: A default value to return if the user awareness is `None` and cannot be initialized. /// - `f`: The asynchronous closure to execute with the user awareness. 
- async fn with_awareness(&self, default_value: Output, f: F) -> Output + async fn mut_awareness(&self, f: F) -> FlowyResult where - F: FnOnce(&UserAwareness) -> Output, + F: FnOnce(&mut UserAwareness) -> Output, { - // Check if initialization is needed and perform it if necessary - if self.user_awareness.lock().await.is_none() { - if let Ok(session) = self.get_session() { - self.initialize_user_awareness(&session).await; - } - } + match self.user_awareness.load_full() { + None => { + info!("User awareness is not loaded when trying to access it"); - let user_awareness = self.user_awareness.lock().await; - match &*user_awareness { - Some(inner_awareness) => { - let inner_awareness_clone = inner_awareness.clone(); - drop(user_awareness); - let result = f(&inner_awareness_clone.lock()); - result + let session = self.get_session()?; + let object_id = + user_awareness_object_id(&session.user_uuid, &session.user_workspace.id).to_string(); + let is_loading = self + .is_loading_awareness + .get(&object_id) + .map(|r| *r.value()) + .unwrap_or(false); + + if !is_loading { + let user_profile = self.get_user_profile_from_disk(session.user_id).await?; + self + .initial_user_awareness(&session, &user_profile.authenticator) + .await?; + } + + Err(FlowyError::new( + ErrorCode::InProgress, + "User awareness is loading", + )) + }, + Some(lock) => { + let mut user_awareness = lock.write().await; + Ok(f(&mut user_awareness)) }, - None => default_value, } } } diff --git a/frontend/rust-lib/flowy-user/src/user_manager/manager_user_workspace.rs b/frontend/rust-lib/flowy-user/src/user_manager/manager_user_workspace.rs index 97cc6747f2..4b56b51df0 100644 --- a/frontend/rust-lib/flowy-user/src/user_manager/manager_user_workspace.rs +++ b/frontend/rust-lib/flowy-user/src/user_manager/manager_user_workspace.rs @@ -157,7 +157,7 @@ impl UserManager { old_collab_db: &Arc, ) -> FlowyResult<()> { let import_context = ImportedFolder { - imported_session: old_user.session.clone(), + 
imported_session: old_user.session.as_ref().clone(), imported_collab_db: old_collab_db.clone(), container_name: None, source: ImportedSource::AnonUser, @@ -179,14 +179,19 @@ impl UserManager { .authenticate_user .set_user_workspace(user_workspace.clone())?; - if let Err(err) = self.try_initial_user_awareness(&self.get_session()?).await { + let uid = self.user_id()?; + let user_profile = self.get_user_profile_from_disk(uid).await?; + + if let Err(err) = self + .initial_user_awareness(self.get_session()?.as_ref(), &user_profile.authenticator) + .await + { error!( "Failed to initialize user awareness when opening workspace: {:?}", err ); } - let uid = self.user_id()?; if let Err(err) = self .user_status_callback .read() diff --git a/frontend/rust-lib/lib-dispatch/Cargo.toml b/frontend/rust-lib/lib-dispatch/Cargo.toml index 0d835915c7..a9636c3c8d 100644 --- a/frontend/rust-lib/lib-dispatch/Cargo.toml +++ b/frontend/rust-lib/lib-dispatch/Cargo.toml @@ -23,7 +23,6 @@ serde = { version = "1.0", features = ["derive"], optional = true } serde_repr = { workspace = true, optional = true } validator = "0.16.1" tracing.workspace = true -parking_lot = "0.12" bincode = { version = "1.3", optional = true } protobuf = { workspace = true, optional = true } diff --git a/frontend/rust-lib/lib-dispatch/src/module/module.rs b/frontend/rust-lib/lib-dispatch/src/module/module.rs index ae92cf9a0c..883225c1b0 100644 --- a/frontend/rust-lib/lib-dispatch/src/module/module.rs +++ b/frontend/rust-lib/lib-dispatch/src/module/module.rs @@ -1,3 +1,15 @@ +use crate::dispatcher::AFConcurrent; +use crate::prelude::{AFBoxFuture, AFStateMap}; +use crate::service::AFPluginHandler; +use crate::{ + errors::{DispatchError, InternalError}, + request::{payload::Payload, AFPluginEventRequest, FromAFPluginRequest}, + response::{AFPluginEventResponse, AFPluginResponder}, + service::{ + factory, AFPluginHandlerService, AFPluginServiceFactory, BoxService, BoxServiceFactory, + Service, ServiceRequest, 
ServiceResponse, + }, +}; use futures_core::ready; use nanoid::nanoid; use pin_project::pin_project; @@ -13,19 +25,6 @@ use std::{ task::{Context, Poll}, }; -use crate::dispatcher::AFConcurrent; -use crate::prelude::{AFBoxFuture, AFStateMap}; -use crate::service::AFPluginHandler; -use crate::{ - errors::{DispatchError, InternalError}, - request::{payload::Payload, AFPluginEventRequest, FromAFPluginRequest}, - response::{AFPluginEventResponse, AFPluginResponder}, - service::{ - factory, AFPluginHandlerService, AFPluginServiceFactory, BoxService, BoxServiceFactory, - Service, ServiceRequest, ServiceResponse, - }, -}; - pub type AFPluginMap = Rc>>; pub(crate) fn plugin_map_or_crash(plugins: Vec) -> AFPluginMap { let mut plugin_map: HashMap> = HashMap::new(); diff --git a/frontend/rust-lib/lib-infra/src/native/future.rs b/frontend/rust-lib/lib-infra/src/native/future.rs index 4d918d7e7c..0f1c174c55 100644 --- a/frontend/rust-lib/lib-infra/src/native/future.rs +++ b/frontend/rust-lib/lib-infra/src/native/future.rs @@ -2,7 +2,6 @@ use futures_core::future::BoxFuture; use futures_core::ready; use pin_project::pin_project; use std::{ - fmt::Debug, future::Future, pin::Pin, task::{Context, Poll}, @@ -33,33 +32,4 @@ where } } -#[pin_project] -pub struct FutureResult { - #[pin] - pub fut: Pin> + Sync + Send>>, -} - -impl FutureResult { - pub fn new(f: F) -> Self - where - F: Future> + Send + Sync + 'static, - { - Self { fut: Box::pin(f) } - } -} - -impl Future for FutureResult -where - T: Send + Sync, - E: Debug, -{ - type Output = Result; - - fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { - let this = self.as_mut().project(); - let result = ready!(this.fut.poll(cx)); - Poll::Ready(result) - } -} - pub type BoxResultFuture<'a, T, E> = BoxFuture<'a, Result>;