move to latest appflowy collab version (#5894)

* chore: move to latest appflowy collab version

* chore: filter mapping

* chore: remove mutex folder

* chore: cleanup borrow checker issues

* chore: fixed flowy user crate compilation errors

* chore: removed parking_lot crate

* chore: adjusting non-locking approach

* chore: remove with folder method

* chore: fix folder manager

* chore: fixed workspace database compilation errors

* chore: initialize database plugins

* chore: fix locks in flowy core

* chore: remove supabase

* chore: async traits

* chore: add mutexes in dart ffi

* chore: post rebase fixes

* chore: remove supabase dart code

* chore: fix deadlock

* chore: fix page_id is empty

* chore: use data source to init collab

* chore: fix user awareness test

* chore: fix database deadlock

* fix: initialize user awareness

* chore: fix open workspace test

* chore: fix import csv

* chore: fix update row meta deadlock

* chore: fix document size test

* fix: timestamp set/get type convert

* fix: calculation

* chore: revert Arc to Rc

* chore: attach plugin to database and database row

* chore: async get row

* chore: clippy

* chore: fix tauri build

* chore: clippy

* fix: duplicate view deadlock

* chore: fmt

* chore: tauri build

---------

Co-authored-by: nathan <nathan@appflowy.io>
Author: Bartosz Sypytkowski
Date: 2024-08-18 05:16:42 +02:00
Committed by: GitHub
Parent: c2d7c5360d
Commit: fd5299a13d
212 changed files with 5068 additions and 6341 deletions

View File

@@ -1,93 +1,93 @@
-import 'package:appflowy/env/cloud_env.dart';
-import 'package:appflowy/workspace/application/settings/prelude.dart';
-import 'package:appflowy/workspace/presentation/settings/pages/settings_account_view.dart';
-import 'package:appflowy/workspace/presentation/settings/widgets/setting_supabase_cloud.dart';
-import 'package:flutter_test/flutter_test.dart';
-import 'package:integration_test/integration_test.dart';
-import '../shared/util.dart';
-void main() {
-IntegrationTestWidgetsFlutterBinding.ensureInitialized();
-group('supabase auth', () {
-testWidgets('sign in with supabase', (tester) async {
-await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
-await tester.tapGoogleLoginInButton();
-await tester.expectToSeeHomePageWithGetStartedPage();
-});
-testWidgets('sign out with supabase', (tester) async {
-await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
-await tester.tapGoogleLoginInButton();
-// Open the setting page and sign out
-await tester.openSettings();
-await tester.openSettingsPage(SettingsPage.account);
-await tester.logout();
-// Go to the sign in page again
-await tester.pumpAndSettle(const Duration(seconds: 1));
-tester.expectToSeeGoogleLoginButton();
-});
-testWidgets('sign in as anonymous', (tester) async {
-await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
-await tester.tapSignInAsGuest();
-// should not see the sync setting page when sign in as anonymous
-await tester.openSettings();
-await tester.openSettingsPage(SettingsPage.account);
-// Scroll to sign-out
-await tester.scrollUntilVisible(
-find.byType(SignInOutButton),
-100,
-scrollable: find.findSettingsScrollable(),
-);
-await tester.tapButton(find.byType(SignInOutButton));
-tester.expectToSeeGoogleLoginButton();
-});
-// testWidgets('enable encryption', (tester) async {
-// await tester.initializeAppFlowy(cloudType: CloudType.supabase);
-// await tester.tapGoogleLoginInButton();
-// // Open the setting page and sign out
-// await tester.openSettings();
-// await tester.openSettingsPage(SettingsPage.cloud);
-// // the switch should be off by default
-// tester.assertEnableEncryptSwitchValue(false);
-// await tester.toggleEnableEncrypt();
-// // the switch should be on after toggling
-// tester.assertEnableEncryptSwitchValue(true);
-// // the switch can not be toggled back to off
-// await tester.toggleEnableEncrypt();
-// tester.assertEnableEncryptSwitchValue(true);
-// });
-testWidgets('enable sync', (tester) async {
-await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
-await tester.tapGoogleLoginInButton();
-// Open the setting page and sign out
-await tester.openSettings();
-await tester.openSettingsPage(SettingsPage.cloud);
-// the switch should be on by default
-tester.assertSupabaseEnableSyncSwitchValue(true);
-await tester.toggleEnableSync(SupabaseEnableSync);
-// the switch should be off
-tester.assertSupabaseEnableSyncSwitchValue(false);
-// the switch should be on after toggling
-await tester.toggleEnableSync(SupabaseEnableSync);
-tester.assertSupabaseEnableSyncSwitchValue(true);
-});
-});
-}
+// import 'package:appflowy/env/cloud_env.dart';
+// import 'package:appflowy/workspace/application/settings/prelude.dart';
+// import 'package:appflowy/workspace/presentation/settings/pages/settings_account_view.dart';
+// import 'package:appflowy/workspace/presentation/settings/widgets/setting_supabase_cloud.dart';
+// import 'package:flutter_test/flutter_test.dart';
+// import 'package:integration_test/integration_test.dart';
+// import '../shared/util.dart';
+// void main() {
+// IntegrationTestWidgetsFlutterBinding.ensureInitialized();
+// group('supabase auth', () {
+// testWidgets('sign in with supabase', (tester) async {
+// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
+// await tester.tapGoogleLoginInButton();
+// await tester.expectToSeeHomePageWithGetStartedPage();
+// });
+// testWidgets('sign out with supabase', (tester) async {
+// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
+// await tester.tapGoogleLoginInButton();
+// // Open the setting page and sign out
+// await tester.openSettings();
+// await tester.openSettingsPage(SettingsPage.account);
+// await tester.logout();
+// // Go to the sign in page again
+// await tester.pumpAndSettle(const Duration(seconds: 1));
+// tester.expectToSeeGoogleLoginButton();
+// });
+// testWidgets('sign in as anonymous', (tester) async {
+// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
+// await tester.tapSignInAsGuest();
+// // should not see the sync setting page when sign in as anonymous
+// await tester.openSettings();
+// await tester.openSettingsPage(SettingsPage.account);
+// // Scroll to sign-out
+// await tester.scrollUntilVisible(
+// find.byType(SignInOutButton),
+// 100,
+// scrollable: find.findSettingsScrollable(),
+// );
+// await tester.tapButton(find.byType(SignInOutButton));
+// tester.expectToSeeGoogleLoginButton();
+// });
+// // testWidgets('enable encryption', (tester) async {
+// // await tester.initializeAppFlowy(cloudType: CloudType.supabase);
+// // await tester.tapGoogleLoginInButton();
+// // // Open the setting page and sign out
+// // await tester.openSettings();
+// // await tester.openSettingsPage(SettingsPage.cloud);
+// // // the switch should be off by default
+// // tester.assertEnableEncryptSwitchValue(false);
+// // await tester.toggleEnableEncrypt();
+// // // the switch should be on after toggling
+// // tester.assertEnableEncryptSwitchValue(true);
+// // // the switch can not be toggled back to off
+// // await tester.toggleEnableEncrypt();
+// // tester.assertEnableEncryptSwitchValue(true);
+// // });
+// testWidgets('enable sync', (tester) async {
+// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
+// await tester.tapGoogleLoginInButton();
+// // Open the setting page and sign out
+// await tester.openSettings();
+// await tester.openSettingsPage(SettingsPage.cloud);
+// // the switch should be on by default
+// tester.assertSupabaseEnableSyncSwitchValue(true);
+// await tester.toggleEnableSync(SupabaseEnableSync);
+// // the switch should be off
+// tester.assertSupabaseEnableSyncSwitchValue(false);
+// // the switch should be on after toggling
+// await tester.toggleEnableSync(SupabaseEnableSync);
+// tester.assertSupabaseEnableSyncSwitchValue(true);
+// });
+// });
+// }

View File

@@ -2,7 +2,6 @@ import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/user/presentation/screens/sign_in_screen/widgets/widgets.dart';
import 'package:appflowy/workspace/presentation/settings/pages/settings_account_view.dart';
import 'package:appflowy/workspace/presentation/settings/widgets/setting_appflowy_cloud.dart';
-import 'package:appflowy/workspace/presentation/settings/widgets/setting_supabase_cloud.dart';
import 'package:appflowy/workspace/presentation/widgets/toggle/toggle.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
@@ -52,26 +51,6 @@ extension AppFlowyAuthTest on WidgetTester {
assert(isSwitched == value);
}
-void assertEnableEncryptSwitchValue(bool value) {
-assertSwitchValue(
-find.descendant(
-of: find.byType(EnableEncrypt),
-matching: find.byWidgetPredicate((widget) => widget is Switch),
-),
-value,
-);
-}
-void assertSupabaseEnableSyncSwitchValue(bool value) {
-assertSwitchValue(
-find.descendant(
-of: find.byType(SupabaseEnableSync),
-matching: find.byWidgetPredicate((widget) => widget is Switch),
-),
-value,
-);
-}
void assertAppFlowyCloudEnableSyncSwitchValue(bool value) {
assertToggleValue(
find.descendant(
@@ -82,15 +61,6 @@ extension AppFlowyAuthTest on WidgetTester {
);
}
-Future<void> toggleEnableEncrypt() async {
-final finder = find.descendant(
-of: find.byType(EnableEncrypt),
-matching: find.byWidgetPredicate((widget) => widget is Switch),
-);
-await tapButton(finder);
-}
Future<void> toggleEnableSync(Type syncButton) async {
final finder = find.descendant(
of: find.byType(syncButton),
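The Supabase-specific switch helpers are deleted above, while the generic toggleEnableSync(Type syncButton) helper and the AppFlowy Cloud assertion remain. A minimal sketch of how a test might drive the remaining cloud-sync toggle with those helpers; AppFlowyCloudEnableSync is an assumed widget type (it is not shown in this diff), and the expected state after one toggle is also an assumption.

// Sketch only: exercises the kept WidgetTester extension helpers.
// AppFlowyCloudEnableSync is a hypothetical widget name for the sync toggle.
Future<void> toggleAppFlowyCloudSync(WidgetTester tester) async {
  // Flip the sync toggle rendered by the (assumed) AppFlowyCloudEnableSync widget.
  await tester.toggleEnableSync(AppFlowyCloudEnableSync);

  // Assuming sync starts enabled, the toggle should now report false.
  tester.assertAppFlowyCloudEnableSyncSwitchValue(false);
}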

View File

@@ -7,7 +7,6 @@ import 'package:appflowy/startup/entry_point.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/user/application/auth/af_cloud_mock_auth_service.dart';
import 'package:appflowy/user/application/auth/auth_service.dart';
-import 'package:appflowy/user/application/auth/supabase_mock_auth_service.dart';
import 'package:appflowy/user/presentation/presentation.dart';
import 'package:appflowy/user/presentation/screens/sign_in_screen/widgets/widgets.dart';
import 'package:appflowy/workspace/application/settings/prelude.dart';
@@ -55,8 +54,6 @@ extension AppFlowyTestBase on WidgetTester {
switch (cloudType) {
case AuthenticatorType.local:
break;
-case AuthenticatorType.supabase:
-break;
case AuthenticatorType.appflowyCloudSelfHost:
rustEnvs["GOTRUE_ADMIN_EMAIL"] = "admin@example.com";
rustEnvs["GOTRUE_ADMIN_PASSWORD"] = "password";
@@ -75,13 +72,6 @@
case AuthenticatorType.local:
await useLocalServer();
break;
-case AuthenticatorType.supabase:
-await useTestSupabaseCloud();
-getIt.unregister<AuthService>();
-getIt.registerFactory<AuthService>(
-() => SupabaseMockAuthService(),
-);
-break;
case AuthenticatorType.appflowyCloudSelfHost:
await useTestSelfHostedAppFlowyCloud();
getIt.unregister<AuthService>();
@@ -242,13 +232,6 @@ extension AppFlowyFinderTestBase on CommonFinders {
}
}
-Future<void> useTestSupabaseCloud() async {
-await useSupabaseCloud(
-url: TestEnv.supabaseUrl,
-anonKey: TestEnv.supabaseAnonKey,
-);
-}
Future<void> useTestSelfHostedAppFlowyCloud() async {
await useSelfHostedAppFlowyCloudWithURL(TestEnv.afCloudUrl);
}

View File

@@ -174,7 +174,7 @@ SPEC CHECKSUMS:
file_picker: 09aa5ec1ab24135ccd7a1621c46c84134bfd6655
flowy_infra_ui: 0455e1fa8c51885aa1437848e361e99419f34ebc
Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7
-fluttertoast: e9a18c7be5413da53898f660530c56f35edfba9c
+fluttertoast: 723e187574b149e68e63ca4d39b837586b903cfa
image_picker_ios: 99dfe1854b4fa34d0364e74a78448a0151025425
integration_test: ce0a3ffa1de96d1a89ca0ac26fca7ea18a749ef4
irondash_engine_context: 3458bf979b90d616ffb8ae03a150bafe2e860cc9
@@ -196,4 +196,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: d0d9b4ff572d8695c38eb3f9b490f55cdfc57eca
-COCOAPODS: 1.15.2
+COCOAPODS: 1.11.3

View File

@@ -13,7 +13,6 @@ class AppFlowyConfiguration {
required this.device_id,
required this.platform,
required this.authenticator_type,
-required this.supabase_config,
required this.appflowy_cloud_config,
required this.envs,
});
@@ -28,41 +27,12 @@
final String device_id;
final String platform;
final int authenticator_type;
-final SupabaseConfiguration supabase_config;
final AppFlowyCloudConfiguration appflowy_cloud_config;
final Map<String, String> envs;
Map<String, dynamic> toJson() => _$AppFlowyConfigurationToJson(this);
}
-@JsonSerializable()
-class SupabaseConfiguration {
-SupabaseConfiguration({
-required this.url,
-required this.anon_key,
-});
-factory SupabaseConfiguration.fromJson(Map<String, dynamic> json) =>
-_$SupabaseConfigurationFromJson(json);
-/// Indicates whether the sync feature is enabled.
-final String url;
-final String anon_key;
-Map<String, dynamic> toJson() => _$SupabaseConfigurationToJson(this);
-static SupabaseConfiguration defaultConfig() {
-return SupabaseConfiguration(
-url: '',
-anon_key: '',
-);
-}
-bool get isValid {
-return url.isNotEmpty && anon_key.isNotEmpty;
-}
-}
@JsonSerializable()
class AppFlowyCloudConfiguration {
AppFlowyCloudConfiguration({

View File

@@ -21,9 +21,6 @@ Future<void> _setAuthenticatorType(AuthenticatorType ty) async {
case AuthenticatorType.local:
await getIt<KeyValueStorage>().set(KVKeys.kCloudType, 0.toString());
break;
-case AuthenticatorType.supabase:
-await getIt<KeyValueStorage>().set(KVKeys.kCloudType, 1.toString());
-break;
case AuthenticatorType.appflowyCloud:
await getIt<KeyValueStorage>().set(KVKeys.kCloudType, 2.toString());
break;
@@ -63,8 +60,6 @@ Future<AuthenticatorType> getAuthenticatorType() async {
switch (value ?? "0") {
case "0":
return AuthenticatorType.local;
-case "1":
-return AuthenticatorType.supabase;
case "2":
return AuthenticatorType.appflowyCloud;
case "3":
@@ -93,10 +88,6 @@ Future<AuthenticatorType> getAuthenticatorType() async {
/// Returns `false` otherwise.
bool get isAuthEnabled {
final env = getIt<AppFlowyCloudSharedEnv>();
-if (env.authenticatorType == AuthenticatorType.supabase) {
-return env.supabaseConfig.isValid;
-}
if (env.authenticatorType.isAppFlowyCloudEnabled) {
return env.appflowyCloudConfig.isValid;
}
@@ -104,19 +95,6 @@ bool get isAuthEnabled {
return false;
}
-/// Checks if Supabase is enabled.
-///
-/// This getter evaluates if Supabase should be enabled based on the
-/// current integration mode and cloud type setting.
-///
-/// Returns:
-/// A boolean value indicating whether Supabase is enabled. It returns `true`
-/// if the application is in release or develop mode and the current cloud type
-/// is `CloudType.supabase`. Otherwise, it returns `false`.
-bool get isSupabaseEnabled {
-return currentCloudType().isSupabaseEnabled;
-}
/// Determines if AppFlowy Cloud is enabled.
bool get isAppFlowyCloudEnabled {
return currentCloudType().isAppFlowyCloudEnabled;
@@ -124,7 +102,6 @@ bool get isAppFlowyCloudEnabled {
enum AuthenticatorType {
local,
-supabase,
appflowyCloud,
appflowyCloudSelfHost,
// The 'appflowyCloudDevelop' type is used for develop purposes only.
@@ -137,14 +114,10 @@ enum AuthenticatorType {
this == AuthenticatorType.appflowyCloudDevelop ||
this == AuthenticatorType.appflowyCloud;
-bool get isSupabaseEnabled => this == AuthenticatorType.supabase;
int get value {
switch (this) {
case AuthenticatorType.local:
return 0;
-case AuthenticatorType.supabase:
-return 1;
case AuthenticatorType.appflowyCloud:
return 2;
case AuthenticatorType.appflowyCloudSelfHost:
@@ -158,8 +131,6 @@ enum AuthenticatorType {
switch (value) {
case 0:
return AuthenticatorType.local;
-case 1:
-return AuthenticatorType.supabase;
case 2:
return AuthenticatorType.appflowyCloud;
case 3:
@@ -197,25 +168,15 @@ Future<void> useLocalServer() async {
await _setAuthenticatorType(AuthenticatorType.local);
}
-Future<void> useSupabaseCloud({
-required String url,
-required String anonKey,
-}) async {
-await _setAuthenticatorType(AuthenticatorType.supabase);
-await setSupabaseServer(url, anonKey);
-}
/// Use getIt<AppFlowyCloudSharedEnv>() to get the shared environment.
class AppFlowyCloudSharedEnv {
AppFlowyCloudSharedEnv({
required AuthenticatorType authenticatorType,
required this.appflowyCloudConfig,
-required this.supabaseConfig,
}) : _authenticatorType = authenticatorType;
final AuthenticatorType _authenticatorType;
final AppFlowyCloudConfiguration appflowyCloudConfig;
-final SupabaseConfiguration supabaseConfig;
AuthenticatorType get authenticatorType => _authenticatorType;
@@ -229,10 +190,6 @@ class AppFlowyCloudSharedEnv {
? await getAppFlowyCloudConfig(authenticatorType)
: AppFlowyCloudConfiguration.defaultConfig();
-final supabaseCloudConfig = authenticatorType.isSupabaseEnabled
-? await getSupabaseCloudConfig()
-: SupabaseConfiguration.defaultConfig();
// In the backend, the value '2' represents the use of AppFlowy Cloud. However, in the frontend,
// we distinguish between [AuthenticatorType.appflowyCloudSelfHost] and [AuthenticatorType.appflowyCloud].
// When the cloud type is [AuthenticatorType.appflowyCloudSelfHost] in the frontend, it should be
@@ -244,7 +201,6 @@ class AppFlowyCloudSharedEnv {
return AppFlowyCloudSharedEnv(
authenticatorType: authenticatorType,
appflowyCloudConfig: appflowyCloudConfig,
-supabaseConfig: supabaseCloudConfig,
);
} else {
// Using the cloud settings from the .env file.
@@ -257,7 +213,6 @@ class AppFlowyCloudSharedEnv {
return AppFlowyCloudSharedEnv(
authenticatorType: AuthenticatorType.fromValue(Env.authenticatorType),
appflowyCloudConfig: appflowyCloudConfig,
-supabaseConfig: SupabaseConfiguration.defaultConfig(),
);
}
}
@@ -265,8 +220,7 @@ class AppFlowyCloudSharedEnv {
@override
String toString() {
return 'authenticator: $_authenticatorType\n'
-'appflowy: ${appflowyCloudConfig.toJson()}\n'
-'supabase: ${supabaseConfig.toJson()})\n';
+'appflowy: ${appflowyCloudConfig.toJson()}\n';
}
}
@@ -354,22 +308,3 @@ Future<void> setSupabaseServer(
await getIt<KeyValueStorage>().set(KVKeys.kSupabaseAnonKey, anonKey);
}
}
-Future<SupabaseConfiguration> getSupabaseCloudConfig() async {
-final url = await _getSupabaseUrl();
-final anonKey = await _getSupabaseAnonKey();
-return SupabaseConfiguration(
-url: url,
-anon_key: anonKey,
-);
-}
-Future<String> _getSupabaseUrl() async {
-final result = await getIt<KeyValueStorage>().get(KVKeys.kSupabaseURL);
-return result ?? '';
-}
-Future<String> _getSupabaseAnonKey() async {
-final result = await getIt<KeyValueStorage>().get(KVKeys.kSupabaseAnonKey);
-return result ?? '';
-}
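With the supabase enum variant gone, a previously persisted cloud-type value of "1" no longer maps to anything. A small illustrative sketch of the string-to-enum mapping that remains in getAuthenticatorType; the helper name is hypothetical, the "3" case is assumed to map to appflowyCloudSelfHost (consistent with the value getter above), and the fallback for retired or unknown values is an assumption, since the default branch is not visible in the hunks.

// Sketch: map a stored KVKeys.kCloudType string back to an AuthenticatorType
// after the Supabase variant (value "1") was removed.
AuthenticatorType authenticatorTypeFromStoredValue(String? stored) {
  switch (stored ?? '0') {
    case '0':
      return AuthenticatorType.local;
    case '2':
      return AuthenticatorType.appflowyCloud;
    case '3':
      // Assumed mapping for the self-hosted variant.
      return AuthenticatorType.appflowyCloudSelfHost;
    default:
      // "1" used to mean Supabase; treating it as local is an assumption.
      return AuthenticatorType.local;
  }
}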

View File

@@ -37,6 +37,14 @@ class RowBackendService {
return DatabaseEventCreateRow(payload).send();
}
+Future<FlowyResult<void, FlowyError>> initRow(RowId rowId) async {
+final payload = RowIdPB()
+..viewId = viewId
+..rowId = rowId;
+return DatabaseEventInitRow(payload).send();
+}
Future<FlowyResult<RowMetaPB, FlowyError>> createRowBefore(RowId rowId) {
return createRow(
viewId: viewId,
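The new initRow method appears to ask the backend to initialize a database row before its data is used; the RowBloc hunk below wires it into the bloc constructor. A minimal usage sketch, assuming the imports for RowBackendService, the generated protobuf types, and Log are in place; the surrounding function and its error handling are illustrative only.

// Sketch: initialize a row on the backend before reading it.
// service.initRow and DatabaseEventInitRow come from the hunk above.
Future<void> openRow(String viewId, RowId rowId) async {
  final service = RowBackendService(viewId: viewId);

  final result = await service.initRow(rowId);
  result.fold(
    (_) {
      // The row is initialized; its cells can now be fetched or listened to.
    },
    (error) => Log.error('initRow failed for $rowId: $error'),
  );
}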

View File

@@ -23,6 +23,8 @@ class RowBloc extends Bloc<RowEvent, RowState> {
}) : _rowBackendSvc = RowBackendService(viewId: viewId),
_rowController = rowController,
super(RowState.initial()) {
+_rowBackendSvc.initRow(rowId);
_dispatch();
_startListening();
_init();

View File

@@ -12,7 +12,6 @@ import 'package:appflowy/startup/tasks/appflowy_cloud_task.dart';
import 'package:appflowy/user/application/ai_service.dart';
import 'package:appflowy/user/application/auth/af_cloud_auth_service.dart';
import 'package:appflowy/user/application/auth/auth_service.dart';
-import 'package:appflowy/user/application/auth/supabase_auth_service.dart';
import 'package:appflowy/user/application/prelude.dart';
import 'package:appflowy/user/application/reminder/reminder_bloc.dart';
import 'package:appflowy/user/application/user_listener.dart';
@@ -124,9 +123,6 @@ void _resolveUserDeps(GetIt getIt, IntegrationMode mode) {
),
);
break;
-case AuthenticatorType.supabase:
-getIt.registerFactory<AuthService>(() => SupabaseAuthService());
-break;
case AuthenticatorType.appflowyCloud:
case AuthenticatorType.appflowyCloudSelfHost:
case AuthenticatorType.appflowyCloudDevelop:

View File

@@ -133,7 +133,6 @@ class FlowyRunner {
// It is unable to get the device information from the test environment.
const ApplicationInfoTask(),
const HotKeyTask(),
-if (isSupabaseEnabled) InitSupabaseTask(),
if (isAppFlowyCloudEnabled) InitAppFlowyCloudTask(),
const InitAppWidgetTask(),
const InitPlatformServiceTask(),

View File

@@ -7,7 +7,6 @@ import 'package:app_links/app_links.dart';
import 'package:appflowy/env/cloud_env.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/startup/tasks/app_widget.dart';
-import 'package:appflowy/startup/tasks/supabase_task.dart';
import 'package:appflowy/user/application/auth/auth_error.dart';
import 'package:appflowy/user/application/auth/auth_service.dart';
import 'package:appflowy/user/application/auth/device_id.dart';
@@ -22,6 +21,8 @@ import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:url_protocol/url_protocol.dart';
+const appflowyDeepLinkSchema = 'appflowy-flutter';
class AppFlowyCloudDeepLink {
AppFlowyCloudDeepLink() {
if (_deeplinkSubscription == null) {
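The appflowyDeepLinkSchema constant moves here from the deleted supabase_task.dart (shown further below), which previously registered it as a Windows protocol handler. A sketch of that registration using the relocated constant; the function name and its placement are assumptions, while Platform.isWindows and registerProtocolHandler mirror the removed file.

// Sketch only: register the appflowy-flutter:// scheme for login callbacks on Windows.
import 'dart:io';
import 'package:url_protocol/url_protocol.dart';
// (import of the file defining appflowyDeepLinkSchema omitted)

void registerAppFlowyDeepLink() {
  if (Platform.isWindows) {
    // Deep links are only registered this way on Windows, as in the removed task.
    registerProtocolHandler(appflowyDeepLinkSchema);
  }
}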

View File

@@ -12,5 +12,4 @@ export 'platform_service.dart';
export 'recent_service_task.dart';
export 'rust_sdk.dart';
export 'sentry.dart';
-export 'supabase_task.dart';
export 'windows.dart';

View File

@@ -63,7 +63,6 @@ AppFlowyConfiguration _makeAppFlowyConfiguration(
device_id: deviceId,
platform: Platform.operatingSystem,
authenticator_type: env.authenticatorType.value,
-supabase_config: env.supabaseConfig,
appflowy_cloud_config: env.appflowyCloudConfig,
envs: rustEnvs,
);

View File

@@ -1,118 +0,0 @@
import 'dart:async';
import 'dart:io';
import 'package:appflowy/env/cloud_env.dart';
import 'package:appflowy/user/application/supabase_realtime.dart';
import 'package:appflowy/workspace/application/settings/application_data_storage.dart';
import 'package:flutter/foundation.dart';
import 'package:hive_flutter/hive_flutter.dart';
import 'package:path/path.dart' as p;
import 'package:supabase_flutter/supabase_flutter.dart';
import 'package:url_protocol/url_protocol.dart';
import '../startup.dart';
// ONLY supports in macOS and Windows now.
//
// If you need to update the schema, please update the following files:
// - appflowy_flutter/macos/Runner/Info.plist (macOS)
// - the callback url in Supabase dashboard
const appflowyDeepLinkSchema = 'appflowy-flutter';
const supabaseLoginCallback = '$appflowyDeepLinkSchema://login-callback';
const hiveBoxName = 'appflowy_supabase_authentication';
// Used to store the session of the supabase in case of the user switch the different folder.
Supabase? supabase;
SupabaseRealtimeService? realtimeService;
class InitSupabaseTask extends LaunchTask {
@override
Future<void> initialize(LaunchContext context) async {
if (!isSupabaseEnabled) {
return;
}
await supabase?.dispose();
supabase = null;
final initializedSupabase = await Supabase.initialize(
url: getIt<AppFlowyCloudSharedEnv>().supabaseConfig.url,
anonKey: getIt<AppFlowyCloudSharedEnv>().supabaseConfig.anon_key,
debug: kDebugMode,
authOptions: const FlutterAuthClientOptions(
localStorage: SupabaseLocalStorage(),
),
);
if (realtimeService != null) {
await realtimeService?.dispose();
realtimeService = null;
}
realtimeService = SupabaseRealtimeService(supabase: initializedSupabase);
supabase = initializedSupabase;
if (Platform.isWindows) {
// register deep link for Windows
registerProtocolHandler(appflowyDeepLinkSchema);
}
}
@override
Future<void> dispose() async {
await realtimeService?.dispose();
realtimeService = null;
await supabase?.dispose();
supabase = null;
}
}
/// customize the supabase auth storage
///
/// We don't use the default one because it always save the session in the document directory.
/// When we switch to the different folder, the session still exists.
class SupabaseLocalStorage extends LocalStorage {
const SupabaseLocalStorage();
@override
Future<void> initialize() async {
HiveCipher? encryptionCipher;
// customize the path for Hive
final path = await getIt<ApplicationDataStorage>().getPath();
Hive.init(p.join(path, 'supabase_auth'));
await Hive.openBox(
hiveBoxName,
encryptionCipher: encryptionCipher,
);
}
@override
Future<bool> hasAccessToken() {
return Future.value(
Hive.box(hiveBoxName).containsKey(
supabasePersistSessionKey,
),
);
}
@override
Future<String?> accessToken() {
return Future.value(
Hive.box(hiveBoxName).get(supabasePersistSessionKey) as String?,
);
}
@override
Future<void> removePersistedSession() {
return Hive.box(hiveBoxName).delete(supabasePersistSessionKey);
}
@override
Future<void> persistSession(String persistSessionString) {
return Hive.box(hiveBoxName).put(
supabasePersistSessionKey,
persistSessionString,
);
}
}

View File

@@ -20,7 +20,7 @@ class AppFlowyCloudMockAuthService implements AuthService {
final String userEmail;
final BackendAuthService _appFlowyAuthService =
-BackendAuthService(AuthenticatorPB.Supabase);
+BackendAuthService(AuthenticatorPB.AppFlowyCloud);
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signUp({

View File

@@ -1,252 +0,0 @@
import 'dart:async';
import 'package:appflowy/startup/tasks/prelude.dart';
import 'package:appflowy/user/application/auth/auth_service.dart';
import 'package:appflowy/user/application/auth/backend_auth_service.dart';
import 'package:appflowy/user/application/auth/device_id.dart';
import 'package:appflowy/user/application/user_service.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:flutter/foundation.dart';
import 'package:supabase_flutter/supabase_flutter.dart';
import 'auth_error.dart';
class SupabaseAuthService implements AuthService {
SupabaseAuthService();
SupabaseClient get _client => Supabase.instance.client;
GoTrueClient get _auth => _client.auth;
final BackendAuthService _backendAuthService = BackendAuthService(
AuthenticatorPB.Supabase,
);
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signUp({
required String name,
required String email,
required String password,
Map<String, String> params = const {},
}) async {
// fetch the uuid from supabase.
final response = await _auth.signUp(
email: email,
password: password,
);
final uuid = response.user?.id;
if (uuid == null) {
return FlowyResult.failure(AuthError.supabaseSignUpError);
}
// assign the uuid to our backend service.
// and will transfer this logic to backend later.
return _backendAuthService.signUp(
name: name,
email: email,
password: password,
params: {
AuthServiceMapKeys.uuid: uuid,
},
);
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signInWithEmailPassword({
required String email,
required String password,
Map<String, String> params = const {},
}) async {
try {
final response = await _auth.signInWithPassword(
email: email,
password: password,
);
final uuid = response.user?.id;
if (uuid == null) {
return FlowyResult.failure(AuthError.supabaseSignInError);
}
return _backendAuthService.signInWithEmailPassword(
email: email,
password: password,
params: {
AuthServiceMapKeys.uuid: uuid,
},
);
} on AuthException catch (e) {
Log.error(e);
return FlowyResult.failure(AuthError.supabaseSignInError);
}
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signUpWithOAuth({
required String platform,
Map<String, String> params = const {},
}) async {
// Before signing in, sign out any existing users. Otherwise, the callback will be triggered even if the user doesn't click the 'Sign In' button on the website
if (_auth.currentUser != null) {
await _auth.signOut();
}
final provider = platform.toProvider();
final completer = supabaseLoginCompleter(
onSuccess: (userId, userEmail) async {
return _setupAuth(
map: {
AuthServiceMapKeys.uuid: userId,
AuthServiceMapKeys.email: userEmail,
AuthServiceMapKeys.deviceId: await getDeviceId(),
},
);
},
);
final response = await _auth.signInWithOAuth(
provider,
queryParams: queryParamsForProvider(provider),
redirectTo: supabaseLoginCallback,
);
if (!response) {
completer.complete(
FlowyResult.failure(AuthError.supabaseSignInWithOauthError),
);
}
return completer.future;
}
@override
Future<void> signOut() async {
await _auth.signOut();
await _backendAuthService.signOut();
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signUpAsGuest({
Map<String, String> params = const {},
}) async {
// supabase don't support guest login.
// so, just forward to our backend.
return _backendAuthService.signUpAsGuest();
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signInWithMagicLink({
required String email,
Map<String, String> params = const {},
}) async {
final completer = supabaseLoginCompleter(
onSuccess: (userId, userEmail) async {
return _setupAuth(
map: {
AuthServiceMapKeys.uuid: userId,
AuthServiceMapKeys.email: userEmail,
AuthServiceMapKeys.deviceId: await getDeviceId(),
},
);
},
);
await _auth.signInWithOtp(
email: email,
emailRedirectTo: kIsWeb ? null : supabaseLoginCallback,
);
return completer.future;
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> getUser() async {
return UserBackendService.getCurrentUserProfile();
}
Future<FlowyResult<User, FlowyError>> getSupabaseUser() async {
final user = _auth.currentUser;
if (user == null) {
return FlowyResult.failure(AuthError.supabaseGetUserError);
}
return FlowyResult.success(user);
}
Future<FlowyResult<UserProfilePB, FlowyError>> _setupAuth({
required Map<String, String> map,
}) async {
final payload = OauthSignInPB(
authenticator: AuthenticatorPB.Supabase,
map: map,
);
return UserEventOauthSignIn(payload).send().then((value) => value);
}
}
extension on String {
OAuthProvider toProvider() {
switch (this) {
case 'github':
return OAuthProvider.github;
case 'google':
return OAuthProvider.google;
case 'discord':
return OAuthProvider.discord;
default:
throw UnimplementedError();
}
}
}
/// Creates a completer that listens to Supabase authentication state changes and
/// completes when a user signs in.
///
/// This function sets up a listener on Supabase's authentication state. When a user
/// signs in, it triggers the provided [onSuccess] callback with the user's `id` and
/// `email`. Once the [onSuccess] callback is executed and a response is received,
/// the completer completes with the response, and the listener is canceled.
///
/// Parameters:
/// - [onSuccess]: A callback function that's executed when a user signs in. It
/// should take in a user's `id` and `email` and return a `Future` containing either
/// a `FlowyError` or a `UserProfilePB`.
///
/// Returns:
/// A completer of type `FlowyResult<UserProfilePB, FlowyError>`. This completer completes
/// with the response from the [onSuccess] callback when a user signs in.
Completer<FlowyResult<UserProfilePB, FlowyError>> supabaseLoginCompleter({
required Future<FlowyResult<UserProfilePB, FlowyError>> Function(
String userId,
String userEmail,
) onSuccess,
}) {
final completer = Completer<FlowyResult<UserProfilePB, FlowyError>>();
late final StreamSubscription<AuthState> subscription;
final auth = Supabase.instance.client.auth;
subscription = auth.onAuthStateChange.listen((event) async {
final user = event.session?.user;
if (event.event == AuthChangeEvent.signedIn && user != null) {
final response = await onSuccess(
user.id,
user.email ?? user.newEmail ?? '',
);
// Only cancel the subscription if the Event is signedIn.
await subscription.cancel();
completer.complete(response);
}
});
return completer;
}
Map<String, String> queryParamsForProvider(OAuthProvider provider) {
switch (provider) {
case OAuthProvider.google:
return {
'access_type': 'offline',
'prompt': 'consent',
};
case OAuthProvider.github:
case OAuthProvider.discord:
default:
return {};
}
}

View File

@@ -1,113 +0,0 @@
import 'dart:async';
import 'package:appflowy/user/application/auth/auth_service.dart';
import 'package:appflowy/user/application/auth/backend_auth_service.dart';
import 'package:appflowy/user/application/user_service.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:supabase_flutter/supabase_flutter.dart';
import 'auth_error.dart';
/// Only used for testing.
class SupabaseMockAuthService implements AuthService {
SupabaseMockAuthService();
static OauthSignInPB? signInPayload;
SupabaseClient get _client => Supabase.instance.client;
GoTrueClient get _auth => _client.auth;
final BackendAuthService _appFlowyAuthService =
BackendAuthService(AuthenticatorPB.Supabase);
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signUp({
required String name,
required String email,
required String password,
Map<String, String> params = const {},
}) async {
throw UnimplementedError();
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signInWithEmailPassword({
required String email,
required String password,
Map<String, String> params = const {},
}) async {
throw UnimplementedError();
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signUpWithOAuth({
required String platform,
Map<String, String> params = const {},
}) async {
const password = "AppFlowyTest123!";
const email = "supabase_integration_test@appflowy.io";
try {
if (_auth.currentSession == null) {
try {
await _auth.signInWithPassword(
password: password,
email: email,
);
} catch (e) {
Log.error(e);
return FlowyResult.failure(AuthError.supabaseSignUpError);
}
}
// Check if the user is already logged in.
final session = _auth.currentSession!;
final uuid = session.user.id;
// Create the OAuth sign-in payload.
final payload = OauthSignInPB(
authenticator: AuthenticatorPB.Supabase,
map: {
AuthServiceMapKeys.uuid: uuid,
AuthServiceMapKeys.email: email,
AuthServiceMapKeys.deviceId: 'MockDeviceId',
},
);
// Send the sign-in event and handle the response.
return UserEventOauthSignIn(payload).send().then((value) => value);
} on AuthException catch (e) {
Log.error(e);
return FlowyResult.failure(AuthError.supabaseSignInError);
}
}
@override
Future<void> signOut() async {
// await _auth.signOut();
await _appFlowyAuthService.signOut();
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signUpAsGuest({
Map<String, String> params = const {},
}) async {
// supabase don't support guest login.
// so, just forward to our backend.
return _appFlowyAuthService.signUpAsGuest();
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> signInWithMagicLink({
required String email,
Map<String, String> params = const {},
}) async {
throw UnimplementedError();
}
@override
Future<FlowyResult<UserProfilePB, FlowyError>> getUser() async {
return UserBackendService.getCurrentUserProfile();
}
}

View File

@@ -90,7 +90,6 @@ class SettingsDialogBloc
]) async {
if ([
AuthenticatorPB.Local,
-AuthenticatorPB.Supabase,
].contains(userProfile.authenticator)) {
return false;
}

View File

@@ -1,103 +0,0 @@
import 'package:appflowy/env/backend_env.dart';
import 'package:appflowy/env/cloud_env.dart';
import 'package:appflowy/plugins/database/application/defines.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
import 'cloud_setting_listener.dart';
part 'supabase_cloud_setting_bloc.freezed.dart';
class SupabaseCloudSettingBloc
extends Bloc<SupabaseCloudSettingEvent, SupabaseCloudSettingState> {
SupabaseCloudSettingBloc({
required CloudSettingPB setting,
}) : _listener = UserCloudConfigListener(),
super(SupabaseCloudSettingState.initial(setting)) {
_dispatch();
}
final UserCloudConfigListener _listener;
@override
Future<void> close() async {
await _listener.stop();
return super.close();
}
void _dispatch() {
on<SupabaseCloudSettingEvent>(
(event, emit) async {
await event.when(
initial: () async {
_listener.start(
onSettingChanged: (result) {
if (isClosed) {
return;
}
result.fold(
(setting) =>
add(SupabaseCloudSettingEvent.didReceiveSetting(setting)),
(error) => Log.error(error),
);
},
);
},
enableSync: (bool enable) async {
final update = UpdateCloudConfigPB.create()..enableSync = enable;
await updateCloudConfig(update);
},
didReceiveSetting: (CloudSettingPB setting) {
emit(
state.copyWith(
setting: setting,
loadingState: LoadingState.finish(FlowyResult.success(null)),
),
);
},
enableEncrypt: (bool enable) {
final update = UpdateCloudConfigPB.create()..enableEncrypt = enable;
updateCloudConfig(update);
emit(state.copyWith(loadingState: const LoadingState.loading()));
},
);
},
);
}
Future<void> updateCloudConfig(UpdateCloudConfigPB setting) async {
await UserEventSetCloudConfig(setting).send();
}
}
@freezed
class SupabaseCloudSettingEvent with _$SupabaseCloudSettingEvent {
const factory SupabaseCloudSettingEvent.initial() = _Initial;
const factory SupabaseCloudSettingEvent.didReceiveSetting(
CloudSettingPB setting,
) = _DidSyncSupabaseConfig;
const factory SupabaseCloudSettingEvent.enableSync(bool enable) = _EnableSync;
const factory SupabaseCloudSettingEvent.enableEncrypt(bool enable) =
_EnableEncrypt;
}
@freezed
class SupabaseCloudSettingState with _$SupabaseCloudSettingState {
const factory SupabaseCloudSettingState({
required LoadingState loadingState,
required SupabaseConfiguration config,
required CloudSettingPB setting,
}) = _SupabaseCloudSettingState;
factory SupabaseCloudSettingState.initial(CloudSettingPB setting) =>
SupabaseCloudSettingState(
loadingState: LoadingState.finish(FlowyResult.success(null)),
setting: setting,
config: getIt<AppFlowyCloudSharedEnv>().supabaseConfig,
);
}

View File

@@ -1,128 +0,0 @@
import 'package:appflowy/env/backend_env.dart';
import 'package:appflowy/env/cloud_env.dart';
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
import 'appflowy_cloud_setting_bloc.dart';
part 'supabase_cloud_urls_bloc.freezed.dart';
class SupabaseCloudURLsBloc
extends Bloc<SupabaseCloudURLsEvent, SupabaseCloudURLsState> {
SupabaseCloudURLsBloc() : super(SupabaseCloudURLsState.initial()) {
on<SupabaseCloudURLsEvent>((event, emit) async {
await event.when(
updateUrl: (String url) {
emit(
state.copyWith(
updatedUrl: url,
showRestartHint: url.isNotEmpty && state.upatedAnonKey.isNotEmpty,
urlError: null,
),
);
},
updateAnonKey: (String anonKey) {
emit(
state.copyWith(
upatedAnonKey: anonKey,
showRestartHint:
anonKey.isNotEmpty && state.updatedUrl.isNotEmpty,
anonKeyError: null,
),
);
},
confirmUpdate: () async {
if (state.updatedUrl.isEmpty) {
emit(
state.copyWith(
urlError:
LocaleKeys.settings_menu_cloudSupabaseUrlCanNotBeEmpty.tr(),
anonKeyError: null,
restartApp: false,
),
);
return;
}
if (state.upatedAnonKey.isEmpty) {
emit(
state.copyWith(
urlError: null,
anonKeyError: LocaleKeys
.settings_menu_cloudSupabaseAnonKeyCanNotBeEmpty
.tr(),
restartApp: false,
),
);
return;
}
validateUrl(state.updatedUrl).fold(
(_) async {
await useSupabaseCloud(
url: state.updatedUrl,
anonKey: state.upatedAnonKey,
);
add(const SupabaseCloudURLsEvent.didSaveConfig());
},
(error) => emit(state.copyWith(urlError: error)),
);
},
didSaveConfig: () {
emit(
state.copyWith(
urlError: null,
anonKeyError: null,
restartApp: true,
),
);
},
);
});
}
Future<void> updateCloudConfig(UpdateCloudConfigPB setting) async {
await UserEventSetCloudConfig(setting).send();
}
}
@freezed
class SupabaseCloudURLsEvent with _$SupabaseCloudURLsEvent {
const factory SupabaseCloudURLsEvent.updateUrl(String text) = _UpdateUrl;
const factory SupabaseCloudURLsEvent.updateAnonKey(String text) =
_UpdateAnonKey;
const factory SupabaseCloudURLsEvent.confirmUpdate() = _UpdateConfig;
const factory SupabaseCloudURLsEvent.didSaveConfig() = _DidSaveConfig;
}
@freezed
class SupabaseCloudURLsState with _$SupabaseCloudURLsState {
const factory SupabaseCloudURLsState({
required SupabaseConfiguration config,
required String updatedUrl,
required String upatedAnonKey,
required String? urlError,
required String? anonKeyError,
required bool restartApp,
required bool showRestartHint,
}) = _SupabaseCloudURLsState;
factory SupabaseCloudURLsState.initial() {
final config = getIt<AppFlowyCloudSharedEnv>().supabaseConfig;
return SupabaseCloudURLsState(
updatedUrl: config.url,
upatedAnonKey: config.anon_key,
urlError: null,
anonKeyError: null,
restartApp: false,
showRestartHint: config.url.isNotEmpty && config.anon_key.isNotEmpty,
config: config,
);
}
}

View File

@@ -22,7 +22,6 @@ import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:go_router/go_router.dart';
import 'setting_appflowy_cloud.dart';
-import 'setting_supabase_cloud.dart';
class SettingCloud extends StatelessWidget {
const SettingCloud({required this.restartAppFlowy, super.key});
@@ -80,8 +79,6 @@ class SettingCloud extends StatelessWidget {
switch (cloudType) {
case AuthenticatorType.local:
return SettingLocalCloud(restartAppFlowy: restartAppFlowy);
-case AuthenticatorType.supabase:
-return SettingSupabaseCloudView(restartAppFlowy: restartAppFlowy);
case AuthenticatorType.appflowyCloud:
return AppFlowyCloudViewSetting(restartAppFlowy: restartAppFlowy);
case AuthenticatorType.appflowyCloudSelfHost:
@@ -112,9 +109,6 @@ class CloudTypeSwitcher extends StatelessWidget {
// Only show the appflowyCloudDevelop in develop mode
final values = AuthenticatorType.values.where((element) {
// Supabase will going to be removed in the future
-if (element == AuthenticatorType.supabase) {
-return false;
-}
return isDevelopMode || element != AuthenticatorType.appflowyCloudDevelop;
}).toList();
@@ -218,8 +212,6 @@ String titleFromCloudType(AuthenticatorType cloudType) {
switch (cloudType) {
case AuthenticatorType.local:
return LocaleKeys.settings_menu_cloudLocal.tr();
-case AuthenticatorType.supabase:
-return LocaleKeys.settings_menu_cloudSupabase.tr();
case AuthenticatorType.appflowyCloud:
return LocaleKeys.settings_menu_cloudAppFlowy.tr();
case AuthenticatorType.appflowyCloudSelfHost:

View File

@@ -1,339 +0,0 @@
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:appflowy/core/helpers/url_launcher.dart';
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/workspace/application/settings/supabase_cloud_setting_bloc.dart';
import 'package:appflowy/workspace/application/settings/supabase_cloud_urls_bloc.dart';
import 'package:appflowy/workspace/presentation/home/toast.dart';
import 'package:appflowy/workspace/presentation/settings/widgets/_restart_app_button.dart';
import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/user_setting.pb.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flowy_infra/size.dart';
import 'package:flowy_infra/theme_extension.dart';
import 'package:flowy_infra_ui/flowy_infra_ui.dart';
import 'package:flowy_infra_ui/widget/error_page.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
class SettingSupabaseCloudView extends StatelessWidget {
const SettingSupabaseCloudView({required this.restartAppFlowy, super.key});
final VoidCallback restartAppFlowy;
@override
Widget build(BuildContext context) {
return FutureBuilder<FlowyResult<CloudSettingPB, FlowyError>>(
future: UserEventGetCloudConfig().send(),
builder: (context, snapshot) {
if (snapshot.data != null &&
snapshot.connectionState == ConnectionState.done) {
return snapshot.data!.fold(
(setting) {
return BlocProvider(
create: (context) => SupabaseCloudSettingBloc(
setting: setting,
)..add(const SupabaseCloudSettingEvent.initial()),
child: Column(
children: [
BlocBuilder<SupabaseCloudSettingBloc,
SupabaseCloudSettingState>(
builder: (context, state) {
return const Column(
children: [
SupabaseEnableSync(),
EnableEncrypt(),
],
);
},
),
const VSpace(40),
const SupabaseSelfhostTip(),
SupabaseCloudURLs(
didUpdateUrls: restartAppFlowy,
),
],
),
);
},
(err) {
return FlowyErrorPage.message(err.toString(), howToFix: "");
},
);
} else {
return const Center(
child: CircularProgressIndicator(),
);
}
},
);
}
}
class SupabaseCloudURLs extends StatelessWidget {
const SupabaseCloudURLs({super.key, required this.didUpdateUrls});
final VoidCallback didUpdateUrls;
@override
Widget build(BuildContext context) {
return BlocProvider(
create: (context) => SupabaseCloudURLsBloc(),
child: BlocListener<SupabaseCloudURLsBloc, SupabaseCloudURLsState>(
listener: (context, state) async {
if (state.restartApp) {
didUpdateUrls();
}
},
child: BlocBuilder<SupabaseCloudURLsBloc, SupabaseCloudURLsState>(
builder: (context, state) {
return Column(
children: [
SupabaseInput(
title: LocaleKeys.settings_menu_cloudSupabaseUrl.tr(),
url: state.config.url,
hint: LocaleKeys.settings_menu_cloudURLHint.tr(),
onChanged: (text) {
context
.read<SupabaseCloudURLsBloc>()
.add(SupabaseCloudURLsEvent.updateUrl(text));
},
error: state.urlError,
),
SupabaseInput(
title: LocaleKeys.settings_menu_cloudSupabaseAnonKey.tr(),
url: state.config.anon_key,
hint: LocaleKeys.settings_menu_cloudURLHint.tr(),
onChanged: (text) {
context
.read<SupabaseCloudURLsBloc>()
.add(SupabaseCloudURLsEvent.updateAnonKey(text));
},
error: state.anonKeyError,
),
const VSpace(20),
RestartButton(
onClick: () => _restartApp(context),
showRestartHint: state.showRestartHint,
),
],
);
},
),
),
);
}
void _restartApp(BuildContext context) {
NavigatorAlertDialog(
title: LocaleKeys.settings_menu_restartAppTip.tr(),
confirm: () => context
.read<SupabaseCloudURLsBloc>()
.add(const SupabaseCloudURLsEvent.confirmUpdate()),
).show(context);
}
}
class EnableEncrypt extends StatelessWidget {
const EnableEncrypt({super.key});
@override
Widget build(BuildContext context) {
return BlocBuilder<SupabaseCloudSettingBloc, SupabaseCloudSettingState>(
builder: (context, state) {
final indicator = state.loadingState.when(
loading: () => const CircularProgressIndicator.adaptive(),
finish: (successOrFail) => const SizedBox.shrink(),
idle: () => const SizedBox.shrink(),
);
return Column(
children: [
Row(
children: [
FlowyText.medium(LocaleKeys.settings_menu_enableEncrypt.tr()),
const Spacer(),
indicator,
const HSpace(3),
Switch.adaptive(
activeColor: Theme.of(context).colorScheme.primary,
onChanged: state.setting.enableEncrypt
? null
: (bool value) {
context.read<SupabaseCloudSettingBloc>().add(
SupabaseCloudSettingEvent.enableEncrypt(value),
);
},
value: state.setting.enableEncrypt,
),
],
),
Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
IntrinsicHeight(
child: Opacity(
opacity: 0.6,
child: FlowyText.medium(
LocaleKeys.settings_menu_enableEncryptPrompt.tr(),
maxLines: 13,
),
),
),
const VSpace(6),
SizedBox(
height: 40,
child: FlowyTooltip(
message: LocaleKeys.settings_menu_clickToCopySecret.tr(),
child: FlowyButton(
disable: !state.setting.enableEncrypt,
decoration: BoxDecoration(
borderRadius: Corners.s5Border,
border: Border.all(
color: Theme.of(context).colorScheme.secondary,
),
),
text: FlowyText.medium(state.setting.encryptSecret),
onTap: () async {
await Clipboard.setData(
ClipboardData(text: state.setting.encryptSecret),
);
showMessageToast(LocaleKeys.message_copy_success.tr());
},
),
),
),
],
),
],
);
},
);
}
}
class SupabaseEnableSync extends StatelessWidget {
const SupabaseEnableSync({super.key});
@override
Widget build(BuildContext context) {
return BlocBuilder<SupabaseCloudSettingBloc, SupabaseCloudSettingState>(
builder: (context, state) {
return Row(
children: [
FlowyText.medium(LocaleKeys.settings_menu_enableSync.tr()),
const Spacer(),
Switch.adaptive(
activeColor: Theme.of(context).colorScheme.primary,
onChanged: (bool value) {
context.read<SupabaseCloudSettingBloc>().add(
SupabaseCloudSettingEvent.enableSync(value),
);
},
value: state.setting.enableSync,
),
],
);
},
);
}
}
@visibleForTesting
class SupabaseInput extends StatefulWidget {
const SupabaseInput({
super.key,
required this.title,
required this.url,
required this.hint,
required this.error,
required this.onChanged,
});
final String title;
final String url;
final String hint;
final String? error;
final Function(String) onChanged;
@override
SupabaseInputState createState() => SupabaseInputState();
}
class SupabaseInputState extends State<SupabaseInput> {
late final _controller = TextEditingController(text: widget.url);
@override
void dispose() {
_controller.dispose();
super.dispose();
}
@override
Widget build(BuildContext context) {
return TextField(
controller: _controller,
style: const TextStyle(fontSize: 12.0),
decoration: InputDecoration(
contentPadding: const EdgeInsets.symmetric(vertical: 6),
labelText: widget.title,
labelStyle: Theme.of(context)
.textTheme
.titleMedium!
.copyWith(fontWeight: FontWeight.w400, fontSize: 16),
enabledBorder: UnderlineInputBorder(
borderSide:
BorderSide(color: AFThemeExtension.of(context).onBackground),
),
focusedBorder: UnderlineInputBorder(
borderSide: BorderSide(color: Theme.of(context).colorScheme.primary),
),
hintText: widget.hint,
errorText: widget.error,
),
onChanged: widget.onChanged,
);
}
}
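
/// Hint row linking to the guide for self-hosting AppFlowy with Supabase.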
class SupabaseSelfhostTip extends StatelessWidget {
const SupabaseSelfhostTip({super.key});
final url =
"https://docs.appflowy.io/docs/guides/appflowy/self-hosting-appflowy-using-supabase";
@override
Widget build(BuildContext context) {
return Opacity(
opacity: 0.6,
child: RichText(
text: TextSpan(
children: <TextSpan>[
TextSpan(
text: LocaleKeys.settings_menu_selfHostStart.tr(),
style: Theme.of(context).textTheme.bodySmall!,
),
TextSpan(
text: " ${LocaleKeys.settings_menu_selfHostContent.tr()} ",
style: Theme.of(context).textTheme.bodyMedium!.copyWith(
fontSize: FontSizes.s14,
color: Theme.of(context).colorScheme.primary,
decoration: TextDecoration.underline,
),
recognizer: TapGestureRecognizer()
..onTap = () => afLaunchUrlString(url),
),
TextSpan(
text: LocaleKeys.settings_menu_selfHostEnd.tr(),
style: Theme.of(context).textTheme.bodySmall!,
),
],
),
),
);
}
}


@@ -172,7 +172,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
 [[package]]
 name = "app-error"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bincode",
@@ -192,7 +192,7 @@ dependencies = [
 [[package]]
 name = "appflowy-ai-client"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bytes",
@@ -826,11 +826,12 @@ dependencies = [
 [[package]]
 name = "client-api"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "again",
 "anyhow",
 "app-error",
+"arc-swap",
 "async-trait",
 "bincode",
 "brotli",
@@ -876,7 +877,7 @@ dependencies = [
 [[package]]
 name = "client-api-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "collab-entity",
 "collab-rt-entity",
@@ -888,7 +889,7 @@ dependencies = [
 [[package]]
 name = "client-websocket"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "futures-channel",
 "futures-util",
@@ -962,15 +963,16 @@ dependencies = [
 [[package]]
 name = "collab"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
+"arc-swap",
 "async-trait",
 "bincode",
 "bytes",
 "chrono",
 "js-sys",
-"parking_lot 0.12.1",
+"lazy_static",
 "serde",
 "serde_json",
 "serde_repr",
@@ -986,7 +988,7 @@ dependencies = [
 [[package]]
 name = "collab-database"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -995,11 +997,11 @@ dependencies = [
 "collab-entity",
 "collab-plugins",
 "dashmap 5.5.3",
+"futures",
 "getrandom 0.2.10",
 "js-sys",
 "lazy_static",
 "nanoid",
-"parking_lot 0.12.1",
 "rayon",
 "serde",
 "serde_json",
@@ -1016,14 +1018,14 @@ dependencies = [
 [[package]]
 name = "collab-document"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
+"arc-swap",
 "collab",
 "collab-entity",
 "getrandom 0.2.10",
 "nanoid",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "thiserror",
@@ -1036,7 +1038,7 @@ dependencies = [
 [[package]]
 name = "collab-entity"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "bytes",
@@ -1055,14 +1057,15 @@ dependencies = [
 [[package]]
 name = "collab-folder"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
+"arc-swap",
 "chrono",
 "collab",
 "collab-entity",
+"dashmap 5.5.3",
 "getrandom 0.2.10",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "serde_repr",
@@ -1077,13 +1080,17 @@ name = "collab-integrate"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "async-trait",
 "collab",
+"collab-database",
+"collab-document",
 "collab-entity",
+"collab-folder",
 "collab-plugins",
+"collab-user",
 "futures",
 "lib-infra",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "tokio",
@@ -1093,7 +1100,7 @@ dependencies = [
 [[package]]
 name = "collab-plugins"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "async-stream",
@@ -1109,7 +1116,6 @@ dependencies = [
 "indexed_db_futures",
 "js-sys",
 "lazy_static",
-"parking_lot 0.12.1",
 "rand 0.8.5",
 "rocksdb",
 "serde",
@@ -1132,7 +1138,7 @@ dependencies = [
 [[package]]
 name = "collab-rt-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bincode",
@@ -1157,7 +1163,7 @@ dependencies = [
 [[package]]
 name = "collab-rt-protocol"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -1174,13 +1180,12 @@ dependencies = [
 [[package]]
 name = "collab-user"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "collab",
 "collab-entity",
 "getrandom 0.2.10",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "tokio",
@@ -1546,7 +1551,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
 [[package]]
 name = "database-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "app-error",
@@ -1972,6 +1977,7 @@ dependencies = [
 "anyhow",
 "appflowy-local-ai",
 "appflowy-plugin",
+"arc-swap",
 "base64 0.21.5",
 "bytes",
 "dashmap 6.0.1",
@@ -1989,7 +1995,6 @@ dependencies = [
 "log",
 "md5",
 "notify",
-"parking_lot 0.12.1",
 "pin-project",
 "protobuf",
 "reqwest",
@@ -2072,6 +2077,7 @@ version = "0.1.0"
 dependencies = [
 "anyhow",
 "appflowy-local-ai",
+"arc-swap",
 "base64 0.21.5",
 "bytes",
 "client-api",
@@ -2079,6 +2085,7 @@ dependencies = [
 "collab-entity",
 "collab-integrate",
 "collab-plugins",
+"dashmap 6.0.1",
 "diesel",
 "flowy-ai",
 "flowy-ai-pub",
@@ -2105,7 +2112,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "lib-log",
-"parking_lot 0.12.1",
 "semver",
 "serde",
 "serde_json",
@@ -2135,6 +2141,7 @@ name = "flowy-database2"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "async-stream",
 "async-trait",
 "bytes",
@@ -2159,7 +2166,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "rayon",
 "rust_decimal",
@@ -2231,7 +2237,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "scraper 0.18.1",
 "serde",
@@ -2302,6 +2307,7 @@ dependencies = [
 name = "flowy-folder"
 version = "0.1.0"
 dependencies = [
+"arc-swap",
 "async-trait",
 "bytes",
 "chrono",
@@ -2323,7 +2329,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "regex",
 "serde",
@@ -2418,6 +2423,7 @@ name = "flowy-server"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "bytes",
 "chrono",
 "client-api",
@@ -2426,6 +2432,7 @@ dependencies = [
 "collab-entity",
 "collab-folder",
 "collab-plugins",
+"dashmap 6.0.1",
 "flowy-ai-pub",
 "flowy-database-pub",
 "flowy-document-pub",
@@ -2445,7 +2452,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "mime_guess",
-"parking_lot 0.12.1",
 "postgrest",
 "rand 0.8.5",
 "reqwest",
@@ -2481,7 +2487,6 @@ dependencies = [
 "diesel_derives",
 "diesel_migrations",
 "libsqlite3-sys",
-"parking_lot 0.12.1",
 "r2d2",
 "scheduled-thread-pool",
 "serde",
@@ -2539,6 +2544,7 @@ name = "flowy-user"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "base64 0.21.5",
 "bytes",
 "chrono",
@@ -2551,6 +2557,7 @@ dependencies = [
 "collab-integrate",
 "collab-plugins",
 "collab-user",
+"dashmap 6.0.1",
 "diesel",
 "diesel_derives",
 "fancy-regex 0.11.0",
@@ -2567,7 +2574,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "once_cell",
-"parking_lot 0.12.1",
 "protobuf",
 "semver",
 "serde",
@@ -3068,7 +3074,7 @@ dependencies = [
 [[package]]
 name = "gotrue"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "futures-util",
@@ -3085,7 +3091,7 @@ dependencies = [
 [[package]]
 name = "gotrue-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "app-error",
@@ -3517,7 +3523,7 @@ dependencies = [
 [[package]]
 name = "infra"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bytes",
@@ -3782,7 +3788,6 @@ dependencies = [
 "futures-util",
 "getrandom 0.2.10",
 "nanoid",
-"parking_lot 0.12.1",
 "pin-project",
 "protobuf",
 "serde",
@@ -6115,7 +6120,7 @@ dependencies = [
 [[package]]
 name = "shared-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "app-error",


@@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"]
 # To switch to the local path, run:
 # scripts/tool/update_collab_source.sh
 # ⚠️⚠️⚠️️
-collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
 # Working directory: frontend
 # To update the commit ID, run:


@@ -1,9 +1,9 @@
+use dotenv::dotenv;
 use flowy_core::config::AppFlowyCoreConfig;
-use flowy_core::{AppFlowyCore, MutexAppFlowyCore, DEFAULT_NAME};
+use flowy_core::{AppFlowyCore, DEFAULT_NAME};
 use lib_dispatch::runtime::AFPluginRuntime;
 use std::rc::Rc;
+use std::sync::Mutex;
-use dotenv::dotenv;

 pub fn read_env() {
   dotenv().ok();
@@ -25,7 +25,7 @@ pub fn read_env() {
   }
 }
-pub fn init_flowy_core() -> MutexAppFlowyCore {
+pub(crate) fn init_appflowy_core() -> MutexAppFlowyCore {
   let config_json = include_str!("../tauri.conf.json");
   let config: tauri_utils::config::Config = serde_json::from_str(config_json).unwrap();
@@ -67,3 +67,13 @@ pub fn init_flowy_core() -> MutexAppFlowyCore {
     MutexAppFlowyCore::new(AppFlowyCore::new(config, cloned_runtime, None).await)
   })
 }
+
+pub struct MutexAppFlowyCore(pub Rc<Mutex<AppFlowyCore>>);
+
+impl MutexAppFlowyCore {
+  fn new(appflowy_core: AppFlowyCore) -> Self {
+    Self(Rc::new(Mutex::new(appflowy_core)))
+  }
+}
+unsafe impl Sync for MutexAppFlowyCore {}
+unsafe impl Send for MutexAppFlowyCore {}
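For readers skimming the hunk above: the wrapper that previously lived in flowy_core now sits next to the Tauri entry point. Below is a minimal, self-contained sketch of the same pattern, not the commit's code; the AppFlowyCore stand-in struct and its demo field are placeholders for illustration only.

use std::rc::Rc;
use std::sync::Mutex;

// Placeholder standing in for flowy_core::AppFlowyCore (assumption for this sketch).
struct AppFlowyCore {
  name: String,
}

// Same shape as the wrapper added in the hunk above: a newtype over Rc<Mutex<_>>
// that is manually asserted to be Send + Sync so Tauri's `.manage` accepts it.
pub struct MutexAppFlowyCore(pub Rc<Mutex<AppFlowyCore>>);

impl MutexAppFlowyCore {
  fn new(core: AppFlowyCore) -> Self {
    Self(Rc::new(Mutex::new(core)))
  }
}

// Rc is neither Send nor Sync, so these impls are a promise that the value is
// only ever touched from one thread; the compiler cannot verify that promise.
unsafe impl Sync for MutexAppFlowyCore {}
unsafe impl Send for MutexAppFlowyCore {}

fn main() {
  let core = MutexAppFlowyCore::new(AppFlowyCore { name: "demo".into() });
  // std::sync::Mutex::lock returns a Result, hence the unwrap at the call sites.
  println!("{}", core.0.lock().unwrap().name);
}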


@@ -11,17 +11,18 @@ mod init;
 mod notification;
 mod request;
+use crate::init::init_appflowy_core;
+use crate::request::invoke_request;
 use flowy_notification::{register_notification_sender, unregister_all_notification_sender};
-use init::*;
 use notification::*;
-use request::*;
 use tauri::Manager;
 extern crate dotenv;
 fn main() {
   tauri_plugin_deep_link::prepare(DEEP_LINK_SCHEME);
-  let flowy_core = init_flowy_core();
+  let flowy_core = init_appflowy_core();
   tauri::Builder::default()
     .invoke_handler(tauri::generate_handler![invoke_request])
     .manage(flowy_core)


@@ -1,4 +1,4 @@
-use flowy_core::MutexAppFlowyCore;
+use crate::init::MutexAppFlowyCore;
 use lib_dispatch::prelude::{
   AFPluginDispatcher, AFPluginEventResponse, AFPluginRequest, StatusCode,
 };
@@ -39,7 +39,7 @@ pub async fn invoke_request(
 ) -> AFTauriResponse {
   let request: AFPluginRequest = request.into();
   let state: State<MutexAppFlowyCore> = app_handler.state();
-  let dispatcher = state.0.lock().dispatcher();
+  let dispatcher = state.0.lock().unwrap().dispatcher();
   let response = AFPluginDispatcher::sync_send(dispatcher, request);
   response.into()
 }
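The only behavioural change in the handler above is the extra `.unwrap()`: parking_lot's `Mutex::lock` hands back the guard directly, while `std::sync::Mutex::lock` returns a `Result` that is an `Err` only when another thread panicked while holding the lock. A small standalone sketch of that difference, using std only (the parking_lot behaviour is described in the comment):

use std::sync::{Mutex, PoisonError};

fn main() {
  let counter = Mutex::new(0_i32);

  // With parking_lot the old call site could chain directly: state.0.lock().dispatcher().
  // With std, lock() yields a Result, so the new call site adds .unwrap().
  *counter.lock().unwrap() += 1;

  // A more defensive variant keeps working even if the lock was poisoned by a panic.
  let guard = counter.lock().unwrap_or_else(PoisonError::into_inner);
  assert_eq!(*guard, 1);
}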


@@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
 [[package]]
 name = "app-error"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bincode",
@@ -183,7 +183,7 @@ dependencies = [
 [[package]]
 name = "appflowy-ai-client"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bytes",
@@ -800,11 +800,12 @@ dependencies = [
 [[package]]
 name = "client-api"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "again",
 "anyhow",
 "app-error",
+"arc-swap",
 "async-trait",
 "bincode",
 "brotli",
@@ -850,7 +851,7 @@ dependencies = [
 [[package]]
 name = "client-api-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "collab-entity",
 "collab-rt-entity",
@@ -862,7 +863,7 @@ dependencies = [
 [[package]]
 name = "client-websocket"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "futures-channel",
 "futures-util",
@@ -945,15 +946,16 @@ dependencies = [
 [[package]]
 name = "collab"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
+"arc-swap",
 "async-trait",
 "bincode",
 "bytes",
 "chrono",
 "js-sys",
-"parking_lot 0.12.1",
+"lazy_static",
 "serde",
 "serde_json",
 "serde_repr",
@@ -969,7 +971,7 @@ dependencies = [
 [[package]]
 name = "collab-database"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -978,11 +980,11 @@ dependencies = [
 "collab-entity",
 "collab-plugins",
 "dashmap 5.5.3",
+"futures",
 "getrandom 0.2.12",
 "js-sys",
 "lazy_static",
 "nanoid",
-"parking_lot 0.12.1",
 "rayon",
 "serde",
 "serde_json",
@@ -999,14 +1001,14 @@ dependencies = [
 [[package]]
 name = "collab-document"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
+"arc-swap",
 "collab",
 "collab-entity",
 "getrandom 0.2.12",
 "nanoid",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "thiserror",
@@ -1019,7 +1021,7 @@ dependencies = [
 [[package]]
 name = "collab-entity"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "bytes",
@@ -1038,14 +1040,15 @@ dependencies = [
 [[package]]
 name = "collab-folder"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
+"arc-swap",
 "chrono",
 "collab",
 "collab-entity",
+"dashmap 5.5.3",
 "getrandom 0.2.12",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "serde_repr",
@@ -1060,13 +1063,17 @@ name = "collab-integrate"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "async-trait",
 "collab",
+"collab-database",
+"collab-document",
 "collab-entity",
+"collab-folder",
 "collab-plugins",
+"collab-user",
 "futures",
 "lib-infra",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "tokio",
@@ -1076,7 +1083,7 @@ dependencies = [
 [[package]]
 name = "collab-plugins"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "async-stream",
@@ -1092,7 +1099,6 @@ dependencies = [
 "indexed_db_futures",
 "js-sys",
 "lazy_static",
-"parking_lot 0.12.1",
 "rand 0.8.5",
 "rocksdb",
 "serde",
@@ -1115,7 +1121,7 @@ dependencies = [
 [[package]]
 name = "collab-rt-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bincode",
@@ -1140,7 +1146,7 @@ dependencies = [
 [[package]]
 name = "collab-rt-protocol"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -1157,13 +1163,12 @@ dependencies = [
 [[package]]
 name = "collab-user"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "collab",
 "collab-entity",
 "getrandom 0.2.12",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "tokio",
@@ -1536,7 +1541,7 @@ checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
 [[package]]
 name = "database-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "app-error",
@@ -2002,6 +2007,7 @@ dependencies = [
 "anyhow",
 "appflowy-local-ai",
 "appflowy-plugin",
+"arc-swap",
 "base64 0.21.7",
 "bytes",
 "dashmap 6.0.1",
@@ -2019,7 +2025,6 @@ dependencies = [
 "log",
 "md5",
 "notify",
-"parking_lot 0.12.1",
 "pin-project",
 "protobuf",
 "reqwest",
@@ -2102,6 +2107,7 @@ version = "0.1.0"
 dependencies = [
 "anyhow",
 "appflowy-local-ai",
+"arc-swap",
 "base64 0.21.7",
 "bytes",
 "client-api",
@@ -2109,6 +2115,7 @@ dependencies = [
 "collab-entity",
 "collab-integrate",
 "collab-plugins",
+"dashmap 6.0.1",
 "diesel",
 "flowy-ai",
 "flowy-ai-pub",
@@ -2135,7 +2142,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "lib-log",
-"parking_lot 0.12.1",
 "semver",
 "serde",
 "serde_json",
@@ -2165,6 +2171,7 @@ name = "flowy-database2"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "async-stream",
 "async-trait",
 "bytes",
@@ -2189,7 +2196,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "rayon",
 "rust_decimal",
@@ -2261,7 +2267,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "scraper 0.18.1",
 "serde",
@@ -2332,6 +2337,7 @@ dependencies = [
 name = "flowy-folder"
 version = "0.1.0"
 dependencies = [
+"arc-swap",
 "async-trait",
 "bytes",
 "chrono",
@@ -2353,7 +2359,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "regex",
 "serde",
@@ -2448,6 +2453,7 @@ name = "flowy-server"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "bytes",
 "chrono",
 "client-api",
@@ -2456,6 +2462,7 @@ dependencies = [
 "collab-entity",
 "collab-folder",
 "collab-plugins",
+"dashmap 6.0.1",
 "flowy-ai-pub",
 "flowy-database-pub",
 "flowy-document-pub",
@@ -2475,7 +2482,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "mime_guess",
-"parking_lot 0.12.1",
 "postgrest",
 "rand 0.8.5",
 "reqwest",
@@ -2511,7 +2517,6 @@ dependencies = [
 "diesel_derives",
 "diesel_migrations",
 "libsqlite3-sys",
-"parking_lot 0.12.1",
 "r2d2",
 "scheduled-thread-pool",
 "serde",
@@ -2569,6 +2574,7 @@ name = "flowy-user"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "base64 0.21.7",
 "bytes",
 "chrono",
@@ -2581,6 +2587,7 @@ dependencies = [
 "collab-integrate",
 "collab-plugins",
 "collab-user",
+"dashmap 6.0.1",
 "diesel",
 "diesel_derives",
 "fancy-regex 0.11.0",
@@ -2597,7 +2604,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "once_cell",
-"parking_lot 0.12.1",
 "protobuf",
 "semver",
 "serde",
@@ -3135,7 +3141,7 @@ dependencies = [
 [[package]]
 name = "gotrue"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "futures-util",
@@ -3152,7 +3158,7 @@ dependencies = [
 [[package]]
 name = "gotrue-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "app-error",
@@ -3589,7 +3595,7 @@ dependencies = [
 [[package]]
 name = "infra"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bytes",
@@ -3859,7 +3865,6 @@ dependencies = [
 "futures-util",
 "getrandom 0.2.12",
 "nanoid",
-"parking_lot 0.12.1",
 "pin-project",
 "protobuf",
 "serde",
@@ -6179,7 +6184,7 @@ dependencies = [
 [[package]]
 name = "shared-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "app-error",


@@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"]
 # To switch to the local path, run:
 # scripts/tool/update_collab_source.sh
 # ⚠️⚠️⚠️️
-collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
-collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
 # Working directory: frontend
 # To update the commit ID, run:


@@ -1,9 +1,9 @@
+use dotenv::dotenv;
 use flowy_core::config::AppFlowyCoreConfig;
-use flowy_core::{AppFlowyCore, MutexAppFlowyCore, DEFAULT_NAME};
+use flowy_core::{AppFlowyCore, DEFAULT_NAME};
 use lib_dispatch::runtime::AFPluginRuntime;
 use std::rc::Rc;
+use std::sync::Mutex;
-use dotenv::dotenv;

 pub fn read_env() {
   dotenv().ok();
@@ -25,7 +25,7 @@ pub fn read_env() {
   }
 }
-pub fn init_flowy_core() -> MutexAppFlowyCore {
+pub fn init_appflowy_core() -> MutexAppFlowyCore {
   let config_json = include_str!("../tauri.conf.json");
   let config: tauri_utils::config::Config = serde_json::from_str(config_json).unwrap();
@@ -67,3 +67,13 @@ pub fn init_flowy_core() -> MutexAppFlowyCore {
     MutexAppFlowyCore::new(AppFlowyCore::new(config, cloned_runtime, None).await)
   })
 }
+
+pub struct MutexAppFlowyCore(pub Rc<Mutex<AppFlowyCore>>);
+
+impl MutexAppFlowyCore {
+  pub(crate) fn new(appflowy_core: AppFlowyCore) -> Self {
+    Self(Rc::new(Mutex::new(appflowy_core)))
+  }
+}
+unsafe impl Sync for MutexAppFlowyCore {}
+unsafe impl Send for MutexAppFlowyCore {}


@@ -21,7 +21,7 @@ extern crate dotenv;
 fn main() {
   tauri_plugin_deep_link::prepare(DEEP_LINK_SCHEME);
-  let flowy_core = init_flowy_core();
+  let flowy_core = init_appflowy_core();
   tauri::Builder::default()
     .invoke_handler(tauri::generate_handler![invoke_request])
     .manage(flowy_core)


@@ -1,4 +1,4 @@
-use flowy_core::MutexAppFlowyCore;
+use crate::init::MutexAppFlowyCore;
 use lib_dispatch::prelude::{
   AFPluginDispatcher, AFPluginEventResponse, AFPluginRequest, StatusCode,
 };
@@ -39,7 +39,7 @@ pub async fn invoke_request(
 ) -> AFTauriResponse {
   let request: AFPluginRequest = request.into();
   let state: State<MutexAppFlowyCore> = app_handler.state();
-  let dispatcher = state.0.lock().dispatcher();
+  let dispatcher = state.0.lock().unwrap().dispatcher();
   let response = AFPluginDispatcher::sync_send(dispatcher, request);
   response.into()
 }


@@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
 [[package]]
 name = "app-error"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bincode",
@@ -183,7 +183,7 @@ dependencies = [
 [[package]]
 name = "appflowy-ai-client"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bytes",
@@ -718,11 +718,12 @@ dependencies = [
 [[package]]
 name = "client-api"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "again",
 "anyhow",
 "app-error",
+"arc-swap",
 "async-trait",
 "bincode",
 "brotli",
@@ -768,7 +769,7 @@ dependencies = [
 [[package]]
 name = "client-api-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "collab-entity",
 "collab-rt-entity",
@@ -780,7 +781,7 @@ dependencies = [
 [[package]]
 name = "client-websocket"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "futures-channel",
 "futures-util",
@@ -823,15 +824,16 @@ dependencies = [
 [[package]]
 name = "collab"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
+"arc-swap",
 "async-trait",
 "bincode",
 "bytes",
 "chrono",
 "js-sys",
-"parking_lot 0.12.1",
+"lazy_static",
 "serde",
 "serde_json",
 "serde_repr",
@@ -847,7 +849,7 @@ dependencies = [
 [[package]]
 name = "collab-database"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -856,11 +858,11 @@ dependencies = [
 "collab-entity",
 "collab-plugins",
 "dashmap 5.5.3",
+"futures",
 "getrandom 0.2.10",
 "js-sys",
 "lazy_static",
 "nanoid",
-"parking_lot 0.12.1",
 "rayon",
 "serde",
 "serde_json",
@@ -877,14 +879,14 @@ dependencies = [
 [[package]]
 name = "collab-document"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
+"arc-swap",
 "collab",
 "collab-entity",
 "getrandom 0.2.10",
 "nanoid",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "thiserror",
@@ -897,7 +899,7 @@ dependencies = [
 [[package]]
 name = "collab-entity"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "bytes",
@@ -916,14 +918,15 @@ dependencies = [
 [[package]]
 name = "collab-folder"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
+"arc-swap",
 "chrono",
 "collab",
 "collab-entity",
+"dashmap 5.5.3",
 "getrandom 0.2.10",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "serde_repr",
@@ -938,13 +941,17 @@ name = "collab-integrate"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "async-trait",
 "collab",
+"collab-database",
+"collab-document",
 "collab-entity",
+"collab-folder",
 "collab-plugins",
+"collab-user",
 "futures",
 "lib-infra",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "tokio",
@@ -954,7 +961,7 @@ dependencies = [
 [[package]]
 name = "collab-plugins"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "async-stream",
@@ -970,7 +977,6 @@ dependencies = [
 "indexed_db_futures",
 "js-sys",
 "lazy_static",
-"parking_lot 0.12.1",
 "rand 0.8.5",
 "rocksdb",
 "serde",
@@ -993,7 +999,7 @@ dependencies = [
 [[package]]
 name = "collab-rt-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bincode",
@@ -1018,7 +1024,7 @@ dependencies = [
 [[package]]
 name = "collab-rt-protocol"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -1035,13 +1041,12 @@ dependencies = [
 [[package]]
 name = "collab-user"
 version = "0.2.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6adf750#6adf750dcb7a3f74806b8ffe8c7865bc9d5f85db"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d03bd474e551ab5583780abe051a85b8063e6aa9#d03bd474e551ab5583780abe051a85b8063e6aa9"
 dependencies = [
 "anyhow",
 "collab",
 "collab-entity",
 "getrandom 0.2.10",
-"parking_lot 0.12.1",
 "serde",
 "serde_json",
 "tokio",
@@ -1323,7 +1328,6 @@ dependencies = [
 "lazy_static",
 "lib-dispatch",
 "lib-log",
-"parking_lot 0.12.1",
 "protobuf",
 "semver",
 "serde",
@@ -1370,7 +1374,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
 [[package]]
 name = "database-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "app-error",
@@ -1662,7 +1666,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "rand 0.8.5",
 "semver",
@@ -1795,6 +1798,7 @@ dependencies = [
 "anyhow",
 "appflowy-local-ai",
 "appflowy-plugin",
+"arc-swap",
 "base64 0.21.5",
 "bytes",
 "dashmap 6.0.1",
@@ -1813,7 +1817,6 @@ dependencies = [
 "log",
 "md5",
 "notify",
-"parking_lot 0.12.1",
 "pin-project",
 "protobuf",
 "reqwest",
@@ -1898,6 +1901,7 @@ version = "0.1.0"
 dependencies = [
 "anyhow",
 "appflowy-local-ai",
+"arc-swap",
 "base64 0.21.5",
 "bytes",
 "client-api",
@@ -1906,6 +1910,7 @@ dependencies = [
 "collab-integrate",
 "collab-plugins",
 "console-subscriber",
+"dashmap 6.0.1",
 "diesel",
 "flowy-ai",
 "flowy-ai-pub",
@@ -1932,7 +1937,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "lib-log",
-"parking_lot 0.12.1",
 "semver",
 "serde",
 "serde_json",
@@ -1962,6 +1966,7 @@ name = "flowy-database2"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "async-stream",
 "async-trait",
 "bytes",
@@ -1987,7 +1992,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "rayon",
 "rust_decimal",
@@ -2059,7 +2063,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "scraper 0.18.1",
 "serde",
@@ -2132,6 +2135,7 @@ dependencies = [
 name = "flowy-folder"
 version = "0.1.0"
 dependencies = [
+"arc-swap",
 "async-trait",
 "bytes",
 "chrono",
@@ -2153,7 +2157,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "nanoid",
-"parking_lot 0.12.1",
 "protobuf",
 "regex",
 "serde",
@@ -2249,6 +2252,7 @@ name = "flowy-server"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "assert-json-diff",
 "bytes",
 "chrono",
@@ -2258,6 +2262,7 @@ dependencies = [
 "collab-entity",
 "collab-folder",
 "collab-plugins",
+"dashmap 6.0.1",
 "dotenv",
 "flowy-ai-pub",
 "flowy-database-pub",
@@ -2278,7 +2283,6 @@ dependencies = [
 "lib-dispatch",
 "lib-infra",
 "mime_guess",
-"parking_lot 0.12.1",
 "postgrest",
 "rand 0.8.5",
 "reqwest",
@@ -2317,7 +2321,6 @@ dependencies = [
 "libsqlite3-sys",
 "openssl",
 "openssl-sys",
-"parking_lot 0.12.1",
 "r2d2",
 "scheduled-thread-pool",
 "serde",
@@ -2378,6 +2381,7 @@ name = "flowy-user"
 version = "0.1.0"
 dependencies = [
 "anyhow",
+"arc-swap",
 "base64 0.21.5",
 "bytes",
 "chrono",
@@ -2390,6 +2394,7 @@ dependencies = [
 "collab-integrate",
 "collab-plugins",
 "collab-user",
+"dashmap 6.0.1",
 "diesel",
 "diesel_derives",
 "fake",
@@ -2408,7 +2413,6 @@ dependencies = [
 "lib-infra",
 "nanoid",
 "once_cell",
-"parking_lot 0.12.1",
 "protobuf",
 "quickcheck",
 "quickcheck_macros",
@@ -2747,7 +2751,7 @@ dependencies = [
 [[package]]
 name = "gotrue"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "futures-util",
@@ -2764,7 +2768,7 @@ dependencies = [
 [[package]]
 name = "gotrue-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "app-error",
@@ -3129,7 +3133,7 @@ dependencies = [
 [[package]]
 name = "infra"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "bytes",
@@ -3295,7 +3299,6 @@ dependencies = [
 "futures-util",
 "getrandom 0.2.10",
 "nanoid",
-"parking_lot 0.12.1",
 "pin-project",
 "protobuf",
 "serde",
@@ -5338,7 +5341,7 @@ dependencies = [
 [[package]]
 name = "shared-entity"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=7878a018a18553e3d8201e572a0c066c14ba3b35#7878a018a18553e3d8201e572a0c066c14ba3b35"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=d503905#d5039059313804103f34eee49ee9844c255a99c0"
 dependencies = [
 "anyhow",
 "app-error",

View File

@ -68,6 +68,7 @@ flowy-date = { workspace = true, path = "flowy-date" }
flowy-ai = { workspace = true, path = "flowy-ai" } flowy-ai = { workspace = true, path = "flowy-ai" }
flowy-ai-pub = { workspace = true, path = "flowy-ai-pub" } flowy-ai-pub = { workspace = true, path = "flowy-ai-pub" }
anyhow = "1.0" anyhow = "1.0"
arc-swap = "1.7"
tracing = "0.1.40" tracing = "0.1.40"
bytes = "1.5.0" bytes = "1.5.0"
serde_json = "1.0.108" serde_json = "1.0.108"
@ -76,7 +77,6 @@ protobuf = { version = "2.28.0" }
diesel = { version = "2.1.0", features = ["sqlite", "chrono", "r2d2", "serde_json"] } diesel = { version = "2.1.0", features = ["sqlite", "chrono", "r2d2", "serde_json"] }
uuid = { version = "1.5.0", features = ["serde", "v4", "v5"] } uuid = { version = "1.5.0", features = ["serde", "v4", "v5"] }
serde_repr = "0.1" serde_repr = "0.1"
parking_lot = "0.12"
futures = "0.3.29" futures = "0.3.29"
tokio = "1.38.0" tokio = "1.38.0"
tokio-stream = "0.1.14" tokio-stream = "0.1.14"
@ -100,8 +100,8 @@ dashmap = "6.0.1"
# Run the script.add_workspace_members: # Run the script.add_workspace_members:
# scripts/tool/update_client_api_rev.sh new_rev_id # scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "7878a018a18553e3d8201e572a0c066c14ba3b35" } client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "d503905" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "7878a018a18553e3d8201e572a0c066c14ba3b35" } client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "d503905" }
[profile.dev] [profile.dev]
opt-level = 0 opt-level = 0
@ -136,13 +136,13 @@ rocksdb = { git = "https://github.com/rust-rocksdb/rust-rocksdb", rev = "1710120
# To switch to the local path, run: # To switch to the local path, run:
# scripts/tool/update_collab_source.sh # scripts/tool/update_collab_source.sh
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" } collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
# Working directory: frontend # Working directory: frontend
# To update the commit ID, run: # To update the commit ID, run:
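
The workspace-level swap from parking_lot to arc-swap is what the per-crate changes above follow. A minimal sketch of the replacement pattern, assuming only the arc-swap 1.7 crate; CloudConfig and Builder are hypothetical stand-ins for the real handles:

use arc_swap::ArcSwapOption;
use std::sync::Arc;

// CloudConfig is a hypothetical type standing in for the provider and
// persistence handles that used to live behind parking_lot locks.
struct CloudConfig {
  endpoint: String,
}

struct Builder {
  // Previously: parking_lot::Mutex<Option<Arc<CloudConfig>>>.
  config: ArcSwapOption<CloudConfig>,
}

impl Builder {
  fn new() -> Self {
    Self { config: ArcSwapOption::default() }
  }

  fn set_config(&self, config: Arc<CloudConfig>) {
    // store() swaps the value in atomically; readers never block on a lock.
    self.config.store(Some(config));
  }

  fn endpoint(&self) -> Option<String> {
    // load_full() clones the Arc out; load() would hand back a cheaper guard.
    self.config.load_full().map(|c| c.endpoint.clone())
  }
}

fn main() {
  let builder = Builder::new();
  builder.set_config(Arc::new(CloudConfig { endpoint: "http://localhost:8000".into() }));
  assert_eq!(builder.endpoint().as_deref(), Some("http://localhost:8000"));
}
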

View File

@ -11,15 +11,19 @@ crate-type = ["cdylib", "rlib"]
collab = { workspace = true } collab = { workspace = true }
collab-plugins = { workspace = true } collab-plugins = { workspace = true }
collab-entity = { workspace = true } collab-entity = { workspace = true }
collab-document = { workspace = true }
collab-folder = { workspace = true }
collab-user = { workspace = true }
collab-database = { workspace = true }
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true
anyhow.workspace = true anyhow.workspace = true
tracing.workspace = true tracing.workspace = true
parking_lot.workspace = true
async-trait.workspace = true async-trait.workspace = true
tokio = { workspace = true, features = ["sync"] } tokio = { workspace = true, features = ["sync"] }
lib-infra = { workspace = true } lib-infra = { workspace = true }
futures = "0.3" futures = "0.3"
arc-swap = "1.7"
[features] [features]
default = [] default = []

View File

@ -1,11 +1,18 @@
use std::borrow::BorrowMut;
use std::fmt::{Debug, Display}; use std::fmt::{Debug, Display};
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use crate::CollabKVDB; use crate::CollabKVDB;
use anyhow::Error; use anyhow::Error;
use collab::core::collab::{DataSource, MutexCollab}; use arc_swap::{ArcSwap, ArcSwapOption};
use collab::preclude::CollabBuilder; use collab::core::collab::DataSource;
use collab::core::collab_plugin::CollabPersistence;
use collab::preclude::{Collab, CollabBuilder};
use collab_database::workspace_database::{DatabaseCollabService, WorkspaceDatabase};
use collab_document::blocks::DocumentData;
use collab_document::document::Document;
use collab_entity::{CollabObject, CollabType}; use collab_entity::{CollabObject, CollabType};
use collab_folder::{Folder, FolderData, FolderNotify};
use collab_plugins::connect_state::{CollabConnectReachability, CollabConnectState}; use collab_plugins::connect_state::{CollabConnectReachability, CollabConnectState};
use collab_plugins::local_storage::kv::snapshot::SnapshotPersistence; use collab_plugins::local_storage::kv::snapshot::SnapshotPersistence;
if_native! { if_native! {
@ -17,17 +24,19 @@ use collab_plugins::local_storage::indexeddb::IndexeddbDiskPlugin;
} }
pub use crate::plugin_provider::CollabCloudPluginProvider; pub use crate::plugin_provider::CollabCloudPluginProvider;
use collab_plugins::local_storage::kv::doc::CollabKVAction;
use collab_plugins::local_storage::kv::KVTransactionDB;
use collab_plugins::local_storage::CollabPersistenceConfig; use collab_plugins::local_storage::CollabPersistenceConfig;
use collab_user::core::{UserAwareness, UserAwarenessNotifier};
use tokio::sync::RwLock;
use lib_infra::{if_native, if_wasm}; use lib_infra::{if_native, if_wasm};
use parking_lot::{Mutex, RwLock}; use tracing::{error, instrument, trace, warn};
use tracing::{instrument, trace};
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum CollabPluginProviderType { pub enum CollabPluginProviderType {
Local, Local,
AppFlowyCloud, AppFlowyCloud,
Supabase,
} }
pub enum CollabPluginProviderContext { pub enum CollabPluginProviderContext {
@ -35,13 +44,7 @@ pub enum CollabPluginProviderContext {
AppFlowyCloud { AppFlowyCloud {
uid: i64, uid: i64,
collab_object: CollabObject, collab_object: CollabObject,
local_collab: Weak<MutexCollab>, local_collab: Weak<RwLock<dyn BorrowMut<Collab> + Send + Sync + 'static>>,
},
Supabase {
uid: i64,
collab_object: CollabObject,
local_collab: Weak<MutexCollab>,
local_collab_db: Weak<CollabKVDB>,
}, },
} }
@ -52,13 +55,7 @@ impl Display for CollabPluginProviderContext {
CollabPluginProviderContext::AppFlowyCloud { CollabPluginProviderContext::AppFlowyCloud {
uid: _, uid: _,
collab_object, collab_object,
local_collab: _, ..
} => collab_object.to_string(),
CollabPluginProviderContext::Supabase {
uid: _,
collab_object,
local_collab: _,
local_collab_db: _,
} => collab_object.to_string(), } => collab_object.to_string(),
}; };
write!(f, "{}", str) write!(f, "{}", str)
@ -72,10 +69,10 @@ pub trait WorkspaceCollabIntegrate: Send + Sync {
pub struct AppFlowyCollabBuilder { pub struct AppFlowyCollabBuilder {
network_reachability: CollabConnectReachability, network_reachability: CollabConnectReachability,
plugin_provider: RwLock<Arc<dyn CollabCloudPluginProvider>>, plugin_provider: ArcSwap<Arc<dyn CollabCloudPluginProvider>>,
snapshot_persistence: Mutex<Option<Arc<dyn SnapshotPersistence>>>, snapshot_persistence: ArcSwapOption<Arc<dyn SnapshotPersistence + 'static>>,
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
rocksdb_backup: Mutex<Option<Arc<dyn RocksdbBackup>>>, rocksdb_backup: ArcSwapOption<Arc<dyn RocksdbBackup>>,
workspace_integrate: Arc<dyn WorkspaceCollabIntegrate>, workspace_integrate: Arc<dyn WorkspaceCollabIntegrate>,
} }
@ -86,7 +83,7 @@ impl AppFlowyCollabBuilder {
) -> Self { ) -> Self {
Self { Self {
network_reachability: CollabConnectReachability::new(), network_reachability: CollabConnectReachability::new(),
plugin_provider: RwLock::new(Arc::new(storage_provider)), plugin_provider: ArcSwap::new(Arc::new(Arc::new(storage_provider))),
snapshot_persistence: Default::default(), snapshot_persistence: Default::default(),
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
rocksdb_backup: Default::default(), rocksdb_backup: Default::default(),
@ -95,12 +92,14 @@ impl AppFlowyCollabBuilder {
} }
pub fn set_snapshot_persistence(&self, snapshot_persistence: Arc<dyn SnapshotPersistence>) { pub fn set_snapshot_persistence(&self, snapshot_persistence: Arc<dyn SnapshotPersistence>) {
*self.snapshot_persistence.lock() = Some(snapshot_persistence); self
.snapshot_persistence
.store(Some(snapshot_persistence.into()));
} }
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
pub fn set_rocksdb_backup(&self, rocksdb_backup: Arc<dyn RocksdbBackup>) { pub fn set_rocksdb_backup(&self, rocksdb_backup: Arc<dyn RocksdbBackup>) {
*self.rocksdb_backup.lock() = Some(rocksdb_backup); self.rocksdb_backup.store(Some(rocksdb_backup.into()));
} }
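
The builder fields above change from lock-guarded options to ArcSwap/ArcSwapOption. Because arc-swap tracks its value through a single pointer-sized atomic, the unsized trait object is wrapped in one more Arc (hence ArcSwap<Arc<dyn CollabCloudPluginProvider>> and the double Arc::new in the constructor). A self-contained sketch of that shape, with a stand-in provider trait:

use arc_swap::ArcSwap;
use std::sync::Arc;

// Stand-ins for the real provider trait and implementation.
trait PluginProvider: Send + Sync {
  fn provider_type(&self) -> &'static str;
}

struct LocalProvider;
impl PluginProvider for LocalProvider {
  fn provider_type(&self) -> &'static str { "Local" }
}

struct Builder {
  // The trait object is wrapped in an extra Arc so that the value arc-swap
  // tracks (an Arc<dyn PluginProvider>) is itself a sized, pointer-swappable type.
  plugin_provider: ArcSwap<Arc<dyn PluginProvider>>,
}

impl Builder {
  fn new(provider: impl PluginProvider + 'static) -> Self {
    Self {
      plugin_provider: ArcSwap::new(Arc::new(Arc::new(provider) as Arc<dyn PluginProvider>)),
    }
  }

  fn provider_type(&self) -> &'static str {
    // load_full() clones the outer Arc, mirroring plugin_provider.load_full() in the diff.
    let provider = self.plugin_provider.load_full();
    provider.provider_type()
  }
}

fn main() {
  let builder = Builder::new(LocalProvider);
  assert_eq!(builder.provider_type(), "Local");
}
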
pub fn update_network(&self, reachable: bool) { pub fn update_network(&self, reachable: bool) {
@ -115,12 +114,23 @@ impl AppFlowyCollabBuilder {
} }
} }
fn collab_object( pub fn collab_object(
&self, &self,
workspace_id: &str,
uid: i64, uid: i64,
object_id: &str, object_id: &str,
collab_type: CollabType, collab_type: CollabType,
) -> Result<CollabObject, Error> { ) -> Result<CollabObject, Error> {
// Compare the workspace_id with the currently opened workspace_id. Return an error if they do not match.
// This check is crucial in asynchronous code contexts where the workspace_id might change during operation.
let actual_workspace_id = self.workspace_integrate.workspace_id()?;
if workspace_id != actual_workspace_id {
return Err(anyhow::anyhow!(
"workspace_id not match when build collab. expect workspace_id: {}, actual workspace_id: {}",
workspace_id,
actual_workspace_id
));
}
let device_id = self.workspace_integrate.device_id()?; let device_id = self.workspace_integrate.device_id()?;
let workspace_id = self.workspace_integrate.workspace_id()?; let workspace_id = self.workspace_integrate.workspace_id()?;
Ok(CollabObject::new( Ok(CollabObject::new(
@ -132,170 +142,155 @@ impl AppFlowyCollabBuilder {
)) ))
} }
/// Creates a new collaboration builder with the default configuration.
///
/// This function will initiate the creation of a [MutexCollab] object if it does not already exist.
/// To check for the existence of the object prior to creation, you should utilize a transaction
/// returned by the [read_txn] method of the [CollabKVDB]. Then, invoke the [is_exist] method
/// to confirm the object's presence.
///
/// # Parameters
/// - `uid`: The user ID associated with the collaboration.
/// - `object_id`: A string reference representing the ID of the object.
/// - `object_type`: The type of the collaboration, defined by the [CollabType] enum.
/// - `raw_data`: The raw data of the collaboration object, defined by the [CollabDocState] type.
/// - `collab_db`: A weak reference to the [CollabKVDB].
///
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub async fn build( #[instrument(
level = "trace",
skip(self, data_source, collab_db, builder_config, data)
)]
pub fn create_document(
&self, &self,
workspace_id: &str, object: CollabObject,
uid: i64, data_source: DataSource,
object_id: &str,
object_type: CollabType,
collab_doc_state: DataSource,
collab_db: Weak<CollabKVDB>, collab_db: Weak<CollabKVDB>,
build_config: CollabBuilderConfig, builder_config: CollabBuilderConfig,
) -> Result<Arc<MutexCollab>, Error> { data: Option<DocumentData>,
self.build_with_config( ) -> Result<Arc<RwLock<Document>>, Error> {
workspace_id, assert_eq!(object.collab_type, CollabType::Document);
uid, let collab = self.build_collab(&object, &collab_db, data_source)?;
object_id, let document = Document::open_with(collab, data)?;
object_type, let document = Arc::new(RwLock::new(document));
collab_db, self.finalize(object, builder_config, document)
collab_doc_state,
build_config,
)
} }
/// Creates a new collaboration builder with the custom configuration.
///
/// This function will initiate the creation of a [MutexCollab] object if it does not already exist.
/// To check for the existence of the object prior to creation, you should utilize a transaction
/// returned by the [read_txn] method of the [CollabKVDB]. Then, invoke the [is_exist] method
/// to confirm the object's presence.
///
/// # Parameters
/// - `uid`: The user ID associated with the collaboration.
/// - `object_id`: A string reference representing the ID of the object.
/// - `object_type`: The type of the collaboration, defined by the [CollabType] enum.
/// - `raw_data`: The raw data of the collaboration object, defined by the [CollabDocState] type.
/// - `collab_db`: A weak reference to the [CollabKVDB].
///
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
#[instrument(level = "trace", skip(self, collab_db, collab_doc_state, build_config))] #[instrument(
pub fn build_with_config( level = "trace",
skip(self, object, doc_state, collab_db, builder_config, folder_notifier)
)]
pub fn create_folder(
&self, &self,
workspace_id: &str, object: CollabObject,
uid: i64, doc_state: DataSource,
object_id: &str,
object_type: CollabType,
collab_db: Weak<CollabKVDB>, collab_db: Weak<CollabKVDB>,
collab_doc_state: DataSource, builder_config: CollabBuilderConfig,
build_config: CollabBuilderConfig, folder_notifier: Option<FolderNotify>,
) -> Result<Arc<MutexCollab>, Error> { folder_data: Option<FolderData>,
let collab = CollabBuilder::new(uid, object_id) ) -> Result<Arc<RwLock<Folder>>, Error> {
.with_doc_state(collab_doc_state) assert_eq!(object.collab_type, CollabType::Folder);
let collab = self.build_collab(&object, &collab_db, doc_state)?;
let folder = Folder::open_with(object.uid, collab, folder_notifier, folder_data);
let folder = Arc::new(RwLock::new(folder));
self.finalize(object, builder_config, folder)
}
#[allow(clippy::too_many_arguments)]
#[instrument(
level = "trace",
skip(self, object, doc_state, collab_db, builder_config, notifier)
)]
pub fn create_user_awareness(
&self,
object: CollabObject,
doc_state: DataSource,
collab_db: Weak<CollabKVDB>,
builder_config: CollabBuilderConfig,
notifier: Option<UserAwarenessNotifier>,
) -> Result<Arc<RwLock<UserAwareness>>, Error> {
assert_eq!(object.collab_type, CollabType::UserAwareness);
let collab = self.build_collab(&object, &collab_db, doc_state)?;
let user_awareness = UserAwareness::open(collab, notifier);
let user_awareness = Arc::new(RwLock::new(user_awareness));
self.finalize(object, builder_config, user_awareness)
}
#[allow(clippy::too_many_arguments)]
#[instrument(
level = "trace",
skip(self, object, doc_state, collab_db, builder_config, collab_service)
)]
pub fn create_workspace_database(
&self,
object: CollabObject,
doc_state: DataSource,
collab_db: Weak<CollabKVDB>,
builder_config: CollabBuilderConfig,
collab_service: impl DatabaseCollabService,
) -> Result<Arc<RwLock<WorkspaceDatabase>>, Error> {
assert_eq!(object.collab_type, CollabType::WorkspaceDatabase);
let collab = self.build_collab(&object, &collab_db, doc_state)?;
let workspace = WorkspaceDatabase::open(object.uid, collab, collab_db.clone(), collab_service);
let workspace = Arc::new(RwLock::new(workspace));
self.finalize(object, builder_config, workspace)
}
pub fn build_collab(
&self,
object: &CollabObject,
collab_db: &Weak<CollabKVDB>,
data_source: DataSource,
) -> Result<Collab, Error> {
let collab = CollabBuilder::new(object.uid, &object.object_id, data_source)
.with_device_id(self.workspace_integrate.device_id()?) .with_device_id(self.workspace_integrate.device_id()?)
.build()?; .build()?;
// Compare the workspace_id with the currently opened workspace_id. Return an error if they do not match.
// This check is crucial in asynchronous code contexts where the workspace_id might change during operation.
let actual_workspace_id = self.workspace_integrate.workspace_id()?;
if workspace_id != actual_workspace_id {
return Err(anyhow::anyhow!(
"workspace_id not match when build collab. expect workspace_id: {}, actual workspace_id: {}",
workspace_id,
actual_workspace_id
));
}
let persistence_config = CollabPersistenceConfig::default(); let persistence_config = CollabPersistenceConfig::default();
let db_plugin = RocksdbDiskPlugin::new_with_config(
#[cfg(target_arch = "wasm32")] object.uid,
{ object.object_id.to_string(),
collab.lock().add_plugin(Box::new(IndexeddbDiskPlugin::new( object.collab_type.clone(),
uid,
object_id.to_string(),
object_type.clone(),
collab_db.clone(),
)));
}
#[cfg(not(target_arch = "wasm32"))]
{
collab
.lock()
.add_plugin(Box::new(RocksdbDiskPlugin::new_with_config(
uid,
object_id.to_string(),
object_type.clone(),
collab_db.clone(), collab_db.clone(),
persistence_config.clone(), persistence_config.clone(),
None, );
))); collab.add_plugin(Box::new(db_plugin));
Ok(collab)
} }
let arc_collab = Arc::new(collab); pub fn finalize<T>(
&self,
object: CollabObject,
build_config: CollabBuilderConfig,
collab: Arc<RwLock<T>>,
) -> Result<Arc<RwLock<T>>, Error>
where
T: BorrowMut<Collab> + Send + Sync + 'static,
{ {
let collab_object = self.collab_object(uid, object_id, object_type.clone())?; let mut write_collab = collab.try_write()?;
if !write_collab.borrow().get_state().is_uninitialized() {
drop(write_collab);
return Ok(collab);
}
trace!("🚀finalize collab:{}", object);
if build_config.sync_enable { if build_config.sync_enable {
let provider_type = self.plugin_provider.read().provider_type(); let plugin_provider = self.plugin_provider.load_full();
let span = tracing::span!(tracing::Level::TRACE, "collab_builder", object_id = %object_id); let provider_type = plugin_provider.provider_type();
let span =
tracing::span!(tracing::Level::TRACE, "collab_builder", object_id = %object.object_id);
let _enter = span.enter(); let _enter = span.enter();
match provider_type { match provider_type {
CollabPluginProviderType::AppFlowyCloud => { CollabPluginProviderType::AppFlowyCloud => {
let local_collab = Arc::downgrade(&arc_collab); let local_collab = Arc::downgrade(&collab);
let plugins = let plugins = plugin_provider.get_plugins(CollabPluginProviderContext::AppFlowyCloud {
self uid: object.uid,
.plugin_provider collab_object: object,
.read()
.get_plugins(CollabPluginProviderContext::AppFlowyCloud {
uid,
collab_object,
local_collab, local_collab,
}); });
// at the moment when we get the lock, the collab object is not yet exposed outside
for plugin in plugins { for plugin in plugins {
arc_collab.lock().add_plugin(plugin); write_collab.borrow().add_plugin(plugin);
}
},
CollabPluginProviderType::Supabase => {
#[cfg(not(target_arch = "wasm32"))]
{
trace!("init supabase collab plugins");
let local_collab = Arc::downgrade(&arc_collab);
let local_collab_db = collab_db.clone();
let plugins =
self
.plugin_provider
.read()
.get_plugins(CollabPluginProviderContext::Supabase {
uid,
collab_object,
local_collab,
local_collab_db,
});
for plugin in plugins {
arc_collab.lock().add_plugin(plugin);
}
} }
}, },
CollabPluginProviderType::Local => {}, CollabPluginProviderType::Local => {},
} }
} }
}
if build_config.auto_initialize { if build_config.auto_initialize {
#[cfg(target_arch = "wasm32")] // at the moment when we get the lock, the collab object is not yet exposed outside
futures::executor::block_on(arc_collab.lock().initialize()); (*write_collab).borrow_mut().initialize();
#[cfg(not(target_arch = "wasm32"))]
arc_collab.lock().initialize();
} }
drop(write_collab);
trace!("collab initialized: {}:{}", object_type, object_id); Ok(collab)
Ok(arc_collab)
} }
} }
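
The new create_document, create_folder, create_user_awareness and create_workspace_database helpers all funnel into one generic finalize step; any wrapper that can hand out a &mut Collab qualifies. A stripped-down sketch of that bound, assuming tokio's sync feature and using stand-in Collab and Document types:

use std::borrow::{Borrow, BorrowMut};
use std::sync::Arc;
use tokio::sync::RwLock;

// Stand-ins for the real collab types, for illustration only.
struct Collab { initialized: bool }
impl Collab {
  fn initialize(&mut self) { self.initialized = true; }
}

struct Document { collab: Collab }
impl Borrow<Collab> for Document {
  fn borrow(&self) -> &Collab { &self.collab }
}
impl BorrowMut<Collab> for Document {
  fn borrow_mut(&mut self) -> &mut Collab { &mut self.collab }
}

// One finalize works for Document, Folder, UserAwareness, ... because each of
// them can hand out a mutable reference to its inner Collab.
fn finalize<T>(collab: Arc<RwLock<T>>) -> Arc<RwLock<T>>
where
  T: BorrowMut<Collab> + Send + Sync + 'static,
{
  // The object has not been shared yet, so the non-blocking write lock succeeds.
  let mut guard = collab.try_write().expect("collab is not shared yet");
  (*guard).borrow_mut().initialize();
  drop(guard);
  collab
}

fn main() {
  let doc = Arc::new(RwLock::new(Document { collab: Collab { initialized: false } }));
  let doc = finalize(doc);
  assert!(doc.blocking_read().collab.initialized);
}
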
@ -328,3 +323,39 @@ impl CollabBuilderConfig {
self self
} }
} }
pub struct KVDBCollabPersistenceImpl {
pub db: Weak<CollabKVDB>,
pub uid: i64,
}
impl KVDBCollabPersistenceImpl {
pub fn new(db: Weak<CollabKVDB>, uid: i64) -> Self {
Self { db, uid }
}
pub fn into_data_source(self) -> DataSource {
DataSource::Disk(Some(Box::new(self)))
}
}
impl CollabPersistence for KVDBCollabPersistenceImpl {
fn load_collab(&self, collab: &mut Collab) {
if let Some(collab_db) = self.db.upgrade() {
let object_id = collab.object_id().to_string();
let rocksdb_read = collab_db.read_txn();
if rocksdb_read.is_exist(self.uid, &object_id) {
let mut txn = collab.transact_mut();
if let Err(err) = rocksdb_read.load_doc_with_txn(self.uid, &object_id, &mut txn) {
error!("🔴 load doc:{} failed: {}", object_id, err);
}
drop(rocksdb_read);
txn.commit();
drop(txn);
}
} else {
warn!("collab_db is dropped");
}
}
}
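
KVDBCollabPersistenceImpl above keeps only a Weak handle to the key-value store, so the persistence hook cannot keep the database alive on its own and simply warns when the store is already gone. The same shape with stand-in types:

use std::collections::HashMap;
use std::sync::{Arc, RwLock, Weak};

// KvDb and Doc stand in for CollabKVDB and Collab.
struct KvDb {
  docs: RwLock<HashMap<String, Vec<u8>>>,
}

struct Doc {
  object_id: String,
  bytes: Vec<u8>,
}

struct DiskPersistence {
  // A Weak handle: the persistence hook never keeps the database alive on its own.
  db: Weak<KvDb>,
}

impl DiskPersistence {
  fn load(&self, doc: &mut Doc) {
    match self.db.upgrade() {
      Some(db) => {
        if let Some(bytes) = db.docs.read().unwrap().get(&doc.object_id) {
          doc.bytes = bytes.clone();
        }
      },
      None => eprintln!("kv db was dropped before the doc was loaded"),
    }
  }
}

fn main() {
  let db = Arc::new(KvDb {
    docs: RwLock::new(HashMap::from([("doc-1".to_string(), vec![1, 2, 3])])),
  });
  let persistence = DiskPersistence { db: Arc::downgrade(&db) };
  let mut doc = Doc { object_id: "doc-1".to_string(), bytes: Vec::new() };
  persistence.load(&mut doc);
  assert_eq!(doc.bytes, vec![1, 2, 3]);
}
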

View File

@ -1,4 +1,3 @@
pub use collab::core::collab::MutexCollab;
pub use collab::preclude::Snapshot; pub use collab::preclude::Snapshot;
pub use collab_plugins::local_storage::CollabPersistenceConfig; pub use collab_plugins::local_storage::CollabPersistenceConfig;
pub use collab_plugins::CollabKVDB; pub use collab_plugins::CollabKVDB;

View File

@ -1,6 +1,7 @@
use crate::collab_builder::{CollabPluginProviderContext, CollabPluginProviderType};
use collab::preclude::CollabPlugin; use collab::preclude::CollabPlugin;
use crate::collab_builder::{CollabPluginProviderContext, CollabPluginProviderType};
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
pub trait CollabCloudPluginProvider: 'static { pub trait CollabCloudPluginProvider: 'static {
fn provider_type(&self) -> CollabPluginProviderType; fn provider_type(&self) -> CollabPluginProviderType;
@ -11,9 +12,9 @@ pub trait CollabCloudPluginProvider: 'static {
} }
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
impl<T> CollabCloudPluginProvider for std::rc::Rc<T> impl<U> CollabCloudPluginProvider for std::rc::Rc<U>
where where
T: CollabCloudPluginProvider, U: CollabCloudPluginProvider,
{ {
fn provider_type(&self) -> CollabPluginProviderType { fn provider_type(&self) -> CollabPluginProviderType {
(**self).provider_type() (**self).provider_type()
@ -38,9 +39,9 @@ pub trait CollabCloudPluginProvider: Send + Sync + 'static {
} }
#[cfg(not(target_arch = "wasm32"))] #[cfg(not(target_arch = "wasm32"))]
impl<T> CollabCloudPluginProvider for std::sync::Arc<T> impl<U> CollabCloudPluginProvider for std::sync::Arc<U>
where where
T: CollabCloudPluginProvider, U: CollabCloudPluginProvider,
{ {
fn provider_type(&self) -> CollabPluginProviderType { fn provider_type(&self) -> CollabPluginProviderType {
(**self).provider_type() (**self).provider_type()

View File

@ -22,7 +22,6 @@ serde_json.workspace = true
bytes.workspace = true bytes.workspace = true
crossbeam-utils = "0.8.15" crossbeam-utils = "0.8.15"
lazy_static = "1.4.0" lazy_static = "1.4.0"
parking_lot.workspace = true
tracing.workspace = true tracing.workspace = true
lib-log.workspace = true lib-log.workspace = true
semver = "1.0.22" semver = "1.0.22"

View File

@ -3,7 +3,6 @@ use std::collections::HashMap;
use serde::Deserialize; use serde::Deserialize;
use flowy_server_pub::af_cloud_config::AFCloudConfiguration; use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_server_pub::AuthenticatorType; use flowy_server_pub::AuthenticatorType;
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
@ -17,7 +16,7 @@ pub struct AppFlowyDartConfiguration {
pub device_id: String, pub device_id: String,
pub platform: String, pub platform: String,
pub authenticator_type: AuthenticatorType, pub authenticator_type: AuthenticatorType,
pub(crate) supabase_config: SupabaseConfiguration, //pub(crate) supabase_config: SupabaseConfiguration,
pub(crate) appflowy_cloud_config: AFCloudConfiguration, pub(crate) appflowy_cloud_config: AFCloudConfiguration,
#[serde(default)] #[serde(default)]
pub(crate) envs: HashMap<String, String>, pub(crate) envs: HashMap<String, String>,
@ -31,7 +30,7 @@ impl AppFlowyDartConfiguration {
pub fn write_env(&self) { pub fn write_env(&self) {
self.authenticator_type.write_env(); self.authenticator_type.write_env();
self.appflowy_cloud_config.write_env(); self.appflowy_cloud_config.write_env();
self.supabase_config.write_env(); //self.supabase_config.write_env();
for (k, v) in self.envs.iter() { for (k, v) in self.envs.iter() {
std::env::set_var(k, v); std::env::set_var(k, v);

View File

@ -2,10 +2,9 @@
use allo_isolate::Isolate; use allo_isolate::Isolate;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use parking_lot::Mutex;
use semver::Version; use semver::Version;
use std::rc::Rc; use std::rc::Rc;
use std::sync::Arc; use std::sync::{Arc, Mutex};
use std::{ffi::CStr, os::raw::c_char}; use std::{ffi::CStr, os::raw::c_char};
use tracing::{debug, error, info, trace, warn}; use tracing::{debug, error, info, trace, warn};
@ -38,6 +37,10 @@ lazy_static! {
static ref LOG_STREAM_ISOLATE: Mutex<Option<Isolate>> = Mutex::new(None); static ref LOG_STREAM_ISOLATE: Mutex<Option<Isolate>> = Mutex::new(None);
} }
unsafe impl Send for MutexAppFlowyCore {}
unsafe impl Sync for MutexAppFlowyCore {}
///FIXME: I'm pretty sure that there's a better way to do this
struct MutexAppFlowyCore(Rc<Mutex<Option<AppFlowyCore>>>); struct MutexAppFlowyCore(Rc<Mutex<Option<AppFlowyCore>>>);
impl MutexAppFlowyCore { impl MutexAppFlowyCore {
@ -46,15 +49,12 @@ impl MutexAppFlowyCore {
} }
fn dispatcher(&self) -> Option<Rc<AFPluginDispatcher>> { fn dispatcher(&self) -> Option<Rc<AFPluginDispatcher>> {
let binding = self.0.lock(); let binding = self.0.lock().unwrap();
let core = binding.as_ref(); let core = binding.as_ref();
core.map(|core| core.event_dispatcher.clone()) core.map(|core| core.event_dispatcher.clone())
} }
} }
unsafe impl Sync for MutexAppFlowyCore {}
unsafe impl Send for MutexAppFlowyCore {}
#[no_mangle] #[no_mangle]
pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 { pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 {
// and sent it the `Rust's` result // and sent it the `Rust's` result
@ -87,7 +87,7 @@ pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 {
// Ensure that the database is closed before initialization. Also, verify that the init_sdk function can be called // Ensure that the database is closed before initialization. Also, verify that the init_sdk function can be called
// multiple times (is reentrant). Currently, only the database resource is exclusive. // multiple times (is reentrant). Currently, only the database resource is exclusive.
if let Some(core) = &*APPFLOWY_CORE.0.lock() { if let Some(core) = &*APPFLOWY_CORE.0.lock().unwrap() {
core.close_db(); core.close_db();
} }
@ -96,11 +96,12 @@ pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 {
let log_stream = LOG_STREAM_ISOLATE let log_stream = LOG_STREAM_ISOLATE
.lock() .lock()
.unwrap()
.take() .take()
.map(|isolate| Arc::new(LogStreamSenderImpl { isolate }) as Arc<dyn StreamLogSender>); .map(|isolate| Arc::new(LogStreamSenderImpl { isolate }) as Arc<dyn StreamLogSender>);
// let isolate = allo_isolate::Isolate::new(port); // let isolate = allo_isolate::Isolate::new(port);
*APPFLOWY_CORE.0.lock() = runtime.block_on(async move { *APPFLOWY_CORE.0.lock().unwrap() = runtime.block_on(async move {
Some(AppFlowyCore::new(config, cloned_runtime, log_stream).await) Some(AppFlowyCore::new(config, cloned_runtime, log_stream).await)
// isolate.post("".to_string()); // isolate.post("".to_string());
}); });
@ -168,7 +169,7 @@ pub extern "C" fn set_stream_port(notification_port: i64) -> i32 {
#[no_mangle] #[no_mangle]
pub extern "C" fn set_log_stream_port(port: i64) -> i32 { pub extern "C" fn set_log_stream_port(port: i64) -> i32 {
*LOG_STREAM_ISOLATE.lock() = Some(Isolate::new(port)); *LOG_STREAM_ISOLATE.lock().unwrap() = Some(Isolate::new(port));
0 0
} }
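
With parking_lot dropped from dart-ffi, the globals fall back to std::sync::Mutex, whose lock() returns a Result that surfaces poisoning; that is where the added .unwrap() calls come from. A minimal sketch of the pattern (a plain static stands in for the real lazy_static globals and needs Rust 1.63+ for const Mutex::new):

use std::sync::Mutex;

// A plain static stands in for the real lazy_static! globals in dart-ffi.
static CORE: Mutex<Option<String>> = Mutex::new(None);

fn init(value: &str) {
  // std's lock() returns a Result so that a poisoned lock is surfaced,
  // hence the .unwrap() calls that parking_lot never needed.
  *CORE.lock().unwrap() = Some(value.to_string());
}

fn read_value() -> Option<String> {
  CORE.lock().unwrap().clone()
}

fn main() {
  init("appflowy");
  assert_eq!(read_value().as_deref(), Some("appflowy"));
}
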

View File

@ -37,7 +37,6 @@ thread-id = "3.3.0"
bytes.workspace = true bytes.workspace = true
nanoid = "0.4.0" nanoid = "0.4.0"
tracing.workspace = true tracing.workspace = true
parking_lot.workspace = true
uuid.workspace = true uuid.workspace = true
collab = { workspace = true } collab = { workspace = true }
collab-document = { workspace = true } collab-document = { workspace = true }

View File

@ -24,7 +24,7 @@ impl EventIntegrationTest {
self self
.appflowy_core .appflowy_core
.database_manager .database_manager
.get_database_with_view_id(database_view_id) .get_database_editor_with_view_id(database_view_id)
.await .await
.unwrap() .unwrap()
.export_csv(CSVFormat::Original) .export_csv(CSVFormat::Original)

View File

@ -42,10 +42,10 @@ impl DocumentEventTest {
.event_test .event_test
.appflowy_core .appflowy_core
.document_manager .document_manager
.get_opened_document(doc_id) .editable_document(doc_id)
.await .await
.unwrap(); .unwrap();
let guard = doc.lock(); let guard = doc.read().await;
guard.encode_collab().unwrap() guard.encode_collab().unwrap()
} }

View File

@ -1,6 +1,3 @@
use std::sync::Arc;
use collab::core::collab::MutexCollab;
use collab::core::origin::CollabOrigin; use collab::core::origin::CollabOrigin;
use collab::preclude::updates::decoder::Decode; use collab::preclude::updates::decoder::Decode;
use collab::preclude::{Collab, Update}; use collab::preclude::{Collab, Update};
@ -107,17 +104,13 @@ impl EventIntegrationTest {
} }
pub fn assert_document_data_equal(doc_state: &[u8], doc_id: &str, expected: DocumentData) { pub fn assert_document_data_equal(doc_state: &[u8], doc_id: &str, expected: DocumentData) {
let collab = MutexCollab::new(Collab::new_with_origin( let mut collab = Collab::new_with_origin(CollabOrigin::Server, doc_id, vec![], false);
CollabOrigin::Server, {
doc_id,
vec![],
false,
));
collab.lock().with_origin_transact_mut(|txn| {
let update = Update::decode_v1(doc_state).unwrap(); let update = Update::decode_v1(doc_state).unwrap();
let mut txn = collab.transact_mut();
txn.apply_update(update); txn.apply_update(update);
}); };
let document = Document::open(Arc::new(collab)).unwrap(); let document = Document::open(collab).unwrap();
let actual = document.get_document_data().unwrap(); let actual = document.get_document_data().unwrap();
assert_eq!(actual, expected); assert_eq!(actual, expected);
} }

View File

@ -1,3 +1,4 @@
use crate::EventIntegrationTest;
use flowy_user::errors::{internal_error, FlowyError}; use flowy_user::errors::{internal_error, FlowyError};
use lib_dispatch::prelude::{ use lib_dispatch::prelude::{
AFPluginDispatcher, AFPluginEventResponse, AFPluginFromBytes, AFPluginRequest, ToBytes, *, AFPluginDispatcher, AFPluginEventResponse, AFPluginFromBytes, AFPluginRequest, ToBytes, *,
@ -9,8 +10,6 @@ use std::{
hash::Hash, hash::Hash,
}; };
use crate::EventIntegrationTest;
#[derive(Clone)] #[derive(Clone)]
pub struct EventBuilder { pub struct EventBuilder {
context: TestContext, context: TestContext,

View File

@ -166,10 +166,14 @@ impl EventIntegrationTest {
.await; .await;
} }
pub fn get_folder_data(&self) -> FolderData { pub async fn get_folder_data(&self) -> FolderData {
let mutex_folder = self.appflowy_core.folder_manager.get_mutex_folder().clone(); let mutex_folder = self
let folder_lock_guard = mutex_folder.read(); .appflowy_core
let folder = folder_lock_guard.as_ref().unwrap(); .folder_manager
.get_mutex_folder()
.clone()
.unwrap();
let folder = mutex_folder.read().await;
let workspace_id = self.appflowy_core.user_manager.workspace_id().unwrap(); let workspace_id = self.appflowy_core.user_manager.workspace_id().unwrap();
folder.get_folder_data(&workspace_id).clone().unwrap() folder.get_folder_data(&workspace_id).clone().unwrap()
} }

View File

@ -6,11 +6,11 @@ use collab_entity::CollabType;
use std::env::temp_dir; use std::env::temp_dir;
use std::path::PathBuf; use std::path::PathBuf;
use std::rc::Rc; use std::rc::Rc;
use std::sync::atomic::{AtomicBool, AtomicU8, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use nanoid::nanoid; use nanoid::nanoid;
use parking_lot::{Mutex, RwLock};
use semver::Version; use semver::Version;
use tokio::select; use tokio::select;
use tokio::time::sleep; use tokio::time::sleep;
@ -35,10 +35,10 @@ pub mod user_event;
#[derive(Clone)] #[derive(Clone)]
pub struct EventIntegrationTest { pub struct EventIntegrationTest {
pub authenticator: Arc<RwLock<AuthenticatorPB>>, pub authenticator: Arc<AtomicU8>,
pub appflowy_core: AppFlowyCore, pub appflowy_core: AppFlowyCore,
#[allow(dead_code)] #[allow(dead_code)]
cleaner: Arc<Mutex<Cleaner>>, cleaner: Arc<Cleaner>,
pub notification_sender: TestNotificationSender, pub notification_sender: TestNotificationSender,
} }
@ -57,7 +57,7 @@ impl EventIntegrationTest {
let clean_path = config.storage_path.clone(); let clean_path = config.storage_path.clone();
let inner = init_core(config).await; let inner = init_core(config).await;
let notification_sender = TestNotificationSender::new(); let notification_sender = TestNotificationSender::new();
let authenticator = Arc::new(RwLock::new(AuthenticatorPB::Local)); let authenticator = Arc::new(AtomicU8::new(AuthenticatorPB::Local as u8));
register_notification_sender(notification_sender.clone()); register_notification_sender(notification_sender.clone());
// In case of dropping the runtime that runs the core, we need to forget the dispatcher // In case of dropping the runtime that runs the core, we need to forget the dispatcher
@ -66,7 +66,7 @@ impl EventIntegrationTest {
appflowy_core: inner, appflowy_core: inner,
authenticator, authenticator,
notification_sender, notification_sender,
cleaner: Arc::new(Mutex::new(Cleaner::new(PathBuf::from(clean_path)))), cleaner: Arc::new(Cleaner::new(PathBuf::from(clean_path))),
} }
} }
@ -93,7 +93,7 @@ impl EventIntegrationTest {
} }
pub fn skip_clean(&mut self) { pub fn skip_clean(&mut self) {
self.cleaner.lock().should_clean = false; self.cleaner.should_clean.store(false, Ordering::Release);
} }
pub fn instance_name(&self) -> String { pub fn instance_name(&self) -> String {
@ -154,7 +154,7 @@ pub fn document_data_from_document_doc_state(doc_id: &str, doc_state: Vec<u8>) -
} }
pub fn document_from_document_doc_state(doc_id: &str, doc_state: Vec<u8>) -> Document { pub fn document_from_document_doc_state(doc_id: &str, doc_state: Vec<u8>) -> Document {
Document::from_doc_state( Document::open_with_options(
CollabOrigin::Empty, CollabOrigin::Empty,
DataSource::DocStateV1(doc_state), DataSource::DocStateV1(doc_state),
doc_id, doc_id,
@ -177,17 +177,16 @@ impl std::ops::Deref for EventIntegrationTest {
} }
} }
#[derive(Clone)]
pub struct Cleaner { pub struct Cleaner {
dir: PathBuf, dir: PathBuf,
should_clean: bool, should_clean: AtomicBool,
} }
impl Cleaner { impl Cleaner {
pub fn new(dir: PathBuf) -> Self { pub fn new(dir: PathBuf) -> Self {
Self { Self {
dir, dir,
should_clean: true, should_clean: AtomicBool::new(true),
} }
} }
@ -198,7 +197,7 @@ impl Cleaner {
impl Drop for Cleaner { impl Drop for Cleaner {
fn drop(&mut self) { fn drop(&mut self) {
if self.should_clean { if self.should_clean.load(Ordering::Acquire) {
Self::cleanup(&self.dir) Self::cleanup(&self.dir)
} }
} }

View File

@ -1,11 +1,12 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::sync::atomic::Ordering;
use std::sync::Arc; use std::sync::Arc;
use bytes::Bytes; use bytes::Bytes;
use flowy_folder::entities::{RepeatedViewPB, WorkspacePB}; use flowy_folder::entities::{RepeatedViewPB, WorkspacePB};
use nanoid::nanoid;
use protobuf::ProtobufError; use protobuf::ProtobufError;
use tokio::sync::broadcast::{channel, Sender}; use tokio::sync::broadcast::{channel, Sender};
use tracing::error; use tracing::error;
@ -101,21 +102,6 @@ impl EventIntegrationTest {
} }
} }
pub async fn supabase_party_sign_up(&self) -> UserProfilePB {
let map = third_party_sign_up_param(Uuid::new_v4().to_string());
let payload = OauthSignInPB {
map,
authenticator: AuthenticatorPB::Supabase,
};
EventBuilder::new(self.clone())
.event(UserEvent::OauthSignIn)
.payload(payload)
.async_send()
.await
.parse::<UserProfilePB>()
}
pub async fn sign_out(&self) { pub async fn sign_out(&self) {
EventBuilder::new(self.clone()) EventBuilder::new(self.clone())
.event(UserEvent::SignOut) .event(UserEvent::SignOut)
@ -124,7 +110,7 @@ impl EventIntegrationTest {
} }
pub fn set_auth_type(&self, auth_type: AuthenticatorPB) { pub fn set_auth_type(&self, auth_type: AuthenticatorPB) {
*self.authenticator.write() = auth_type; self.authenticator.store(auth_type as u8, Ordering::Release);
} }
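
The test harness replaces Arc<RwLock<AuthenticatorPB>> with a plain Arc<AtomicU8> that stores the enum discriminant. A sketch of the store/load round trip; the enum and its values are stand-ins for AuthenticatorPB:

use std::sync::atomic::{AtomicU8, Ordering};
use std::sync::Arc;

// Stand-in for AuthenticatorPB; the discriminant values are illustrative.
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
enum Authenticator {
  Local = 0,
  AppFlowyCloud = 1,
}

fn set_auth(flag: &AtomicU8, auth: Authenticator) {
  flag.store(auth as u8, Ordering::Release);
}

fn get_auth(flag: &AtomicU8) -> Authenticator {
  match flag.load(Ordering::Acquire) {
    1 => Authenticator::AppFlowyCloud,
    _ => Authenticator::Local,
  }
}

fn main() {
  let flag = Arc::new(AtomicU8::new(Authenticator::Local as u8));
  set_auth(&flag, Authenticator::AppFlowyCloud);
  assert_eq!(get_auth(&flag), Authenticator::AppFlowyCloud);
}
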
pub async fn init_anon_user(&self) -> UserProfilePB { pub async fn init_anon_user(&self) -> UserProfilePB {
@ -178,33 +164,6 @@ impl EventIntegrationTest {
Ok(user_profile) Ok(user_profile)
} }
pub async fn supabase_sign_up_with_uuid(
&self,
uuid: &str,
email: Option<String>,
) -> FlowyResult<UserProfilePB> {
let mut map = HashMap::new();
map.insert(USER_UUID.to_string(), uuid.to_string());
map.insert(USER_DEVICE_ID.to_string(), uuid.to_string());
map.insert(
USER_EMAIL.to_string(),
email.unwrap_or_else(|| format!("{}@appflowy.io", nanoid!(10))),
);
let payload = OauthSignInPB {
map,
authenticator: AuthenticatorPB::Supabase,
};
let user_profile = EventBuilder::new(self.clone())
.event(UserEvent::OauthSignIn)
.payload(payload)
.async_send()
.await
.try_parse::<UserProfilePB>()?;
Ok(user_profile)
}
pub async fn import_appflowy_data( pub async fn import_appflowy_data(
&self, &self,
path: String, path: String,

View File

@ -207,6 +207,22 @@ impl FolderTest {
}, },
} }
} }
// pub async fn duplicate_view(&self, view_id: &str) {
// let payload = DuplicateViewPayloadPB {
// view_id: view_id.to_string(),
// open_after_duplicate: false,
// include_children: false,
// parent_view_id: None,
// suffix: None,
// sync_after_create: false,
// };
// EventBuilder::new(self.sdk.clone())
// .event(DuplicateView)
// .payload(payload)
// .async_send()
// .await;
// }
} }
pub async fn create_workspace(sdk: &EventIntegrationTest, name: &str, desc: &str) -> WorkspacePB { pub async fn create_workspace(sdk: &EventIntegrationTest, name: &str, desc: &str) -> WorkspacePB {
let request = CreateWorkspacePayloadPB { let request = CreateWorkspacePayloadPB {

View File

@ -125,7 +125,7 @@ async fn af_cloud_open_workspace_test() {
assert_eq!(views[2].name, "D"); assert_eq!(views[2].name, "D");
// simulate open workspace and check if the views are correct // simulate open workspace and check if the views are correct
for i in 0..30 { for i in 0..10 {
if i % 2 == 0 { if i % 2 == 0 {
test.open_workspace(&first_workspace.id).await; test.open_workspace(&first_workspace.id).await;
sleep(Duration::from_millis(300)).await; sleep(Duration::from_millis(300)).await;
@ -142,16 +142,16 @@ async fn af_cloud_open_workspace_test() {
} }
test.open_workspace(&first_workspace.id).await; test.open_workspace(&first_workspace.id).await;
let views = test.get_all_workspace_views().await; let views_1 = test.get_all_workspace_views().await;
assert_eq!(views[0].name, default_document_name); assert_eq!(views_1[0].name, default_document_name);
assert_eq!(views[1].name, "A"); assert_eq!(views_1[1].name, "A");
assert_eq!(views[2].name, "B"); assert_eq!(views_1[2].name, "B");
test.open_workspace(&second_workspace.id).await; test.open_workspace(&second_workspace.id).await;
let views = test.get_all_workspace_views().await; let views_2 = test.get_all_workspace_views().await;
assert_eq!(views[0].name, default_document_name); assert_eq!(views_2[0].name, default_document_name);
assert_eq!(views[1].name, "C"); assert_eq!(views_2[1].name, "C");
assert_eq!(views[2].name, "D"); assert_eq!(views_2[2].name, "D");
} }
#[tokio::test] #[tokio::test]
@ -240,7 +240,7 @@ async fn af_cloud_different_open_same_workspace_test() {
// Retrieve and verify the views associated with the workspace. // Retrieve and verify the views associated with the workspace.
let views = folder.get_views_belong_to(&shared_workspace_id); let views = folder.get_views_belong_to(&shared_workspace_id);
let folder_workspace_id = folder.get_workspace_id(); let folder_workspace_id = folder.get_workspace_id();
assert_eq!(folder_workspace_id, shared_workspace_id); assert_eq!(folder_workspace_id, Some(shared_workspace_id));
assert_eq!(views.len(), 1, "only get: {:?}", views); // Expecting two views. assert_eq!(views.len(), 1, "only get: {:?}", views); // Expecting two views.
assert_eq!(views[0].name, "Getting started"); assert_eq!(views[0].name, "Getting started");

View File

@ -2,16 +2,11 @@ use std::fs::{create_dir_all, File, OpenOptions};
use std::io::copy; use std::io::copy;
use std::ops::Deref; use std::ops::Deref;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use std::{fs, io}; use std::{fs, io};
use anyhow::Error;
use collab_folder::FolderData;
use collab_plugins::cloud_storage::RemoteCollabStorage;
use nanoid::nanoid; use nanoid::nanoid;
use tokio::sync::mpsc::Receiver; use tokio::sync::mpsc::Receiver;
use tokio::time::timeout; use tokio::time::timeout;
use uuid::Uuid; use uuid::Uuid;
use walkdir::WalkDir; use walkdir::WalkDir;
@ -21,22 +16,9 @@ use zip::{CompressionMethod, ZipArchive, ZipWriter};
use event_integration_test::event_builder::EventBuilder; use event_integration_test::event_builder::EventBuilder;
use event_integration_test::Cleaner; use event_integration_test::Cleaner;
use event_integration_test::EventIntegrationTest; use event_integration_test::EventIntegrationTest;
use flowy_database_pub::cloud::DatabaseCloudService; use flowy_user::entities::UpdateUserProfilePayloadPB;
use flowy_folder_pub::cloud::{FolderCloudService, FolderSnapshot};
use flowy_server::supabase::api::*;
use flowy_server::{AppFlowyEncryption, EncryptionImpl};
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_user::entities::{AuthenticatorPB, UpdateUserProfilePayloadPB};
use flowy_user::errors::FlowyError; use flowy_user::errors::FlowyError;
use flowy_user::event_map::UserEvent::*; use flowy_user::event_map::UserEvent::*;
use flowy_user_pub::cloud::UserCloudService;
use flowy_user_pub::entities::Authenticator;
pub fn get_supabase_config() -> Option<SupabaseConfiguration> {
dotenv::from_path(".env.ci").ok()?;
SupabaseConfiguration::from_env().ok()
}
pub struct FlowySupabaseTest { pub struct FlowySupabaseTest {
event_test: EventIntegrationTest, event_test: EventIntegrationTest,
@ -44,13 +26,7 @@ pub struct FlowySupabaseTest {
impl FlowySupabaseTest { impl FlowySupabaseTest {
pub async fn new() -> Option<Self> { pub async fn new() -> Option<Self> {
let _ = get_supabase_config()?;
let event_test = EventIntegrationTest::new().await; let event_test = EventIntegrationTest::new().await;
event_test.set_auth_type(AuthenticatorPB::Supabase);
event_test
.server_provider
.set_authenticator(Authenticator::Supabase);
Some(Self { event_test }) Some(Self { event_test })
} }
@ -79,93 +55,6 @@ pub async fn receive_with_timeout<T>(mut receiver: Receiver<T>, duration: Durati
timeout(duration, receiver.recv()).await.ok()? timeout(duration, receiver.recv()).await.ok()?
} }
pub fn get_supabase_ci_config() -> Option<SupabaseConfiguration> {
dotenv::from_filename("./.env.ci").ok()?;
SupabaseConfiguration::from_env().ok()
}
#[allow(dead_code)]
pub fn get_supabase_dev_config() -> Option<SupabaseConfiguration> {
dotenv::from_filename("./.env.dev").ok()?;
SupabaseConfiguration::from_env().ok()
}
pub fn collab_service() -> Arc<dyn RemoteCollabStorage> {
let (server, encryption_impl) = appflowy_server(None);
Arc::new(SupabaseCollabStorageImpl::new(
server,
None,
Arc::downgrade(&encryption_impl),
))
}
pub fn database_service() -> Arc<dyn DatabaseCloudService> {
let (server, _encryption_impl) = appflowy_server(None);
Arc::new(SupabaseDatabaseServiceImpl::new(server))
}
pub fn user_auth_service() -> Arc<dyn UserCloudService> {
let (server, _encryption_impl) = appflowy_server(None);
Arc::new(SupabaseUserServiceImpl::new(server, vec![], None))
}
pub fn folder_service() -> Arc<dyn FolderCloudService> {
let (server, _encryption_impl) = appflowy_server(None);
Arc::new(SupabaseFolderServiceImpl::new(server))
}
#[allow(dead_code)]
pub fn encryption_folder_service(
secret: Option<String>,
) -> (Arc<dyn FolderCloudService>, Arc<dyn AppFlowyEncryption>) {
let (server, encryption_impl) = appflowy_server(secret);
let service = Arc::new(SupabaseFolderServiceImpl::new(server));
(service, encryption_impl)
}
pub fn encryption_collab_service(
secret: Option<String>,
) -> (Arc<dyn RemoteCollabStorage>, Arc<dyn AppFlowyEncryption>) {
let (server, encryption_impl) = appflowy_server(secret);
let service = Arc::new(SupabaseCollabStorageImpl::new(
server,
None,
Arc::downgrade(&encryption_impl),
));
(service, encryption_impl)
}
pub async fn get_folder_data_from_server(
uid: &i64,
folder_id: &str,
encryption_secret: Option<String>,
) -> Result<Option<FolderData>, Error> {
let (cloud_service, _encryption) = encryption_folder_service(encryption_secret);
cloud_service.get_folder_data(folder_id, uid).await
}
pub async fn get_folder_snapshots(
folder_id: &str,
encryption_secret: Option<String>,
) -> Vec<FolderSnapshot> {
let (cloud_service, _encryption) = encryption_folder_service(encryption_secret);
cloud_service
.get_folder_snapshots(folder_id, 10)
.await
.unwrap()
}
pub fn appflowy_server(
encryption_secret: Option<String>,
) -> (SupabaseServerServiceImpl, Arc<dyn AppFlowyEncryption>) {
let config = SupabaseConfiguration::from_env().unwrap();
let encryption_impl: Arc<dyn AppFlowyEncryption> =
Arc::new(EncryptionImpl::new(encryption_secret));
let encryption = Arc::downgrade(&encryption_impl);
let server = Arc::new(RESTfulPostgresServer::new(config, encryption));
(SupabaseServerServiceImpl::new(server), encryption_impl)
}
/// zip the asset to the destination /// zip the asset to the destination
/// Zips the specified directory into a zip file. /// Zips the specified directory into a zip file.
/// ///

View File

@ -19,6 +19,7 @@ uuid.workspace = true
strum_macros = "0.21" strum_macros = "0.21"
protobuf.workspace = true protobuf.workspace = true
bytes.workspace = true bytes.workspace = true
arc-swap.workspace = true
validator = { workspace = true, features = ["derive"] } validator = { workspace = true, features = ["derive"] }
lib-infra = { workspace = true, features = ["isolate_flutter"] } lib-infra = { workspace = true, features = ["isolate_flutter"] }
flowy-ai-pub.workspace = true flowy-ai-pub.workspace = true
@ -33,7 +34,6 @@ serde_json = { workspace = true }
anyhow = "1.0.86" anyhow = "1.0.86"
tokio-stream = "0.1.15" tokio-stream = "0.1.15"
tokio-util = { workspace = true, features = ["full"] } tokio-util = { workspace = true, features = ["full"] }
parking_lot.workspace = true
appflowy-local-ai = { version = "0.1.0", features = ["verbose"] } appflowy-local-ai = { version = "0.1.0", features = ["verbose"] }
appflowy-plugin = { version = "0.1.0" } appflowy-plugin = { version = "0.1.0" }
reqwest = "0.11.27" reqwest = "0.11.27"

View File

@ -67,7 +67,8 @@ impl AIManager {
} }
pub async fn initialize(&self, _workspace_id: &str) -> Result<(), FlowyError> { pub async fn initialize(&self, _workspace_id: &str) -> Result<(), FlowyError> {
self.local_ai_controller.refresh().await?; // Ignore any error returned by refresh()
let _ = self.local_ai_controller.refresh().await;
Ok(()) Ok(())
} }

View File

@ -17,8 +17,8 @@ use lib_infra::async_trait::async_trait;
use std::collections::HashMap; use std::collections::HashMap;
use crate::stream_message::StreamMessage; use crate::stream_message::StreamMessage;
use arc_swap::ArcSwapOption;
use futures_util::SinkExt; use futures_util::SinkExt;
use parking_lot::Mutex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use std::ops::Deref; use std::ops::Deref;
@ -47,7 +47,7 @@ const LOCAL_AI_SETTING_KEY: &str = "appflowy_local_ai_setting:v0";
pub struct LocalAIController { pub struct LocalAIController {
local_ai: Arc<AppFlowyLocalAI>, local_ai: Arc<AppFlowyLocalAI>,
local_ai_resource: Arc<LocalAIResourceController>, local_ai_resource: Arc<LocalAIResourceController>,
current_chat_id: Mutex<Option<String>>, current_chat_id: ArcSwapOption<String>,
store_preferences: Arc<KVStorePreferences>, store_preferences: Arc<KVStorePreferences>,
user_service: Arc<dyn AIUserService>, user_service: Arc<dyn AIUserService>,
} }
@ -80,7 +80,7 @@ impl LocalAIController {
res_impl, res_impl,
tx, tx,
)); ));
let current_chat_id = Mutex::new(None); let current_chat_id = ArcSwapOption::default();
let mut running_state_rx = local_ai.subscribe_running_state(); let mut running_state_rx = local_ai.subscribe_running_state();
let cloned_llm_res = llm_res.clone(); let cloned_llm_res = llm_res.clone();
@ -205,12 +205,14 @@ impl LocalAIController {
// Only keep one chat open at a time, since loading multiple models at once causes // Only keep one chat open at a time, since loading multiple models at once causes
// memory issues. // memory issues.
if let Some(current_chat_id) = self.current_chat_id.lock().as_ref() { if let Some(current_chat_id) = self.current_chat_id.load().as_ref() {
debug!("[AI Plugin] close previous chat: {}", current_chat_id); debug!("[AI Plugin] close previous chat: {}", current_chat_id);
self.close_chat(current_chat_id); self.close_chat(current_chat_id);
} }
*self.current_chat_id.lock() = Some(chat_id.to_string()); self
.current_chat_id
.store(Some(Arc::new(chat_id.to_string())));
let chat_id = chat_id.to_string(); let chat_id = chat_id.to_string();
let weak_ctrl = Arc::downgrade(&self.local_ai); let weak_ctrl = Arc::downgrade(&self.local_ai);
tokio::spawn(async move { tokio::spawn(async move {
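
current_chat_id moves from a parking_lot Mutex<Option<String>> to ArcSwapOption<String>: load() takes a cheap snapshot to close the previous chat and store() publishes the new id without locking. A stand-alone sketch of that flow; ChatController stands in for the relevant part of LocalAIController:

use arc_swap::ArcSwapOption;
use std::sync::Arc;

struct ChatController {
  current_chat_id: ArcSwapOption<String>,
}

impl ChatController {
  fn open_chat(&self, chat_id: &str) {
    // load() gives a cheap snapshot of the currently open chat, if any.
    if let Some(previous) = self.current_chat_id.load().as_ref() {
      println!("closing previous chat {previous}");
    }
    // store() publishes the new id without taking any lock.
    self.current_chat_id.store(Some(Arc::new(chat_id.to_string())));
  }
}

fn main() {
  let controller = ChatController { current_chat_id: ArcSwapOption::default() };
  controller.open_chat("chat-1");
  controller.open_chat("chat-2");
  assert_eq!(
    controller.current_chat_id.load_full().as_deref().map(String::as_str),
    Some("chat-2")
  );
}
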
@ -534,7 +536,7 @@ impl LLMResourceService for LLMResourceServiceImpl {
fn store_setting(&self, setting: LLMSetting) -> Result<(), Error> { fn store_setting(&self, setting: LLMSetting) -> Result<(), Error> {
self self
.store_preferences .store_preferences
.set_object(LOCAL_AI_SETTING_KEY, setting)?; .set_object(LOCAL_AI_SETTING_KEY, &setting)?;
Ok(()) Ok(())
} }

View File

@ -9,8 +9,8 @@ use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use futures::Sink; use futures::Sink;
use futures_util::SinkExt; use futures_util::SinkExt;
use lib_infra::async_trait::async_trait; use lib_infra::async_trait::async_trait;
use parking_lot::RwLock;
use arc_swap::ArcSwapOption;
use lib_infra::util::{get_operating_system, OperatingSystem}; use lib_infra::util::{get_operating_system, OperatingSystem};
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
@ -64,10 +64,10 @@ impl DownloadTask {
pub struct LocalAIResourceController { pub struct LocalAIResourceController {
user_service: Arc<dyn AIUserService>, user_service: Arc<dyn AIUserService>,
resource_service: Arc<dyn LLMResourceService>, resource_service: Arc<dyn LLMResourceService>,
llm_setting: RwLock<Option<LLMSetting>>, llm_setting: ArcSwapOption<LLMSetting>,
// The ai_config will be set when the user fetches the latest local AI config from the server // The ai_config will be set when the user fetches the latest local AI config from the server
ai_config: RwLock<Option<LocalAIConfig>>, ai_config: ArcSwapOption<LocalAIConfig>,
download_task: Arc<RwLock<Option<DownloadTask>>>, download_task: Arc<ArcSwapOption<DownloadTask>>,
resource_notify: tokio::sync::mpsc::Sender<()>, resource_notify: tokio::sync::mpsc::Sender<()>,
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))] #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
#[allow(dead_code)] #[allow(dead_code)]
@ -82,7 +82,7 @@ impl LocalAIResourceController {
resource_notify: tokio::sync::mpsc::Sender<()>, resource_notify: tokio::sync::mpsc::Sender<()>,
) -> Self { ) -> Self {
let (offline_app_state_sender, _) = tokio::sync::broadcast::channel(1); let (offline_app_state_sender, _) = tokio::sync::broadcast::channel(1);
let llm_setting = RwLock::new(resource_service.retrieve_setting()); let llm_setting = resource_service.retrieve_setting().map(Arc::new);
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))] #[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
let mut offline_app_disk_watch: Option<WatchContext> = None; let mut offline_app_disk_watch: Option<WatchContext> = None;
@ -109,7 +109,7 @@ impl LocalAIResourceController {
Self { Self {
user_service, user_service,
resource_service: Arc::new(resource_service), resource_service: Arc::new(resource_service),
llm_setting, llm_setting: ArcSwapOption::new(llm_setting),
ai_config: Default::default(), ai_config: Default::default(),
download_task: Default::default(), download_task: Default::default(),
resource_notify, resource_notify,
@ -125,7 +125,7 @@ impl LocalAIResourceController {
} }
fn set_llm_setting(&self, llm_setting: LLMSetting) { fn set_llm_setting(&self, llm_setting: LLMSetting) {
*self.llm_setting.write() = Some(llm_setting); self.llm_setting.store(Some(llm_setting.into()));
} }
/// Returns true when all resources are downloaded and ready to use. /// Returns true when all resources are downloaded and ready to use.
@ -153,7 +153,7 @@ impl LocalAIResourceController {
return Err(FlowyError::local_ai().with_context("No model found")); return Err(FlowyError::local_ai().with_context("No model found"));
} }
*self.ai_config.write() = Some(ai_config.clone()); self.ai_config.store(Some(ai_config.clone().into()));
let selected_model = self.select_model(&ai_config)?; let selected_model = self.select_model(&ai_config)?;
let llm_setting = LLMSetting { let llm_setting = LLMSetting {
@ -173,7 +173,7 @@ impl LocalAIResourceController {
pub fn use_local_llm(&self, llm_id: i64) -> FlowyResult<LocalModelResourcePB> { pub fn use_local_llm(&self, llm_id: i64) -> FlowyResult<LocalModelResourcePB> {
let (app, llm_model) = self let (app, llm_model) = self
.ai_config .ai_config
.read() .load()
.as_ref() .as_ref()
.and_then(|config| { .and_then(|config| {
config config
@ -209,7 +209,7 @@ impl LocalAIResourceController {
let pending_resources = self.calculate_pending_resources().ok()?; let pending_resources = self.calculate_pending_resources().ok()?;
let is_ready = pending_resources.is_empty(); let is_ready = pending_resources.is_empty();
let is_downloading = self.download_task.read().is_some(); let is_downloading = self.download_task.load().is_some();
let pending_resources: Vec<_> = pending_resources let pending_resources: Vec<_> = pending_resources
.into_iter() .into_iter()
.flat_map(|res| match res { .flat_map(|res| match res {
@ -243,7 +243,7 @@ impl LocalAIResourceController {
/// Returns true when all resources are downloaded and ready to use. /// Returns true when all resources are downloaded and ready to use.
pub fn calculate_pending_resources(&self) -> FlowyResult<Vec<PendingResource>> { pub fn calculate_pending_resources(&self) -> FlowyResult<Vec<PendingResource>> {
match self.llm_setting.read().as_ref() { match self.llm_setting.load().as_ref() {
None => Err(FlowyError::local_ai().with_context("Can't find any llm config")), None => Err(FlowyError::local_ai().with_context("Can't find any llm config")),
Some(llm_setting) => { Some(llm_setting) => {
let mut resources = vec![]; let mut resources = vec![];
@ -296,7 +296,7 @@ impl LocalAIResourceController {
info!("notify download finish, need to reload resources"); info!("notify download finish, need to reload resources");
let _ = resource_notify.send(()).await; let _ = resource_notify.send(()).await;
if let Some(download_task) = weak_download_task.upgrade() { if let Some(download_task) = weak_download_task.upgrade() {
if let Some(task) = download_task.write().take() { if let Some(task) = download_task.swap(None) {
task.cancel(); task.cancel();
} }
} }
@ -307,7 +307,9 @@ impl LocalAIResourceController {
}; };
// Return immediately if a download task already exists // Return immediately if a download task already exists
if let Some(download_task) = self.download_task.read().as_ref() { {
let guard = self.download_task.load();
if let Some(download_task) = &*guard {
trace!( trace!(
"Download task already exists, return the task id: {}", "Download task already exists, return the task id: {}",
task_id task_id
@ -315,17 +317,17 @@ impl LocalAIResourceController {
progress_notify(download_task.tx.subscribe()); progress_notify(download_task.tx.subscribe());
return Ok(task_id); return Ok(task_id);
} }
}
// If the download task does not exist, create a new one. // If the download task does not exist, create a new one.
info!("[LLM Resource] Start new download task"); info!("[LLM Resource] Start new download task");
let llm_setting = self let llm_setting = self
.llm_setting .llm_setting
.read() .load_full()
.clone()
.ok_or_else(|| FlowyError::local_ai().with_context("No local ai config found"))?; .ok_or_else(|| FlowyError::local_ai().with_context("No local ai config found"))?;
let download_task = DownloadTask::new(); let download_task = Arc::new(DownloadTask::new());
*self.download_task.write() = Some(download_task.clone()); self.download_task.store(Some(download_task.clone()));
progress_notify(download_task.tx.subscribe()); progress_notify(download_task.tx.subscribe());
let model_dir = self.user_model_folder()?; let model_dir = self.user_model_folder()?;
@ -339,15 +341,15 @@ impl LocalAIResourceController {
// After downloading the plugin, start downloading the models // After downloading the plugin, start downloading the models
let chat_model_file = ( let chat_model_file = (
model_dir.join(&llm_setting.llm_model.chat_model.file_name), model_dir.join(&llm_setting.llm_model.chat_model.file_name),
llm_setting.llm_model.chat_model.file_name, &llm_setting.llm_model.chat_model.file_name,
llm_setting.llm_model.chat_model.name, &llm_setting.llm_model.chat_model.name,
llm_setting.llm_model.chat_model.download_url, &llm_setting.llm_model.chat_model.download_url,
); );
let embedding_model_file = ( let embedding_model_file = (
model_dir.join(&llm_setting.llm_model.embedding_model.file_name), model_dir.join(&llm_setting.llm_model.embedding_model.file_name),
llm_setting.llm_model.embedding_model.file_name, &llm_setting.llm_model.embedding_model.file_name,
llm_setting.llm_model.embedding_model.name, &llm_setting.llm_model.embedding_model.name,
llm_setting.llm_model.embedding_model.download_url, &llm_setting.llm_model.embedding_model.download_url,
); );
for (file_path, file_name, model_name, url) in [chat_model_file, embedding_model_file] { for (file_path, file_name, model_name, url) in [chat_model_file, embedding_model_file] {
if file_path.exists() { if file_path.exists() {
@ -370,9 +372,9 @@ impl LocalAIResourceController {
} }
}); });
match download_model( match download_model(
&url, url,
&model_dir, &model_dir,
&file_name, file_name,
Some(progress), Some(progress),
Some(download_task.cancel_token.clone()), Some(download_task.cancel_token.clone()),
) )
@ -400,7 +402,7 @@ impl LocalAIResourceController {
} }
pub fn cancel_download(&self) -> FlowyResult<()> { pub fn cancel_download(&self) -> FlowyResult<()> {
if let Some(cancel_token) = self.download_task.write().take() { if let Some(cancel_token) = self.download_task.swap(None) {
info!("[LLM Resource] Cancel download"); info!("[LLM Resource] Cancel download");
cancel_token.cancel(); cancel_token.cancel();
} }
@ -416,9 +418,7 @@ impl LocalAIResourceController {
let llm_setting = self let llm_setting = self
.llm_setting .llm_setting
.read() .load_full()
.as_ref()
.cloned()
.ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?; .ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?;
let model_dir = self.user_model_folder()?; let model_dir = self.user_model_folder()?;
@ -475,16 +475,14 @@ impl LocalAIResourceController {
} }
pub fn get_selected_model(&self) -> Option<LLMModel> { pub fn get_selected_model(&self) -> Option<LLMModel> {
self let setting = self.llm_setting.load();
.llm_setting Some(setting.as_ref()?.llm_model.clone())
.read()
.as_ref()
.map(|setting| setting.llm_model.clone())
} }
/// Selects the appropriate model based on the current settings or defaults to the first model. /// Selects the appropriate model based on the current settings or defaults to the first model.
fn select_model(&self, ai_config: &LocalAIConfig) -> FlowyResult<LLMModel> { fn select_model(&self, ai_config: &LocalAIConfig) -> FlowyResult<LLMModel> {
let selected_model = match self.llm_setting.read().as_ref() { let llm_setting = self.llm_setting.load();
let selected_model = match &*llm_setting {
None => ai_config.models[0].clone(), None => ai_config.models[0].clone(),
Some(llm_setting) => { Some(llm_setting) => {
match ai_config match ai_config
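
The hunks above replace the parking_lot RwLock<Option<..>> fields (llm_setting, ai_config, download_task) with arc_swap's ArcSwapOption, which swaps an Arc atomically instead of taking a read/write lock. A minimal, self-contained sketch of that access pattern (the Setting and Controller names are illustrative stand-ins, not the real flowy-ai types):

use std::sync::Arc;

use arc_swap::ArcSwapOption;

// Illustrative stand-in for a value such as LLMSetting.
struct Setting {
  model_name: String,
}

struct Controller {
  // Replaces RwLock<Option<Setting>>: readers and writers never block each other.
  setting: ArcSwapOption<Setting>,
}

impl Controller {
  fn new(initial: Option<Setting>) -> Self {
    Self {
      setting: ArcSwapOption::new(initial.map(Arc::new)),
    }
  }

  // Equivalent of `*self.setting.write() = Some(new)`.
  fn set(&self, new: Setting) {
    self.setting.store(Some(Arc::new(new)));
  }

  // Equivalent of `self.setting.read().as_ref().map(..)`. `load` returns a cheap
  // guard; `load_full` would clone out an owned Option<Arc<Setting>> instead.
  fn model_name(&self) -> Option<String> {
    let guard = self.setting.load();
    guard.as_ref().map(|s| s.model_name.clone())
  }

  // Equivalent of `self.setting.write().take()`: atomically move the value out.
  fn take(&self) -> Option<Arc<Setting>> {
    self.setting.swap(None)
  }
}

fn main() {
  let controller = Controller::new(None);
  controller.set(Setting { model_name: "demo".into() });
  assert_eq!(controller.model_name().as_deref(), Some("demo"));
  assert!(controller.take().is_some());
  assert!(controller.model_name().is_none());
}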

View File

@ -44,8 +44,9 @@ bytes.workspace = true
tokio = { workspace = true, features = ["full"] } tokio = { workspace = true, features = ["full"] }
tokio-stream = { workspace = true, features = ["sync"] } tokio-stream = { workspace = true, features = ["sync"] }
console-subscriber = { version = "0.2", optional = true } console-subscriber = { version = "0.2", optional = true }
parking_lot.workspace = true
anyhow.workspace = true anyhow.workspace = true
dashmap.workspace = true
arc-swap.workspace = true
base64 = "0.21.5" base64 = "0.21.5"
lib-infra = { workspace = true } lib-infra = { workspace = true }

View File

@ -6,7 +6,6 @@ use semver::Version;
use tracing::{error, info}; use tracing::{error, info};
use flowy_server_pub::af_cloud_config::AFCloudConfiguration; use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_user::services::entities::URL_SAFE_ENGINE; use flowy_user::services::entities::URL_SAFE_ENGINE;
use lib_infra::file_util::copy_dir_recursive; use lib_infra::file_util::copy_dir_recursive;
use lib_infra::util::OperatingSystem; use lib_infra::util::OperatingSystem;
@ -85,13 +84,7 @@ impl AppFlowyCoreConfig {
) -> Self { ) -> Self {
let cloud_config = AFCloudConfiguration::from_env().ok(); let cloud_config = AFCloudConfiguration::from_env().ok();
let storage_path = match &cloud_config { let storage_path = match &cloud_config {
None => {
let supabase_config = SupabaseConfiguration::from_env().ok();
match &supabase_config {
None => custom_application_path, None => custom_application_path,
Some(config) => make_user_data_folder(&custom_application_path, &config.url),
}
},
Some(config) => make_user_data_folder(&custom_application_path, &config.base_url), Some(config) => make_user_data_folder(&custom_application_path, &config.base_url),
}; };
let log_filter = create_log_filter("info".to_owned(), vec![], OperatingSystem::from(&platform)); let log_filter = create_log_filter("info".to_owned(), vec![], OperatingSystem::from(&platform));

View File

@ -11,7 +11,7 @@ use flowy_database2::DatabaseManager;
use flowy_document::entities::DocumentDataPB; use flowy_document::entities::DocumentDataPB;
use flowy_document::manager::DocumentManager; use flowy_document::manager::DocumentManager;
use flowy_document::parser::json::parser::JsonToDocumentParser; use flowy_document::parser::json::parser::JsonToDocumentParser;
use flowy_error::FlowyError; use flowy_error::{FlowyError, FlowyResult};
use flowy_folder::entities::{CreateViewParams, ViewLayoutPB}; use flowy_folder::entities::{CreateViewParams, ViewLayoutPB};
use flowy_folder::manager::{FolderManager, FolderUser}; use flowy_folder::manager::{FolderManager, FolderUser};
use flowy_folder::share::ImportType; use flowy_folder::share::ImportType;
@ -26,7 +26,6 @@ use flowy_sqlite::kv::KVStorePreferences;
use flowy_user::services::authenticate_user::AuthenticateUser; use flowy_user::services::authenticate_user::AuthenticateUser;
use flowy_user::services::data_import::{load_collab_by_object_id, load_collab_by_object_ids}; use flowy_user::services::data_import::{load_collab_by_object_id, load_collab_by_object_ids};
use lib_dispatch::prelude::ToBytes; use lib_dispatch::prelude::ToBytes;
use std::collections::HashMap; use std::collections::HashMap;
use std::convert::TryFrom; use std::convert::TryFrom;
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
@ -111,6 +110,10 @@ impl FolderUser for FolderUserImpl {
fn collab_db(&self, uid: i64) -> Result<Weak<CollabKVDB>, FlowyError> { fn collab_db(&self, uid: i64) -> Result<Weak<CollabKVDB>, FlowyError> {
self.upgrade_user()?.get_collab_db(uid) self.upgrade_user()?.get_collab_db(uid)
} }
fn is_folder_exist_on_disk(&self, uid: i64, workspace_id: &str) -> FlowyResult<bool> {
self.upgrade_user()?.is_collab_on_disk(uid, workspace_id)
}
} }
struct DocumentFolderOperation(Arc<DocumentManager>); struct DocumentFolderOperation(Arc<DocumentManager>);

View File

@ -1,18 +1,17 @@
use std::collections::HashMap; use arc_swap::ArcSwapOption;
use dashmap::DashMap;
use std::fmt::{Display, Formatter}; use std::fmt::{Display, Formatter};
use std::sync::atomic::{AtomicBool, AtomicU8, Ordering};
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use parking_lot::RwLock;
use serde_repr::*; use serde_repr::*;
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use flowy_server::af_cloud::define::ServerUser; use flowy_server::af_cloud::define::ServerUser;
use flowy_server::af_cloud::AppFlowyCloudServer; use flowy_server::af_cloud::AppFlowyCloudServer;
use flowy_server::local_server::{LocalServer, LocalServerDB}; use flowy_server::local_server::{LocalServer, LocalServerDB};
use flowy_server::supabase::SupabaseServer;
use flowy_server::{AppFlowyEncryption, AppFlowyServer, EncryptionImpl}; use flowy_server::{AppFlowyEncryption, AppFlowyServer, EncryptionImpl};
use flowy_server_pub::af_cloud_config::AFCloudConfiguration; use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_server_pub::AuthenticatorType; use flowy_server_pub::AuthenticatorType;
use flowy_sqlite::kv::KVStorePreferences; use flowy_sqlite::kv::KVStorePreferences;
use flowy_user_pub::entities::*; use flowy_user_pub::entities::*;
@ -26,12 +25,8 @@ pub enum Server {
/// Offline mode, no user authentication and the data is stored locally. /// Offline mode, no user authentication and the data is stored locally.
Local = 0, Local = 0,
/// AppFlowy Cloud server provider. /// AppFlowy Cloud server provider.
/// The [AppFlowy-Server](https://github.com/AppFlowy-IO/AppFlowy-Cloud) is still a work in /// See: https://github.com/AppFlowy-IO/AppFlowy-Cloud
/// progress.
AppFlowyCloud = 1, AppFlowyCloud = 1,
/// Supabase server provider.
/// It uses supabase postgresql database to store data and user authentication.
Supabase = 2,
} }
impl Server { impl Server {
@ -45,7 +40,6 @@ impl Display for Server {
match self { match self {
Server::Local => write!(f, "Local"), Server::Local => write!(f, "Local"),
Server::AppFlowyCloud => write!(f, "AppFlowyCloud"), Server::AppFlowyCloud => write!(f, "AppFlowyCloud"),
Server::Supabase => write!(f, "Supabase"),
} }
} }
} }
@ -56,16 +50,16 @@ impl Display for Server {
/// Each server implements the [AppFlowyServer] trait, which provides the [UserCloudService], etc. /// Each server implements the [AppFlowyServer] trait, which provides the [UserCloudService], etc.
pub struct ServerProvider { pub struct ServerProvider {
config: AppFlowyCoreConfig, config: AppFlowyCoreConfig,
providers: RwLock<HashMap<Server, Arc<dyn AppFlowyServer>>>, providers: DashMap<Server, Arc<dyn AppFlowyServer>>,
pub(crate) encryption: RwLock<Arc<dyn AppFlowyEncryption>>, pub(crate) encryption: Arc<dyn AppFlowyEncryption>,
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) store_preferences: Weak<KVStorePreferences>, pub(crate) store_preferences: Weak<KVStorePreferences>,
pub(crate) user_enable_sync: RwLock<bool>, pub(crate) user_enable_sync: AtomicBool,
/// The authenticator type of the user. /// The authenticator type of the user.
authenticator: RwLock<Authenticator>, authenticator: AtomicU8,
user: Arc<dyn ServerUser>, user: Arc<dyn ServerUser>,
pub(crate) uid: Arc<RwLock<Option<i64>>>, pub(crate) uid: Arc<ArcSwapOption<i64>>,
} }
impl ServerProvider { impl ServerProvider {
@ -79,10 +73,10 @@ impl ServerProvider {
let encryption = EncryptionImpl::new(None); let encryption = EncryptionImpl::new(None);
Self { Self {
config, config,
providers: RwLock::new(HashMap::new()), providers: DashMap::new(),
user_enable_sync: RwLock::new(true), user_enable_sync: AtomicBool::new(true),
authenticator: RwLock::new(Authenticator::from(server)), authenticator: AtomicU8::new(Authenticator::from(server) as u8),
encryption: RwLock::new(Arc::new(encryption)), encryption: Arc::new(encryption),
store_preferences, store_preferences,
uid: Default::default(), uid: Default::default(),
user, user,
@ -90,33 +84,34 @@ impl ServerProvider {
} }
pub fn get_server_type(&self) -> Server { pub fn get_server_type(&self) -> Server {
match &*self.authenticator.read() { match Authenticator::from(self.authenticator.load(Ordering::Acquire) as i32) {
Authenticator::Local => Server::Local, Authenticator::Local => Server::Local,
Authenticator::AppFlowyCloud => Server::AppFlowyCloud, Authenticator::AppFlowyCloud => Server::AppFlowyCloud,
Authenticator::Supabase => Server::Supabase,
} }
} }
pub fn set_authenticator(&self, authenticator: Authenticator) { pub fn set_authenticator(&self, authenticator: Authenticator) {
let old_server_type = self.get_server_type(); let old_server_type = self.get_server_type();
*self.authenticator.write() = authenticator; self
.authenticator
.store(authenticator as u8, Ordering::Release);
let new_server_type = self.get_server_type(); let new_server_type = self.get_server_type();
if old_server_type != new_server_type { if old_server_type != new_server_type {
self.providers.write().remove(&old_server_type); self.providers.remove(&old_server_type);
} }
} }
pub fn get_authenticator(&self) -> Authenticator { pub fn get_authenticator(&self) -> Authenticator {
self.authenticator.read().clone() Authenticator::from(self.authenticator.load(Ordering::Acquire) as i32)
} }
/// Returns an [AppFlowyServer] trait implementation based on the provider_type. /// Returns an [AppFlowyServer] trait implementation based on the provider_type.
pub fn get_server(&self) -> FlowyResult<Arc<dyn AppFlowyServer>> { pub fn get_server(&self) -> FlowyResult<Arc<dyn AppFlowyServer>> {
let server_type = self.get_server_type(); let server_type = self.get_server_type();
if let Some(provider) = self.providers.read().get(&server_type) { if let Some(provider) = self.providers.get(&server_type) {
return Ok(provider.clone()); return Ok(provider.value().clone());
} }
let server = match server_type { let server = match server_type {
@ -131,7 +126,7 @@ impl ServerProvider {
let config = AFCloudConfiguration::from_env()?; let config = AFCloudConfiguration::from_env()?;
let server = Arc::new(AppFlowyCloudServer::new( let server = Arc::new(AppFlowyCloudServer::new(
config, config,
*self.user_enable_sync.read(), self.user_enable_sync.load(Ordering::Acquire),
self.config.device_id.clone(), self.config.device_id.clone(),
self.config.app_version.clone(), self.config.app_version.clone(),
self.user.clone(), self.user.clone(),
@ -139,25 +134,9 @@ impl ServerProvider {
Ok::<Arc<dyn AppFlowyServer>, FlowyError>(server) Ok::<Arc<dyn AppFlowyServer>, FlowyError>(server)
}, },
Server::Supabase => {
let config = SupabaseConfiguration::from_env()?;
let uid = self.uid.clone();
tracing::trace!("🔑Supabase config: {:?}", config);
let encryption = Arc::downgrade(&*self.encryption.read());
Ok::<Arc<dyn AppFlowyServer>, FlowyError>(Arc::new(SupabaseServer::new(
uid,
config,
*self.user_enable_sync.read(),
self.config.device_id.clone(),
encryption,
)))
},
}?; }?;
self self.providers.insert(server_type.clone(), server.clone());
.providers
.write()
.insert(server_type.clone(), server.clone());
Ok(server) Ok(server)
} }
} }
@ -167,7 +146,6 @@ impl From<Authenticator> for Server {
match auth_provider { match auth_provider {
Authenticator::Local => Server::Local, Authenticator::Local => Server::Local,
Authenticator::AppFlowyCloud => Server::AppFlowyCloud, Authenticator::AppFlowyCloud => Server::AppFlowyCloud,
Authenticator::Supabase => Server::Supabase,
} }
} }
} }
@ -177,7 +155,6 @@ impl From<Server> for Authenticator {
match ty { match ty {
Server::Local => Authenticator::Local, Server::Local => Authenticator::Local,
Server::AppFlowyCloud => Authenticator::AppFlowyCloud, Server::AppFlowyCloud => Authenticator::AppFlowyCloud,
Server::Supabase => Authenticator::Supabase,
} }
} }
} }
@ -190,7 +167,6 @@ impl From<&Authenticator> for Server {
pub fn current_server_type() -> Server { pub fn current_server_type() -> Server {
match AuthenticatorType::from_env() { match AuthenticatorType::from_env() {
AuthenticatorType::Local => Server::Local, AuthenticatorType::Local => Server::Local,
AuthenticatorType::Supabase => Server::Supabase,
AuthenticatorType::AppFlowyCloud => Server::AppFlowyCloud, AuthenticatorType::AppFlowyCloud => Server::AppFlowyCloud,
} }
} }
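
In the ServerProvider diff above, the RwLock-guarded HashMap of servers becomes a DashMap and the small pieces of state (sync flag, authenticator) become atomics, so no method holds a lock guard across calls. A rough sketch of the same pattern, assuming the dashmap crate's get/insert/remove API; the Server enum round-trip through u8 and the String provider are simplified stand-ins for the real types:

use std::sync::atomic::{AtomicBool, AtomicU8, Ordering};
use std::sync::Arc;

use dashmap::DashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[repr(u8)]
enum Server {
  Local = 0,
  AppFlowyCloud = 1,
}

impl From<u8> for Server {
  fn from(value: u8) -> Self {
    match value {
      1 => Server::AppFlowyCloud,
      _ => Server::Local,
    }
  }
}

struct Provider {
  // Replaces RwLock<HashMap<Server, Arc<dyn ...>>>: DashMap locks per shard,
  // so no outer guard is held while a provider is created.
  providers: DashMap<Server, Arc<String>>,
  // Replaces RwLock<bool> and RwLock<Authenticator>.
  user_enable_sync: AtomicBool,
  server: AtomicU8,
}

impl Provider {
  fn current_server(&self) -> Server {
    Server::from(self.server.load(Ordering::Acquire))
  }

  fn set_server(&self, server: Server) {
    let old = self.current_server();
    self.server.store(server as u8, Ordering::Release);
    if old != server {
      // remove() takes &key and drops the evicted provider, if any.
      self.providers.remove(&old);
    }
  }

  fn get_or_create(&self, server: Server) -> Arc<String> {
    if let Some(entry) = self.providers.get(&server) {
      return entry.value().clone();
    }
    let created = Arc::new(format!("{server:?} provider"));
    self.providers.insert(server, created.clone());
    created
  }
}

fn main() {
  let provider = Provider {
    providers: DashMap::new(),
    user_enable_sync: AtomicBool::new(true),
    server: AtomicU8::new(Server::Local as u8),
  };
  let _local = provider.get_or_create(provider.current_server());
  provider.set_server(Server::AppFlowyCloud);
  assert!(provider.user_enable_sync.load(Ordering::Acquire));
  assert!(provider.providers.get(&Server::Local).is_none());
}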

View File

@ -2,6 +2,7 @@ use client_api::entity::search_dto::SearchDocumentResponseItem;
use flowy_search_pub::cloud::SearchCloudService; use flowy_search_pub::cloud::SearchCloudService;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::sync::atomic::Ordering;
use std::sync::Arc; use std::sync::Arc;
use anyhow::Error; use anyhow::Error;
@ -9,10 +10,9 @@ use client_api::collab_sync::{SinkConfig, SyncObject, SyncPlugin};
use client_api::entity::ai_dto::{CompletionType, RepeatedRelatedQuestion}; use client_api::entity::ai_dto::{CompletionType, RepeatedRelatedQuestion};
use client_api::entity::ChatMessageType; use client_api::entity::ChatMessageType;
use collab::core::origin::{CollabClient, CollabOrigin}; use collab::core::origin::{CollabClient, CollabOrigin};
use collab::entity::EncodedCollab;
use collab::preclude::CollabPlugin; use collab::preclude::CollabPlugin;
use collab_entity::CollabType; use collab_entity::CollabType;
use collab_plugins::cloud_storage::postgres::SupabaseDBPlugin;
use serde_json::Value; use serde_json::Value;
use tokio_stream::wrappers::WatchStream; use tokio_stream::wrappers::WatchStream;
use tracing::{debug, info}; use tracing::{debug, info};
@ -25,8 +25,8 @@ use flowy_ai_pub::cloud::{
RepeatedChatMessage, StreamAnswer, StreamComplete, RepeatedChatMessage, StreamAnswer, StreamComplete,
}; };
use flowy_database_pub::cloud::{ use flowy_database_pub::cloud::{
CollabDocStateByOid, DatabaseAIService, DatabaseCloudService, DatabaseSnapshot, DatabaseAIService, DatabaseCloudService, DatabaseSnapshot, EncodeCollabByOid, SummaryRowContent,
SummaryRowContent, TranslateRowContent, TranslateRowResponse, TranslateRowContent, TranslateRowResponse,
}; };
use flowy_document::deps::DocumentData; use flowy_document::deps::DocumentData;
use flowy_document_pub::cloud::{DocumentCloudService, DocumentSnapshot}; use flowy_document_pub::cloud::{DocumentCloudService, DocumentSnapshot};
@ -36,13 +36,11 @@ use flowy_folder_pub::cloud::{
}; };
use flowy_folder_pub::entities::{PublishInfoResponse, PublishPayload}; use flowy_folder_pub::entities::{PublishInfoResponse, PublishPayload};
use flowy_server_pub::af_cloud_config::AFCloudConfiguration; use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_storage_pub::cloud::{ObjectIdentity, ObjectValue, StorageCloudService}; use flowy_storage_pub::cloud::{ObjectIdentity, ObjectValue, StorageCloudService};
use flowy_storage_pub::storage::{CompletedPartRequest, CreateUploadResponse, UploadPartResponse}; use flowy_storage_pub::storage::{CompletedPartRequest, CreateUploadResponse, UploadPartResponse};
use flowy_user_pub::cloud::{UserCloudService, UserCloudServiceProvider}; use flowy_user_pub::cloud::{UserCloudService, UserCloudServiceProvider};
use flowy_user_pub::entities::{Authenticator, UserTokenState}; use flowy_user_pub::entities::{Authenticator, UserTokenState};
use lib_infra::async_trait::async_trait; use lib_infra::async_trait::async_trait;
use lib_infra::future::FutureResult;
use crate::integrate::server::{Server, ServerProvider}; use crate::integrate::server::{Server, ServerProvider};
@ -168,8 +166,8 @@ impl UserCloudServiceProvider for ServerProvider {
fn set_enable_sync(&self, uid: i64, enable_sync: bool) { fn set_enable_sync(&self, uid: i64, enable_sync: bool) {
if let Ok(server) = self.get_server() { if let Ok(server) = self.get_server() {
server.set_enable_sync(uid, enable_sync); server.set_enable_sync(uid, enable_sync);
*self.user_enable_sync.write() = enable_sync; self.user_enable_sync.store(enable_sync, Ordering::Release);
*self.uid.write() = Some(uid); self.uid.store(Some(uid.into()));
} }
} }
@ -195,7 +193,7 @@ impl UserCloudServiceProvider for ServerProvider {
fn set_encrypt_secret(&self, secret: String) { fn set_encrypt_secret(&self, secret: String) {
tracing::info!("🔑Set encrypt secret"); tracing::info!("🔑Set encrypt secret");
self.encryption.write().set_secret(secret); self.encryption.set_secret(secret);
} }
/// Returns the [UserCloudService] based on the current [Server]. /// Returns the [UserCloudService] based on the current [Server].
@ -211,93 +209,87 @@ impl UserCloudServiceProvider for ServerProvider {
Server::AppFlowyCloud => AFCloudConfiguration::from_env() Server::AppFlowyCloud => AFCloudConfiguration::from_env()
.map(|config| config.base_url) .map(|config| config.base_url)
.unwrap_or_default(), .unwrap_or_default(),
Server::Supabase => SupabaseConfiguration::from_env()
.map(|config| config.url)
.unwrap_or_default(),
} }
} }
} }
#[async_trait]
impl FolderCloudService for ServerProvider { impl FolderCloudService for ServerProvider {
fn create_workspace(&self, uid: i64, name: &str) -> FutureResult<Workspace, Error> { async fn create_workspace(&self, uid: i64, name: &str) -> Result<Workspace, Error> {
let server = self.get_server(); let server = self.get_server()?;
let name = name.to_string(); let name = name.to_string();
FutureResult::new(async move { server?.folder_service().create_workspace(uid, &name).await }) server.folder_service().create_workspace(uid, &name).await
} }
fn open_workspace(&self, workspace_id: &str) -> FutureResult<(), Error> { async fn open_workspace(&self, workspace_id: &str) -> Result<(), Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move { server?.folder_service().open_workspace(&workspace_id).await }) server.folder_service().open_workspace(&workspace_id).await
} }
fn get_all_workspace(&self) -> FutureResult<Vec<WorkspaceRecord>, Error> { async fn get_all_workspace(&self) -> Result<Vec<WorkspaceRecord>, Error> {
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move { server?.folder_service().get_all_workspace().await }) server.folder_service().get_all_workspace().await
} }
fn get_folder_data( async fn get_folder_data(
&self, &self,
workspace_id: &str, workspace_id: &str,
uid: &i64, uid: &i64,
) -> FutureResult<Option<FolderData>, Error> { ) -> Result<Option<FolderData>, Error> {
let uid = *uid; let uid = *uid;
let server = self.get_server(); let server = self.get_server()?;
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
FutureResult::new(async move {
server? server
.folder_service() .folder_service()
.get_folder_data(&workspace_id, &uid) .get_folder_data(&workspace_id, &uid)
.await .await
})
} }
fn get_folder_snapshots( async fn get_folder_snapshots(
&self, &self,
workspace_id: &str, workspace_id: &str,
limit: usize, limit: usize,
) -> FutureResult<Vec<FolderSnapshot>, Error> { ) -> Result<Vec<FolderSnapshot>, Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move {
server? server
.folder_service() .folder_service()
.get_folder_snapshots(&workspace_id, limit) .get_folder_snapshots(&workspace_id, limit)
.await .await
})
} }
fn get_folder_doc_state( async fn get_folder_doc_state(
&self, &self,
workspace_id: &str, workspace_id: &str,
uid: i64, uid: i64,
collab_type: CollabType, collab_type: CollabType,
object_id: &str, object_id: &str,
) -> FutureResult<Vec<u8>, Error> { ) -> Result<Vec<u8>, Error> {
let object_id = object_id.to_string(); let object_id = object_id.to_string();
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move {
server? server
.folder_service() .folder_service()
.get_folder_doc_state(&workspace_id, uid, collab_type, &object_id) .get_folder_doc_state(&workspace_id, uid, collab_type, &object_id)
.await .await
})
} }
fn batch_create_folder_collab_objects( async fn batch_create_folder_collab_objects(
&self, &self,
workspace_id: &str, workspace_id: &str,
objects: Vec<FolderCollabParams>, objects: Vec<FolderCollabParams>,
) -> FutureResult<(), Error> { ) -> Result<(), Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move {
server? server
.folder_service() .folder_service()
.batch_create_folder_collab_objects(&workspace_id, objects) .batch_create_folder_collab_objects(&workspace_id, objects)
.await .await
})
} }
fn service_name(&self) -> String { fn service_name(&self) -> String {
@ -307,114 +299,106 @@ impl FolderCloudService for ServerProvider {
.unwrap_or_default() .unwrap_or_default()
} }
fn publish_view( async fn publish_view(
&self, &self,
workspace_id: &str, workspace_id: &str,
payload: Vec<PublishPayload>, payload: Vec<PublishPayload>,
) -> FutureResult<(), Error> { ) -> Result<(), Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move {
server? server
.folder_service() .folder_service()
.publish_view(&workspace_id, payload) .publish_view(&workspace_id, payload)
.await .await
})
} }
fn unpublish_views(&self, workspace_id: &str, view_ids: Vec<String>) -> FutureResult<(), Error> { async fn unpublish_views(&self, workspace_id: &str, view_ids: Vec<String>) -> Result<(), Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move {
server? server
.folder_service() .folder_service()
.unpublish_views(&workspace_id, view_ids) .unpublish_views(&workspace_id, view_ids)
.await .await
})
} }
fn get_publish_info(&self, view_id: &str) -> FutureResult<PublishInfoResponse, Error> { async fn get_publish_info(&self, view_id: &str) -> Result<PublishInfoResponse, Error> {
let view_id = view_id.to_string(); let view_id = view_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move { server?.folder_service().get_publish_info(&view_id).await }) server.folder_service().get_publish_info(&view_id).await
} }
fn set_publish_namespace( async fn set_publish_namespace(
&self, &self,
workspace_id: &str, workspace_id: &str,
new_namespace: &str, new_namespace: &str,
) -> FutureResult<(), Error> { ) -> Result<(), Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let new_namespace = new_namespace.to_string(); let new_namespace = new_namespace.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move {
server? server
.folder_service() .folder_service()
.set_publish_namespace(&workspace_id, &new_namespace) .set_publish_namespace(&workspace_id, &new_namespace)
.await .await
})
} }
fn get_publish_namespace(&self, workspace_id: &str) -> FutureResult<String, Error> { async fn get_publish_namespace(&self, workspace_id: &str) -> Result<String, Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move {
server? server
.folder_service() .folder_service()
.get_publish_namespace(&workspace_id) .get_publish_namespace(&workspace_id)
.await .await
})
} }
} }
#[async_trait] #[async_trait]
impl DatabaseCloudService for ServerProvider { impl DatabaseCloudService for ServerProvider {
fn get_database_object_doc_state( async fn get_database_encode_collab(
&self, &self,
object_id: &str, object_id: &str,
collab_type: CollabType, collab_type: CollabType,
workspace_id: &str, workspace_id: &str,
) -> FutureResult<Option<Vec<u8>>, Error> { ) -> Result<Option<EncodedCollab>, Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
let database_id = object_id.to_string(); let database_id = object_id.to_string();
FutureResult::new(async move { server
server?
.database_service() .database_service()
.get_database_object_doc_state(&database_id, collab_type, &workspace_id) .get_database_encode_collab(&database_id, collab_type, &workspace_id)
.await .await
})
} }
fn batch_get_database_object_doc_state( async fn batch_get_database_encode_collab(
&self, &self,
object_ids: Vec<String>, object_ids: Vec<String>,
object_ty: CollabType, object_ty: CollabType,
workspace_id: &str, workspace_id: &str,
) -> FutureResult<CollabDocStateByOid, Error> { ) -> Result<EncodeCollabByOid, Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move {
server? server
.database_service() .database_service()
.batch_get_database_object_doc_state(object_ids, object_ty, &workspace_id) .batch_get_database_encode_collab(object_ids, object_ty, &workspace_id)
.await .await
})
} }
fn get_database_collab_object_snapshots( async fn get_database_collab_object_snapshots(
&self, &self,
object_id: &str, object_id: &str,
limit: usize, limit: usize,
) -> FutureResult<Vec<DatabaseSnapshot>, Error> { ) -> Result<Vec<DatabaseSnapshot>, Error> {
let server = self.get_server(); let server = self.get_server()?;
let database_id = object_id.to_string(); let database_id = object_id.to_string();
FutureResult::new(async move {
server? server
.database_service() .database_service()
.get_database_collab_object_snapshots(&database_id, limit) .get_database_collab_object_snapshots(&database_id, limit)
.await .await
})
} }
} }
@ -449,54 +433,52 @@ impl DatabaseAIService for ServerProvider {
} }
} }
#[async_trait]
impl DocumentCloudService for ServerProvider { impl DocumentCloudService for ServerProvider {
fn get_document_doc_state( async fn get_document_doc_state(
&self, &self,
document_id: &str, document_id: &str,
workspace_id: &str, workspace_id: &str,
) -> FutureResult<Vec<u8>, FlowyError> { ) -> Result<Vec<u8>, FlowyError> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let document_id = document_id.to_string(); let document_id = document_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
FutureResult::new(async move {
server? server
.document_service() .document_service()
.get_document_doc_state(&document_id, &workspace_id) .get_document_doc_state(&document_id, &workspace_id)
.await .await
})
} }
fn get_document_snapshots( async fn get_document_snapshots(
&self, &self,
document_id: &str, document_id: &str,
limit: usize, limit: usize,
workspace_id: &str, workspace_id: &str,
) -> FutureResult<Vec<DocumentSnapshot>, Error> { ) -> Result<Vec<DocumentSnapshot>, Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
let document_id = document_id.to_string(); let document_id = document_id.to_string();
FutureResult::new(async move {
server? server
.document_service() .document_service()
.get_document_snapshots(&document_id, limit, &workspace_id) .get_document_snapshots(&document_id, limit, &workspace_id)
.await .await
})
} }
fn get_document_data( async fn get_document_data(
&self, &self,
document_id: &str, document_id: &str,
workspace_id: &str, workspace_id: &str,
) -> FutureResult<Option<DocumentData>, Error> { ) -> Result<Option<DocumentData>, Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_server(); let server = self.get_server()?;
let document_id = document_id.to_string(); let document_id = document_id.to_string();
FutureResult::new(async move {
server? server
.document_service() .document_service()
.get_document_data(&document_id, &workspace_id) .get_document_data(&document_id, &workspace_id)
.await .await
})
} }
} }
@ -563,34 +545,11 @@ impl CollabCloudPluginProvider for ServerProvider {
vec![] vec![]
} }
}, },
CollabPluginProviderContext::Supabase {
uid,
collab_object,
local_collab,
local_collab_db,
} => {
let mut plugins: Vec<Box<dyn CollabPlugin>> = vec![];
if let Some(remote_collab_storage) = self
.get_server()
.ok()
.and_then(|provider| provider.collab_storage(&collab_object))
{
plugins.push(Box::new(SupabaseDBPlugin::new(
uid,
collab_object,
local_collab,
1,
remote_collab_storage,
local_collab_db,
)));
}
plugins
},
} }
} }
fn is_sync_enabled(&self) -> bool { fn is_sync_enabled(&self) -> bool {
*self.user_enable_sync.read() self.user_enable_sync.load(Ordering::Acquire)
} }
} }
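
The service impls above drop the FutureResult wrapper in favour of #[async_trait] methods, so each body can await and borrow its arguments directly instead of cloning them into an owned 'static future. A minimal sketch of that conversion with a made-up FolderService trait; it assumes the async-trait and tokio crates (the real code goes through lib_infra's async_trait re-export):

use anyhow::Error;
// The diff uses lib_infra::async_trait::async_trait, which re-exports this macro.
use async_trait::async_trait;

#[async_trait]
trait FolderService: Send + Sync {
  // Before: fn open_workspace(&self, workspace_id: &str) -> FutureResult<(), Error>,
  // which forced every argument to be cloned into an owned, 'static future.
  async fn open_workspace(&self, workspace_id: &str) -> Result<(), Error>;
}

struct Provider;

#[async_trait]
impl FolderService for Provider {
  async fn open_workspace(&self, workspace_id: &str) -> Result<(), Error> {
    // The body can await and borrow `workspace_id` directly; no
    // FutureResult::new(async move { .. }) wrapper is needed.
    println!("opening workspace {workspace_id}");
    Ok(())
  }
}

#[tokio::main]
async fn main() -> Result<(), Error> {
  let provider = Provider;
  provider.open_workspace("w1").await
}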

View File

@ -131,21 +131,12 @@ impl UserStatusCallback for UserStatusCallbackImpl {
create_if_not_exist: true, create_if_not_exist: true,
}, },
Server::AppFlowyCloud => FolderInitDataSource::Cloud(doc_state), Server::AppFlowyCloud => FolderInitDataSource::Cloud(doc_state),
Server::Supabase => {
if is_new_user {
FolderInitDataSource::LocalDisk {
create_if_not_exist: true,
}
} else {
FolderInitDataSource::Cloud(doc_state)
}
},
}, },
Err(err) => match server_type { Err(err) => match server_type {
Server::Local => FolderInitDataSource::LocalDisk { Server::Local => FolderInitDataSource::LocalDisk {
create_if_not_exist: true, create_if_not_exist: true,
}, },
Server::AppFlowyCloud | Server::Supabase => { Server::AppFlowyCloud => {
return Err(FlowyError::from(err)); return Err(FlowyError::from(err));
}, },
}, },

View File

@ -2,7 +2,6 @@
use flowy_search::folder::indexer::FolderIndexManagerImpl; use flowy_search::folder::indexer::FolderIndexManagerImpl;
use flowy_search::services::manager::SearchManager; use flowy_search::services::manager::SearchManager;
use parking_lot::Mutex;
use std::rc::Rc; use std::rc::Rc;
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use std::time::Duration; use std::time::Duration;
@ -302,7 +301,6 @@ impl From<Server> for CollabPluginProviderType {
match server_type { match server_type {
Server::Local => CollabPluginProviderType::Local, Server::Local => CollabPluginProviderType::Local,
Server::AppFlowyCloud => CollabPluginProviderType::AppFlowyCloud, Server::AppFlowyCloud => CollabPluginProviderType::AppFlowyCloud,
Server::Supabase => CollabPluginProviderType::Supabase,
} }
} }
} }
@ -323,13 +321,3 @@ impl ServerUser for ServerUserImpl {
self.upgrade_user()?.workspace_id() self.upgrade_user()?.workspace_id()
} }
} }
pub struct MutexAppFlowyCore(pub Rc<Mutex<AppFlowyCore>>);
impl MutexAppFlowyCore {
pub fn new(appflowy_core: AppFlowyCore) -> Self {
Self(Rc::new(Mutex::new(appflowy_core)))
}
}
unsafe impl Sync for MutexAppFlowyCore {}
unsafe impl Send for MutexAppFlowyCore {}

View File

@ -1,13 +1,12 @@
use anyhow::Error; use anyhow::Error;
pub use client_api::entity::ai_dto::{TranslateItem, TranslateRowResponse}; pub use client_api::entity::ai_dto::{TranslateItem, TranslateRowResponse};
use collab::core::collab::DataSource; use collab::entity::EncodedCollab;
use collab_entity::CollabType; use collab_entity::CollabType;
use flowy_error::FlowyError; use flowy_error::FlowyError;
use lib_infra::async_trait::async_trait; use lib_infra::async_trait::async_trait;
use lib_infra::future::FutureResult;
use std::collections::HashMap; use std::collections::HashMap;
pub type CollabDocStateByOid = HashMap<String, DataSource>; pub type EncodeCollabByOid = HashMap<String, EncodedCollab>;
pub type SummaryRowContent = HashMap<String, String>; pub type SummaryRowContent = HashMap<String, String>;
pub type TranslateRowContent = Vec<TranslateItem>; pub type TranslateRowContent = Vec<TranslateItem>;
@ -41,25 +40,25 @@ pub trait DatabaseAIService: Send + Sync {
/// ///
#[async_trait] #[async_trait]
pub trait DatabaseCloudService: Send + Sync { pub trait DatabaseCloudService: Send + Sync {
fn get_database_object_doc_state( async fn get_database_encode_collab(
&self, &self,
object_id: &str, object_id: &str,
collab_type: CollabType, collab_type: CollabType,
workspace_id: &str, workspace_id: &str,
) -> FutureResult<Option<Vec<u8>>, Error>; ) -> Result<Option<EncodedCollab>, Error>;
fn batch_get_database_object_doc_state( async fn batch_get_database_encode_collab(
&self, &self,
object_ids: Vec<String>, object_ids: Vec<String>,
object_ty: CollabType, object_ty: CollabType,
workspace_id: &str, workspace_id: &str,
) -> FutureResult<CollabDocStateByOid, Error>; ) -> Result<EncodeCollabByOid, Error>;
fn get_database_collab_object_snapshots( async fn get_database_collab_object_snapshots(
&self, &self,
object_id: &str, object_id: &str,
limit: usize, limit: usize,
) -> FutureResult<Vec<DatabaseSnapshot>, Error>; ) -> Result<Vec<DatabaseSnapshot>, Error>;
} }
pub struct DatabaseSnapshot { pub struct DatabaseSnapshot {
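
With the changes above, DatabaseCloudService methods are async and return EncodedCollab values keyed by object id (EncodeCollabByOid) instead of raw doc-state buffers. A simplified, self-contained sketch of the new shape using stand-in EncodedCollab and CollabType types; the real ones live in the collab crates:

use std::collections::HashMap;

use anyhow::Error;
use async_trait::async_trait;

// Stand-ins for collab::entity::EncodedCollab and collab_entity::CollabType.
#[allow(dead_code)]
struct EncodedCollab {
  state_vector: Vec<u8>,
  doc_state: Vec<u8>,
}

#[derive(Clone, Copy)]
enum CollabType {
  Database,
}

// Mirrors the renamed alias: object id -> encoded collab instead of a raw DataSource.
type EncodeCollabByOid = HashMap<String, EncodedCollab>;

#[async_trait]
trait DatabaseCloudService: Send + Sync {
  async fn get_database_encode_collab(
    &self,
    object_id: &str,
    collab_type: CollabType,
    workspace_id: &str,
  ) -> Result<Option<EncodedCollab>, Error>;

  async fn batch_get_database_encode_collab(
    &self,
    object_ids: Vec<String>,
    object_ty: CollabType,
    workspace_id: &str,
  ) -> Result<EncodeCollabByOid, Error>;
}

// A local/offline implementation simply reports that nothing is stored remotely.
struct LocalDatabaseCloud;

#[async_trait]
impl DatabaseCloudService for LocalDatabaseCloud {
  async fn get_database_encode_collab(
    &self,
    _object_id: &str,
    _collab_type: CollabType,
    _workspace_id: &str,
  ) -> Result<Option<EncodedCollab>, Error> {
    Ok(None)
  }

  async fn batch_get_database_encode_collab(
    &self,
    _object_ids: Vec<String>,
    _object_ty: CollabType,
    _workspace_id: &str,
  ) -> Result<EncodeCollabByOid, Error> {
    Ok(EncodeCollabByOid::new())
  }
}

#[tokio::main]
async fn main() -> Result<(), Error> {
  let service = LocalDatabaseCloud;
  let collab = service
    .get_database_encode_collab("database:1", CollabType::Database, "workspace:1")
    .await?;
  assert!(collab.is_none());
  Ok(())
}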

View File

@ -15,7 +15,6 @@ flowy-database-pub = { workspace = true }
flowy-derive.workspace = true flowy-derive.workspace = true
flowy-notification = { workspace = true } flowy-notification = { workspace = true }
parking_lot.workspace = true
protobuf.workspace = true protobuf.workspace = true
flowy-error = { path = "../flowy-error", features = [ flowy-error = { path = "../flowy-error", features = [
"impl_from_dispatch_error", "impl_from_dispatch_error",
@ -29,6 +28,7 @@ tracing.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true
serde_repr.workspace = true serde_repr.workspace = true
arc-swap.workspace = true
lib-infra = { workspace = true } lib-infra = { workspace = true }
chrono = { workspace = true, default-features = false, features = ["clock"] } chrono = { workspace = true, default-features = false, features = ["clock"] }
rust_decimal = "1.28.1" rust_decimal = "1.28.1"

View File

@ -3,7 +3,7 @@ use std::sync::{Arc, Weak};
use collab_database::rows::RowId; use collab_database::rows::RowId;
use lib_infra::box_any::BoxAny; use lib_infra::box_any::BoxAny;
use tokio::sync::oneshot; use tokio::sync::oneshot;
use tracing::error; use tracing::{error, trace};
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use lib_dispatch::prelude::{af_spawn, data_result_ok, AFPluginData, AFPluginState, DataResult}; use lib_dispatch::prelude::{af_spawn, data_result_ok, AFPluginData, AFPluginState, DataResult};
@ -33,8 +33,17 @@ pub(crate) async fn get_database_data_handler(
) -> DataResult<DatabasePB, FlowyError> { ) -> DataResult<DatabasePB, FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; let database_id = manager
.get_database_id_with_view_id(view_id.as_ref())
.await?;
let database_editor = manager.get_database_editor(&database_id).await?;
let data = database_editor.get_database_data(view_id.as_ref()).await?; let data = database_editor.get_database_data(view_id.as_ref()).await?;
trace!(
"layout: {:?}, rows: {}, fields: {}",
data.layout_type,
data.rows.len(),
data.fields.len()
);
data_result_ok(data) data_result_ok(data)
} }
@ -72,7 +81,9 @@ pub(crate) async fn get_database_setting_handler(
) -> DataResult<DatabaseViewSettingPB, FlowyError> { ) -> DataResult<DatabaseViewSettingPB, FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;
let data = database_editor let data = database_editor
.get_database_view_setting(view_id.as_ref()) .get_database_view_setting(view_id.as_ref())
.await?; .await?;
@ -86,7 +97,9 @@ pub(crate) async fn update_database_setting_handler(
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params = data.try_into_inner()?; let params = data.try_into_inner()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
if let Some(payload) = params.insert_filter { if let Some(payload) = params.insert_filter {
database_editor database_editor
@ -139,7 +152,9 @@ pub(crate) async fn get_all_filters_handler(
) -> DataResult<RepeatedFilterPB, FlowyError> { ) -> DataResult<RepeatedFilterPB, FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;
let filters = database_editor.get_all_filters(view_id.as_ref()).await; let filters = database_editor.get_all_filters(view_id.as_ref()).await;
data_result_ok(filters) data_result_ok(filters)
} }
@ -151,7 +166,9 @@ pub(crate) async fn get_all_sorts_handler(
) -> DataResult<RepeatedSortPB, FlowyError> { ) -> DataResult<RepeatedSortPB, FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;
let sorts = database_editor.get_all_sorts(view_id.as_ref()).await; let sorts = database_editor.get_all_sorts(view_id.as_ref()).await;
data_result_ok(sorts) data_result_ok(sorts)
} }
@ -163,7 +180,9 @@ pub(crate) async fn delete_all_sorts_handler(
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;
database_editor.delete_all_sorts(view_id.as_ref()).await; database_editor.delete_all_sorts(view_id.as_ref()).await;
Ok(()) Ok(())
} }
@ -175,9 +194,12 @@ pub(crate) async fn get_fields_handler(
) -> DataResult<RepeatedFieldPB, FlowyError> { ) -> DataResult<RepeatedFieldPB, FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: GetFieldParams = data.into_inner().try_into()?; let params: GetFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let fields = database_editor let fields = database_editor
.get_fields(&params.view_id, params.field_ids) .get_fields(&params.view_id, params.field_ids)
.await
.into_iter() .into_iter()
.map(FieldPB::new) .map(FieldPB::new)
.collect::<Vec<FieldPB>>() .collect::<Vec<FieldPB>>()
@ -192,9 +214,10 @@ pub(crate) async fn get_primary_field_handler(
) -> DataResult<FieldPB, FlowyError> { ) -> DataResult<FieldPB, FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let view_id = data.into_inner().value; let view_id = data.into_inner().value;
let database_editor = manager.get_database_with_view_id(&view_id).await?; let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;
let mut fields = database_editor let mut fields = database_editor
.get_fields(&view_id, None) .get_fields(&view_id, None)
.await
.into_iter() .into_iter()
.filter(|field| field.is_primary) .filter(|field| field.is_primary)
.map(FieldPB::new) .map(FieldPB::new)
@ -221,7 +244,9 @@ pub(crate) async fn update_field_handler(
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: FieldChangesetParams = data.into_inner().try_into()?; let params: FieldChangesetParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor.update_field(params).await?; database_editor.update_field(params).await?;
Ok(()) Ok(())
} }
@ -233,8 +258,10 @@ pub(crate) async fn update_field_type_option_handler(
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: TypeOptionChangesetParams = data.into_inner().try_into()?; let params: TypeOptionChangesetParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
if let Some(old_field) = database_editor.get_field(&params.field_id) { .get_database_editor_with_view_id(&params.view_id)
.await?;
if let Some(old_field) = database_editor.get_field(&params.field_id).await {
let field_type = FieldType::from(old_field.field_type); let field_type = FieldType::from(old_field.field_type);
let type_option_data = type_option_data_from_pb(params.type_option_data, &field_type)?; let type_option_data = type_option_data_from_pb(params.type_option_data, &field_type)?;
database_editor database_editor
@ -251,7 +278,9 @@ pub(crate) async fn delete_field_handler(
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: FieldIdParams = data.into_inner().try_into()?; let params: FieldIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor.delete_field(&params.field_id).await?; database_editor.delete_field(&params.field_id).await?;
Ok(()) Ok(())
} }
@ -263,7 +292,9 @@ pub(crate) async fn clear_field_handler(
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: FieldIdParams = data.into_inner().try_into()?; let params: FieldIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor database_editor
.clear_field(&params.view_id, &params.field_id) .clear_field(&params.view_id, &params.field_id)
.await?; .await?;
@ -277,14 +308,17 @@ pub(crate) async fn switch_to_field_handler(
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: EditFieldParams = data.into_inner().try_into()?; let params: EditFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
let old_field = database_editor.get_field(&params.field_id); .get_database_editor_with_view_id(&params.view_id)
.await?;
let old_field = database_editor.get_field(&params.field_id).await;
database_editor database_editor
.switch_to_field_type(&params.field_id, params.field_type) .switch_to_field_type(&params.field_id, params.field_type)
.await?; .await?;
if let Some(new_type_option) = database_editor if let Some(new_type_option) = database_editor
.get_field(&params.field_id) .get_field(&params.field_id)
.await
.map(|field| field.get_any_type_option(field.field_type)) .map(|field| field.get_any_type_option(field.field_type))
{ {
match (old_field, new_type_option) { match (old_field, new_type_option) {
@ -308,7 +342,9 @@ pub(crate) async fn duplicate_field_handler(
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: DuplicateFieldPayloadPB = data.into_inner(); let params: DuplicateFieldPayloadPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor database_editor
.duplicate_field(&params.view_id, &params.field_id) .duplicate_field(&params.view_id, &params.field_id)
.await?; .await?;
@ -323,7 +359,9 @@ pub(crate) async fn create_field_handler(
) -> DataResult<FieldPB, FlowyError> { ) -> DataResult<FieldPB, FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: CreateFieldParams = data.into_inner().try_into()?; let params: CreateFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let data = database_editor let data = database_editor
.create_field_with_type_option(params) .create_field_with_type_option(params)
.await?; .await?;
@ -338,7 +376,9 @@ pub(crate) async fn move_field_handler(
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: MoveFieldParams = data.into_inner().try_into()?; let params: MoveFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor.move_field(params).await?; database_editor.move_field(params).await?;
Ok(()) Ok(())
} }
@ -350,21 +390,42 @@ pub(crate) async fn get_row_handler(
) -> DataResult<OptionalRowPB, FlowyError> { ) -> DataResult<OptionalRowPB, FlowyError> {
let manager = upgrade_manager(manager)?; let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?; let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let row = database_editor let row = database_editor
.get_row(&params.view_id, &params.row_id) .get_row(&params.view_id, &params.row_id)
.await
.map(RowPB::from); .map(RowPB::from);
data_result_ok(OptionalRowPB { row }) data_result_ok(OptionalRowPB { row })
} }
pub(crate) async fn init_row_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor.init_database_row(&params.row_id).await?;
Ok(())
}
pub(crate) async fn get_row_meta_handler( pub(crate) async fn get_row_meta_handler(
data: AFPluginData<RowIdPB>, data: AFPluginData<RowIdPB>,
manager: AFPluginState<Weak<DatabaseManager>>, manager: AFPluginState<Weak<DatabaseManager>>,
 ) -> DataResult<RowMetaPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: RowIdParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
-  match database_editor.get_row_meta(&params.view_id, &params.row_id) {
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
+  match database_editor
+    .get_row_meta(&params.view_id, &params.row_id)
+    .await
+  {
     None => Err(FlowyError::record_not_found()),
     Some(row) => data_result_ok(row),
   }
@@ -376,7 +437,9 @@ pub(crate) async fn update_row_meta_handler(
 ) -> FlowyResult<()> {
   let manager = upgrade_manager(manager)?;
   let params: UpdateRowMetaParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let row_id = RowId::from(params.id.clone());
   database_editor
     .update_row_meta(&row_id.clone(), params)
@@ -391,7 +454,9 @@ pub(crate) async fn delete_rows_handler(
 ) -> Result<(), FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: RepeatedRowIdPB = data.into_inner();
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let row_ids = params
     .row_ids
     .into_iter()
@@ -408,7 +473,9 @@ pub(crate) async fn duplicate_row_handler(
 ) -> Result<(), FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: RowIdParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .duplicate_row(&params.view_id, &params.row_id)
     .await?;
@@ -422,7 +489,9 @@ pub(crate) async fn move_row_handler(
 ) -> Result<(), FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: MoveRowParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .move_row(&params.view_id, params.from_row_id, params.to_row_id)
     .await?;
@@ -436,7 +505,9 @@ pub(crate) async fn create_row_handler(
 ) -> DataResult<RowMetaPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params = data.try_into_inner()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   match database_editor.create_row(params).await? {
     Some(row) => data_result_ok(RowMetaPB::from(row)),
@@ -451,7 +522,9 @@ pub(crate) async fn get_cell_handler(
 ) -> DataResult<CellPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: CellIdParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let cell = database_editor
     .get_cell_pb(&params.field_id, &params.row_id)
     .await
@@ -466,7 +539,9 @@ pub(crate) async fn update_cell_handler(
 ) -> Result<(), FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: CellChangesetPB = data.into_inner();
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .update_cell_with_changeset(
       &params.view_id,
@@ -485,7 +560,9 @@ pub(crate) async fn new_select_option_handler(
 ) -> DataResult<SelectOptionPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: CreateSelectOptionParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let result = database_editor
     .create_select_option(&params.field_id, params.option_name)
     .await;
@@ -505,7 +582,9 @@ pub(crate) async fn insert_or_update_select_option_handler(
 ) -> Result<(), FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params = data.into_inner();
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .insert_select_options(
       &params.view_id,
@@ -524,7 +603,9 @@ pub(crate) async fn delete_select_option_handler(
 ) -> Result<(), FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params = data.into_inner();
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .delete_select_options(
       &params.view_id,
@@ -544,7 +625,7 @@ pub(crate) async fn update_select_option_cell_handler(
   let manager = upgrade_manager(manager)?;
   let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?;
   let database_editor = manager
-    .get_database_with_view_id(&params.cell_identifier.view_id)
+    .get_database_editor_with_view_id(&params.cell_identifier.view_id)
     .await?;
   let changeset = SelectOptionCellChangeset {
     insert_option_ids: params.insert_option_ids,
@@ -568,7 +649,9 @@ pub(crate) async fn update_checklist_cell_handler(
 ) -> Result<(), FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: ChecklistCellDataChangesetParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let changeset = ChecklistCellChangeset {
     insert_options: params
       .insert_options
@@ -609,7 +692,9 @@ pub(crate) async fn update_date_cell_handler(
     reminder_id: data.reminder_id,
   };
-  let database_editor = manager.get_database_with_view_id(&cell_id.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&cell_id.view_id)
+    .await?;
   database_editor
     .update_cell_with_changeset(
       &cell_id.view_id,
@@ -628,7 +713,9 @@ pub(crate) async fn get_groups_handler(
 ) -> DataResult<RepeatedGroupPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: DatabaseViewIdPB = data.into_inner();
-  let database_editor = manager.get_database_with_view_id(params.as_ref()).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(params.as_ref())
+    .await?;
   let groups = database_editor.load_groups(params.as_ref()).await?;
   data_result_ok(groups)
 }
@@ -640,7 +727,9 @@ pub(crate) async fn get_group_handler(
 ) -> DataResult<GroupPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: DatabaseGroupIdParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let group = database_editor
     .get_group(&params.view_id, &params.group_id)
     .await?;
@@ -654,7 +743,9 @@ pub(crate) async fn set_group_by_field_handler(
 ) -> FlowyResult<()> {
   let manager = upgrade_manager(manager)?;
   let params: GroupByFieldParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .set_group_by_field(&params.view_id, &params.field_id, params.setting_content)
     .await?;
@@ -669,17 +760,11 @@ pub(crate) async fn update_group_handler(
   let manager = upgrade_manager(manager)?;
   let params: UpdateGroupParams = data.into_inner().try_into()?;
   let view_id = params.view_id.clone();
-  let database_editor = manager.get_database_with_view_id(&view_id).await?;
+  let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;
   let group_changeset = GroupChangeset::from(params);
-  let (tx, rx) = oneshot::channel();
-  af_spawn(async move {
-    let result = database_editor
-      .update_group(&view_id, vec![group_changeset])
-      .await;
-    let _ = tx.send(result);
-  });
-  let _ = rx.await?;
+  database_editor
+    .update_group(&view_id, vec![group_changeset])
+    .await?;
   Ok(())
 }
@@ -690,7 +775,9 @@ pub(crate) async fn move_group_handler(
 ) -> FlowyResult<()> {
   let manager = upgrade_manager(manager)?;
   let params: MoveGroupParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .move_group(&params.view_id, &params.from_group_id, &params.to_group_id)
     .await?;
@@ -704,7 +791,9 @@ pub(crate) async fn move_group_row_handler(
 ) -> FlowyResult<()> {
   let manager = upgrade_manager(manager)?;
   let params: MoveGroupRowParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .move_group_row(
       &params.view_id,
@@ -724,7 +813,9 @@ pub(crate) async fn create_group_handler(
 ) -> FlowyResult<()> {
   let manager = upgrade_manager(manager)?;
   let params: CreateGroupParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .create_group(&params.view_id, &params.name)
     .await?;
@@ -738,7 +829,9 @@ pub(crate) async fn delete_group_handler(
 ) -> FlowyResult<()> {
   let manager = upgrade_manager(manager)?;
   let params: DeleteGroupParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor.delete_group(params).await?;
   Ok(())
 }
@@ -792,7 +885,7 @@ pub(crate) async fn set_layout_setting_handler(
   let changeset = data.into_inner();
   let view_id = changeset.view_id.clone();
   let params: LayoutSettingChangeset = changeset.try_into()?;
-  let database_editor = manager.get_database_with_view_id(&view_id).await?;
+  let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;
   database_editor.set_layout_setting(&view_id, params).await?;
   Ok(())
 }
@@ -803,7 +896,9 @@ pub(crate) async fn get_layout_setting_handler(
 ) -> DataResult<DatabaseLayoutSettingPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: DatabaseLayoutMeta = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let layout_setting_pb = database_editor
     .get_layout_setting(&params.view_id, params.layout)
     .await
@@ -819,7 +914,9 @@ pub(crate) async fn get_calendar_events_handler(
 ) -> DataResult<RepeatedCalendarEventPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: CalendarEventRequestParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let events = database_editor
     .get_all_calendar_events(&params.view_id)
     .await;
@@ -833,7 +930,9 @@ pub(crate) async fn get_no_date_calendar_events_handler(
 ) -> DataResult<RepeatedNoDateCalendarEventPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: CalendarEventRequestParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let _events = database_editor
     .get_all_no_date_calendar_events(&params.view_id)
     .await;
@@ -847,7 +946,9 @@ pub(crate) async fn get_calendar_event_handler(
 ) -> DataResult<CalendarEventPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: RowIdParams = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   let event = database_editor
     .get_calendar_event(&params.view_id, params.row_id)
     .await;
@@ -869,7 +970,9 @@ pub(crate) async fn move_calendar_event_handler(
     date: Some(data.timestamp),
     ..Default::default()
   };
-  let database_editor = manager.get_database_with_view_id(&cell_id.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&cell_id.view_id)
+    .await?;
   database_editor
     .update_cell_with_changeset(
       &cell_id.view_id,
@@ -897,7 +1000,7 @@ pub(crate) async fn export_csv_handler(
 ) -> DataResult<DatabaseExportDataPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let view_id = data.into_inner().value;
-  let database = manager.get_database_with_view_id(&view_id).await?;
+  let database = manager.get_database_editor_with_view_id(&view_id).await?;
   let data = database.export_csv(CSVFormat::Original).await?;
   data_result_ok(DatabaseExportDataPB {
     export_type: DatabaseExportDataType::CSV,
@@ -923,7 +1026,7 @@ pub(crate) async fn get_field_settings_handler(
 ) -> DataResult<RepeatedFieldSettingsPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let (view_id, field_ids) = data.into_inner().try_into()?;
-  let database_editor = manager.get_database_with_view_id(&view_id).await?;
+  let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;
   let field_settings = database_editor
     .get_field_settings(&view_id, field_ids.clone())
@@ -944,7 +1047,9 @@ pub(crate) async fn get_all_field_settings_handler(
 ) -> DataResult<RepeatedFieldSettingsPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let view_id = data.into_inner();
-  let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(view_id.as_ref())
+    .await?;
   let field_settings = database_editor
     .get_all_field_settings(view_id.as_ref())
@@ -965,7 +1070,9 @@ pub(crate) async fn update_field_settings_handler(
 ) -> FlowyResult<()> {
   let manager = upgrade_manager(manager)?;
   let params = data.try_into_inner()?;
-  let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   database_editor
     .update_field_settings_with_changeset(params)
     .await?;
@@ -979,7 +1086,9 @@ pub(crate) async fn get_all_calculations_handler(
 ) -> DataResult<RepeatedCalculationsPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let view_id = data.into_inner();
-  let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
+  let database_editor = manager
+    .get_database_editor_with_view_id(view_id.as_ref())
+    .await?;
   let calculations = database_editor.get_all_calculations(view_id.as_ref()).await;
@@ -993,7 +1102,9 @@ pub(crate) async fn update_calculation_handler(
 ) -> Result<(), FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: UpdateCalculationChangesetPB = data.into_inner();
-  let editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   editor.update_calculation(params).await?;
@@ -1007,7 +1118,9 @@ pub(crate) async fn remove_calculation_handler(
 ) -> Result<(), FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: RemoveCalculationChangesetPB = data.into_inner();
-  let editor = manager.get_database_with_view_id(&params.view_id).await?;
+  let editor = manager
+    .get_database_editor_with_view_id(&params.view_id)
+    .await?;
   editor.remove_calculation(params).await?;
@@ -1041,7 +1154,7 @@ pub(crate) async fn update_relation_cell_handler(
     removed_row_ids: params.removed_row_ids.into_iter().map(Into::into).collect(),
   };
-  let database_editor = manager.get_database_with_view_id(&view_id).await?;
+  let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;
   // // get the related database
   // let related_database_id = database_editor
@@ -1072,7 +1185,7 @@ pub(crate) async fn get_related_row_datas_handler(
 ) -> DataResult<RepeatedRelatedRowDataPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let params: GetRelatedRowDataPB = data.into_inner();
-  let database_editor = manager.get_database(&params.database_id).await?;
+  let database_editor = manager.get_database_editor(&params.database_id).await?;
   let row_datas = database_editor
     .get_related_rows(Some(&params.row_ids))
     .await?;
@@ -1086,7 +1199,7 @@ pub(crate) async fn get_related_database_rows_handler(
 ) -> DataResult<RepeatedRelatedRowDataPB, FlowyError> {
   let manager = upgrade_manager(manager)?;
   let database_id = data.into_inner().value;
-  let database_editor = manager.get_database(&database_id).await?;
+  let database_editor = manager.get_database_editor(&database_id).await?;
   let row_datas = database_editor.get_related_rows(None).await?;
   data_result_ok(RepeatedRelatedRowDataPB { rows: row_datas })
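
Every handler in this file follows the same pattern after the change: resolve the editor through the renamed `get_database_editor_with_view_id`, then await editor calls that used to be synchronous. A minimal, self-contained sketch of that shape, using hypothetical `MockManager`/`MockEditor` stand-ins rather than the real AppFlowy types:

use std::sync::Arc;

// Hypothetical stand-ins for DatabaseManager / DatabaseEditor; only the call shape matters.
struct MockEditor;

impl MockEditor {
  // Row access is async in the new API surface.
  async fn get_row_meta(&self, _view_id: &str, _row_id: &str) -> Option<String> {
    Some("row-meta".to_string())
  }
}

struct MockManager;

impl MockManager {
  // Mirrors the renamed lookup: view id -> database id -> editor.
  async fn get_database_editor_with_view_id(&self, _view_id: &str) -> Result<Arc<MockEditor>, String> {
    Ok(Arc::new(MockEditor))
  }
}

// A handler in the new style: resolve the editor, then await the editor call.
async fn get_row_meta_handler(manager: &MockManager, view_id: &str, row_id: &str) -> Result<String, String> {
  let database_editor = manager
    .get_database_editor_with_view_id(view_id)
    .await?;
  match database_editor.get_row_meta(view_id, row_id).await {
    None => Err("record not found".to_string()),
    Some(meta) => Ok(meta),
  }
}

#[tokio::main]
async fn main() {
  let manager = MockManager;
  let meta = get_row_meta_handler(&manager, "view-1", "row-1").await.unwrap();
  println!("{meta}");
}
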


@@ -35,6 +35,7 @@ pub fn init(database_manager: Weak<DatabaseManager>) -> AFPlugin {
     // Row
     .event(DatabaseEvent::CreateRow, create_row_handler)
     .event(DatabaseEvent::GetRow, get_row_handler)
+    .event(DatabaseEvent::InitRow, init_row_handler)
     .event(DatabaseEvent::GetRowMeta, get_row_meta_handler)
     .event(DatabaseEvent::UpdateRowMeta, update_row_meta_handler)
     .event(DatabaseEvent::DeleteRows, delete_rows_handler)
@@ -377,4 +378,7 @@ pub enum DatabaseEvent {
   #[event(input = "TranslateRowPB")]
   TranslateRow = 175,
+
+  #[event(input = "RowIdPB")]
+  InitRow = 176,
 }


@@ -1,22 +1,28 @@
 use anyhow::anyhow;
+use arc_swap::ArcSwapOption;
+use async_trait::async_trait;
+use std::borrow::BorrowMut;
 use std::collections::HashMap;
 use std::sync::{Arc, Weak};
-use collab::core::collab::{DataSource, MutexCollab};
-use collab_database::database::{DatabaseData, MutexDatabase};
+use collab::core::collab::DataSource;
+use collab::preclude::Collab;
+use collab_database::database::{Database, DatabaseData};
 use collab_database::error::DatabaseError;
 use collab_database::rows::RowId;
 use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout};
 use collab_database::workspace_database::{
-  CollabDocStateByOid, CollabFuture, DatabaseCollabService, DatabaseMeta, WorkspaceDatabase,
+  DatabaseCollabService, DatabaseMeta, EncodeCollabByOid, WorkspaceDatabase,
 };
 use collab_entity::{CollabType, EncodedCollab};
 use collab_plugins::local_storage::kv::KVTransactionDB;
 use tokio::sync::{Mutex, RwLock};
 use tracing::{event, instrument, trace};
-use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig};
-use collab_integrate::{CollabKVAction, CollabKVDB, CollabPersistenceConfig};
+use collab_integrate::collab_builder::{
+  AppFlowyCollabBuilder, CollabBuilderConfig, KVDBCollabPersistenceImpl,
+};
+use collab_integrate::{CollabKVAction, CollabKVDB};
 use flowy_database_pub::cloud::{
   DatabaseAIService, DatabaseCloudService, SummaryRowContent, TranslateItem, TranslateRowContent,
 };
@@ -42,7 +48,7 @@ pub trait DatabaseUser: Send + Sync {
 pub struct DatabaseManager {
   user: Arc<dyn DatabaseUser>,
-  workspace_database: Arc<RwLock<Option<Arc<WorkspaceDatabase>>>>,
+  workspace_database: ArcSwapOption<RwLock<WorkspaceDatabase>>,
   task_scheduler: Arc<RwLock<TaskDispatcher>>,
   editors: Mutex<HashMap<String, Arc<DatabaseEditor>>>,
   collab_builder: Arc<AppFlowyCollabBuilder>,
@@ -89,10 +95,10 @@ impl DatabaseManager {
     }
     self.editors.lock().await.clear();
     // 3. Clear the workspace database
-    if let Some(old_workspace_database) = self.workspace_database.write().await.take() {
-      old_workspace_database.close();
+    if let Some(old_workspace_database) = self.workspace_database.swap(None) {
+      let wdb = old_workspace_database.read().await;
+      wdb.close();
     }
-    *self.workspace_database.write().await = None;
     let collab_db = self.user.collab_db(uid)?;
     let collab_builder = UserDatabaseCollabServiceImpl {
@@ -100,30 +106,27 @@ impl DatabaseManager {
       collab_builder: self.collab_builder.clone(),
       cloud_service: self.cloud_service.clone(),
     };
-    let config = CollabPersistenceConfig::new().snapshot_per_update(100);
     let workspace_id = self.user.workspace_id()?;
     let workspace_database_object_id = self.user.workspace_database_object_id()?;
-    let mut workspace_database_doc_state = DataSource::Disk;
+    let mut workspace_database_doc_state =
+      KVDBCollabPersistenceImpl::new(collab_db.clone(), uid).into_data_source();
     // If the workspace database not exist in disk, try to fetch from remote.
     if !self.is_collab_exist(uid, &collab_db, &workspace_database_object_id) {
       trace!("workspace database not exist, try to fetch from remote");
       match self
         .cloud_service
-        .get_database_object_doc_state(
+        .get_database_encode_collab(
           &workspace_database_object_id,
           CollabType::WorkspaceDatabase,
          &workspace_id,
        )
        .await
      {
-        Ok(doc_state) => match doc_state {
-          Some(doc_state) => {
-            workspace_database_doc_state = DataSource::DocStateV1(doc_state);
-          },
-          None => {
-            workspace_database_doc_state = DataSource::Disk;
-          },
-        },
+        Ok(value) => {
+          if let Some(encode_collab) = value {
+            workspace_database_doc_state = DataSource::from(encode_collab);
+          }
+        },
        Err(err) => {
          return Err(FlowyError::record_not_found().with_context(format!(
@@ -140,20 +143,64 @@ impl DatabaseManager {
       "open aggregate database views object: {}",
       &workspace_database_object_id
     );
-    let collab = collab_builder.build_collab_with_config(
+    let workspace_id = self
+      .user
+      .workspace_id()
+      .map_err(|err| DatabaseError::Internal(err.into()))?;
+    let collab_object = self.collab_builder.collab_object(
+      &workspace_id,
       uid,
       &workspace_database_object_id,
       CollabType::WorkspaceDatabase,
-      collab_db.clone(),
-      workspace_database_doc_state,
-      config.clone(),
     )?;
-    let workspace_database =
-      WorkspaceDatabase::open(uid, collab, collab_db, config, collab_builder);
-    *self.workspace_database.write().await = Some(Arc::new(workspace_database));
+    let workspace_database = self.collab_builder.create_workspace_database(
+      collab_object,
+      workspace_database_doc_state,
+      collab_db,
+      CollabBuilderConfig::default().sync_enable(true),
+      collab_builder,
+    )?;
+    self.workspace_database.store(Some(workspace_database));
     Ok(())
   }
+
+  //FIXME: we need to initialize sync plugin for newly created collabs
+  #[allow(dead_code)]
+  fn initialize_plugins<T>(
+    &self,
+    uid: i64,
+    object_id: &str,
+    collab_type: CollabType,
+    collab: Arc<RwLock<T>>,
+  ) -> FlowyResult<Arc<RwLock<T>>>
+  where
+    T: BorrowMut<Collab> + Send + Sync + 'static,
+  {
+    //FIXME: unfortunately UserDatabaseCollabService::build_collab_with_config is broken by
+    // design as it assumes that we can split collab building process, which we cannot because:
+    // 1. We should not be able to run plugins ie. SyncPlugin over not-fully initialized collab,
+    //    and that's what originally build_collab_with_config did.
+    // 2. We cannot fully initialize collab from UserDatabaseCollabService, because
+    //    WorkspaceDatabase itself requires UserDatabaseCollabService as constructor parameter.
    // Ideally we should never need to initialize plugins that require collab instance as part of
    // that collab construction process itself - it means that we should redesign SyncPlugin to only
    // be fired once a collab is fully initialized.
+    let workspace_id = self
+      .user
+      .workspace_id()
+      .map_err(|err| DatabaseError::Internal(err.into()))?;
+    let object = self
+      .collab_builder
+      .collab_object(&workspace_id, uid, object_id, collab_type)?;
+    let collab = self.collab_builder.finalize(
+      object,
+      CollabBuilderConfig::default().sync_enable(true),
+      collab,
+    )?;
+    Ok(collab)
+  }
 
   #[instrument(
     name = "database_initialize_with_new_user",
     level = "debug",
@@ -166,19 +213,24 @@ impl DatabaseManager {
   }
 
   pub async fn get_database_inline_view_id(&self, database_id: &str) -> FlowyResult<String> {
-    let wdb = self.get_database_indexer().await?;
-    let database_collab = wdb.get_database(database_id).await.ok_or_else(|| {
-      FlowyError::record_not_found().with_context(format!("The database:{} not found", database_id))
+    let lock = self.workspace_database()?;
+    let wdb = lock.read().await;
+    let database_collab = wdb
+      .get_or_create_database(database_id)
+      .await
+      .ok_or_else(|| {
+        FlowyError::record_not_found()
+          .with_context(format!("The database:{} not found", database_id))
     })?;
-    let lock_guard = database_collab.lock();
+    let lock_guard = database_collab.read().await;
     Ok(lock_guard.get_inline_view_id())
   }
 
   pub async fn get_all_databases_meta(&self) -> Vec<DatabaseMeta> {
     let mut items = vec![];
-    if let Ok(wdb) = self.get_database_indexer().await {
+    if let Some(lock) = self.workspace_database.load_full() {
+      let wdb = lock.read().await;
       items = wdb.get_all_database_meta()
     }
     items
@@ -188,7 +240,8 @@ impl DatabaseManager {
     &self,
     view_ids_by_database_id: HashMap<String, Vec<String>>,
   ) -> FlowyResult<()> {
-    let wdb = self.get_database_indexer().await?;
+    let lock = self.workspace_database()?;
+    let mut wdb = lock.write().await;
     view_ids_by_database_id
       .into_iter()
       .for_each(|(database_id, view_ids)| {
@@ -197,13 +250,9 @@ impl DatabaseManager {
     Ok(())
   }
 
-  pub async fn get_database_with_view_id(&self, view_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
-    let database_id = self.get_database_id_with_view_id(view_id).await?;
-    self.get_database(&database_id).await
-  }
-
   pub async fn get_database_id_with_view_id(&self, view_id: &str) -> FlowyResult<String> {
-    let wdb = self.get_database_indexer().await?;
+    let lock = self.workspace_database()?;
+    let wdb = lock.read().await;
     wdb.get_database_id_with_view_id(view_id).ok_or_else(|| {
       FlowyError::record_not_found()
         .with_context(format!("The database for view id: {} not found", view_id))
@@ -211,28 +260,44 @@ impl DatabaseManager {
   }
 
   pub async fn get_database_row_ids_with_view_id(&self, view_id: &str) -> FlowyResult<Vec<RowId>> {
-    let database = self.get_database_with_view_id(view_id).await?;
-    Ok(database.get_row_ids())
+    let database = self.get_database_editor_with_view_id(view_id).await?;
+    Ok(database.get_row_ids().await)
   }
 
-  pub async fn get_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
+  pub async fn get_database_editor_with_view_id(
+    &self,
+    view_id: &str,
+  ) -> FlowyResult<Arc<DatabaseEditor>> {
+    let database_id = self.get_database_id_with_view_id(view_id).await?;
+    self.get_database_editor(&database_id).await
+  }
+
+  pub async fn get_database_editor(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
    if let Some(editor) = self.editors.lock().await.get(database_id).cloned() {
      return Ok(editor);
    }
+    // TODO(nathan): refactor the get_database that split the database creation and database opening.
    self.open_database(database_id).await
  }

  pub async fn open_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
    trace!("open database editor:{}", database_id);
-    let database = self
-      .get_database_indexer()
-      .await?
-      .get_database(database_id)
+    let lock = self.workspace_database()?;
+    let database = lock
+      .read()
+      .await
+      .get_or_create_database(database_id)
      .await
      .ok_or_else(|| FlowyError::collab_not_sync().with_context("open database error"))?;
-    let editor = Arc::new(DatabaseEditor::new(database, self.task_scheduler.clone()).await?);
+    let editor = Arc::new(
+      DatabaseEditor::new(
+        self.user.clone(),
+        database,
+        self.task_scheduler.clone(),
+        self.collab_builder.clone(),
+      )
+      .await?,
+    );
    self
      .editors
      .lock()
@@ -241,17 +306,14 @@ impl DatabaseManager {
     Ok(editor)
   }
 
-  /// Open the database view
   pub async fn open_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
     let view_id = view_id.as_ref();
-    let wdb = self.get_database_indexer().await?;
-    if let Some(database_id) = wdb.get_database_id_with_view_id(view_id) {
-      if let Some(database) = wdb.open_database(&database_id) {
-        if let Some(lock_database) = database.try_lock() {
-          if let Some(lock_collab) = lock_database.get_collab().try_lock() {
-            trace!("{} database start init sync", view_id);
-            lock_collab.start_init_sync();
-          }
-        }
+    let lock = self.workspace_database()?;
+    let workspace_database = lock.read().await;
+    if let Some(database_id) = workspace_database.get_database_id_with_view_id(view_id) {
+      if self.editors.lock().await.get(&database_id).is_none() {
+        self.open_database(&database_id).await?;
       }
     }
     Ok(())
@@ -259,20 +321,23 @@ impl DatabaseManager {
 
   pub async fn close_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
     let view_id = view_id.as_ref();
-    let wdb = self.get_database_indexer().await?;
-    let database_id = wdb.get_database_id_with_view_id(view_id);
+    let lock = self.workspace_database()?;
+    let workspace_database = lock.read().await;
+    let database_id = workspace_database.get_database_id_with_view_id(view_id);
     if let Some(database_id) = database_id {
       let mut editors = self.editors.lock().await;
       let mut should_remove = false;
       if let Some(editor) = editors.get(&database_id) {
         editor.close_view(view_id).await;
-        should_remove = editor.num_views().await == 0;
+        // when there is no opening views, mark the database to be removed.
+        should_remove = editor.num_of_opening_views().await == 0;
       }
       if should_remove {
         trace!("remove database editor:{}", database_id);
         editors.remove(&database_id);
-        wdb.close_database(&database_id);
+        workspace_database.close_database(&database_id);
       }
     }
@@ -280,13 +345,14 @@ impl DatabaseManager {
   }
 
   pub async fn delete_database_view(&self, view_id: &str) -> FlowyResult<()> {
-    let database = self.get_database_with_view_id(view_id).await?;
+    let database = self.get_database_editor_with_view_id(view_id).await?;
     let _ = database.delete_database_view(view_id).await?;
     Ok(())
   }
 
   pub async fn duplicate_database(&self, view_id: &str) -> FlowyResult<Vec<u8>> {
-    let wdb = self.get_database_indexer().await?;
+    let lock = self.workspace_database()?;
+    let wdb = lock.read().await;
     let data = wdb.get_database_data(view_id).await?;
     let json_bytes = data.to_json_bytes()?;
     Ok(json_bytes)
@@ -313,12 +379,12 @@ impl DatabaseManager {
       create_view_params.view_id = view_id.to_string();
     }
-    let wdb = self.get_database_indexer().await?;
+    let lock = self.workspace_database()?;
+    let mut wdb = lock.write().await;
     let database = wdb.create_database(create_database_params)?;
     let encoded_collab = database
-      .lock()
-      .get_collab()
-      .lock()
+      .read()
+      .await
       .encode_collab_v1(|collab| CollabType::Database.validate_require_data(collab))?;
     Ok(encoded_collab)
   }
@@ -326,9 +392,11 @@ impl DatabaseManager {
   pub async fn create_database_with_params(
     &self,
     params: CreateDatabaseParams,
-  ) -> FlowyResult<Arc<MutexDatabase>> {
-    let wdb = self.get_database_indexer().await?;
+  ) -> FlowyResult<Arc<RwLock<Database>>> {
+    let lock = self.workspace_database()?;
+    let mut wdb = lock.write().await;
     let database = wdb.create_database(params)?;
     Ok(database)
   }
@@ -342,12 +410,14 @@ impl DatabaseManager {
     database_view_id: String,
     database_parent_view_id: String,
   ) -> FlowyResult<()> {
-    let wdb = self.get_database_indexer().await?;
+    let lock = self.workspace_database()?;
+    let mut wdb = lock.write().await;
     let mut params = CreateViewParams::new(database_id.clone(), database_view_id, name, layout);
-    if let Some(database) = wdb.get_database(&database_id).await {
+    if let Some(database) = wdb.get_or_create_database(&database_id).await {
       let (field, layout_setting, field_settings_map) =
         DatabaseLayoutDepsResolver::new(database, layout)
-          .resolve_deps_when_create_database_linked_view(&database_parent_view_id);
+          .resolve_deps_when_create_database_linked_view(&database_parent_view_id)
+          .await;
       if let Some(field) = field {
         params = params.with_deps_fields(vec![field], vec![default_field_settings_by_layout_map()]);
       }
@@ -374,18 +444,12 @@ impl DatabaseManager {
       .await
       .map_err(internal_error)??;
-    // Currently, we only support importing up to 500 rows. We can support more rows in the future.
-    if !cfg!(debug_assertions) && params.rows.len() > 500 {
-      return Err(FlowyError::internal().with_context("The number of rows exceeds the limit"));
-    }
     let view_id = params.inline_view_id.clone();
     let database_id = params.database_id.clone();
     let database = self.create_database_with_params(params).await?;
     let encoded_collab = database
-      .lock()
-      .get_collab()
-      .lock()
+      .read()
+      .await
       .encode_collab_v1(|collab| CollabType::Database.validate_require_data(collab))?;
     let result = ImportResult {
       database_id,
@@ -405,7 +469,7 @@ impl DatabaseManager {
   }
 
   pub async fn export_csv(&self, view_id: &str, style: CSVFormat) -> FlowyResult<String> {
-    let database = self.get_database_with_view_id(view_id).await?;
+    let database = self.get_database_editor_with_view_id(view_id).await?;
     database.export_csv(style).await
   }
@@ -414,7 +478,7 @@ impl DatabaseManager {
     view_id: &str,
     layout: DatabaseLayoutPB,
   ) -> FlowyResult<()> {
-    let database = self.get_database_with_view_id(view_id).await?;
+    let database = self.get_database_editor_with_view_id(view_id).await?;
     database.update_view_layout(view_id, layout.into()).await
   }
@@ -440,14 +504,11 @@ impl DatabaseManager {
     Ok(snapshots)
   }
 
-  /// Return the database indexer.
-  /// Each workspace has itw own Database indexer that manages all the databases and database views
-  async fn get_database_indexer(&self) -> FlowyResult<Arc<WorkspaceDatabase>> {
-    let database = self.workspace_database.read().await;
-    match &*database {
-      None => Err(FlowyError::internal().with_context("Workspace database not initialized")),
-      Some(user_database) => Ok(user_database.clone()),
-    }
+  fn workspace_database(&self) -> FlowyResult<Arc<RwLock<WorkspaceDatabase>>> {
+    self
+      .workspace_database
+      .load_full()
+      .ok_or_else(|| FlowyError::internal().with_context("Workspace database not initialized"))
   }
 
   #[instrument(level = "debug", skip_all)]
@@ -457,10 +518,10 @@ impl DatabaseManager {
     row_id: RowId,
     field_id: String,
   ) -> FlowyResult<()> {
-    let database = self.get_database_with_view_id(&view_id).await?;
+    let database = self.get_database_editor_with_view_id(&view_id).await?;
     let mut summary_row_content = SummaryRowContent::new();
-    if let Some(row) = database.get_row(&view_id, &row_id) {
-      let fields = database.get_fields(&view_id, None);
+    if let Some(row) = database.get_row(&view_id, &row_id).await {
+      let fields = database.get_fields(&view_id, None).await;
       for field in fields {
         // When summarizing a row, skip the content in the "AI summary" cell; it does not need to
         // be summarized.
@@ -501,12 +562,12 @@ impl DatabaseManager {
     row_id: RowId,
     field_id: String,
   ) -> FlowyResult<()> {
-    let database = self.get_database_with_view_id(&view_id).await?;
+    let database = self.get_database_editor_with_view_id(&view_id).await?;
     let mut translate_row_content = TranslateRowContent::new();
     let mut language = "english".to_string();
-    if let Some(row) = database.get_row(&view_id, &row_id) {
-      let fields = database.get_fields(&view_id, None);
+    if let Some(row) = database.get_row(&view_id, &row_id).await {
+      let fields = database.get_fields(&view_id, None).await;
       for field in fields {
         // When translate a row, skip the content in the "AI Translate" cell; it does not need to
         // be translated.
@@ -582,79 +643,73 @@ struct UserDatabaseCollabServiceImpl {
   cloud_service: Arc<dyn DatabaseCloudService>,
 }
 
+#[async_trait]
 impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
-  fn get_collab_doc_state(
+  async fn get_encode_collab(
     &self,
     object_id: &str,
     object_ty: CollabType,
-  ) -> CollabFuture<Result<DataSource, DatabaseError>> {
+  ) -> Result<Option<EncodedCollab>, DatabaseError> {
     let workspace_id = self.user.workspace_id().unwrap();
     let object_id = object_id.to_string();
     let weak_cloud_service = Arc::downgrade(&self.cloud_service);
-    Box::pin(async move {
     match weak_cloud_service.upgrade() {
       None => Err(DatabaseError::Internal(anyhow!("Cloud service is dropped"))),
       Some(cloud_service) => {
-        let doc_state = cloud_service
-          .get_database_object_doc_state(&object_id, object_ty, &workspace_id)
+        let encode_collab = cloud_service
+          .get_database_encode_collab(&object_id, object_ty, &workspace_id)
           .await?;
-        match doc_state {
-          None => Ok(DataSource::Disk),
-          Some(doc_state) => Ok(DataSource::DocStateV1(doc_state)),
-        }
+        Ok(encode_collab)
       },
     }
-    })
   }
 
-  fn batch_get_collab_update(
+  async fn batch_get_encode_collab(
     &self,
     object_ids: Vec<String>,
     object_ty: CollabType,
-  ) -> CollabFuture<Result<CollabDocStateByOid, DatabaseError>> {
+  ) -> Result<EncodeCollabByOid, DatabaseError> {
     let cloned_user = self.user.clone();
     let weak_cloud_service = Arc::downgrade(&self.cloud_service);
-    Box::pin(async move {
     let workspace_id = cloned_user
       .workspace_id()
      .map_err(|err| DatabaseError::Internal(err.into()))?;
    match weak_cloud_service.upgrade() {
      None => {
        tracing::warn!("Cloud service is dropped");
-        Ok(CollabDocStateByOid::default())
+        Ok(EncodeCollabByOid::default())
      },
      Some(cloud_service) => {
        let updates = cloud_service
-          .batch_get_database_object_doc_state(object_ids, object_ty, &workspace_id)
+          .batch_get_database_encode_collab(object_ids, object_ty, &workspace_id)
          .await?;
        Ok(updates)
      },
    }
-    })
  }

-  fn build_collab_with_config(
+  ///NOTE: this method doesn't initialize plugins, however it is passed into WorkspaceDatabase,
+  /// therefore all Database/DatabaseRow creation methods must initialize plugins thmselves.
+  fn build_collab(
    &self,
    uid: i64,
    object_id: &str,
    object_type: CollabType,
    collab_db: Weak<CollabKVDB>,
-    collab_raw_data: DataSource,
-    _persistence_config: CollabPersistenceConfig,
-  ) -> Result<Arc<MutexCollab>, DatabaseError> {
+    data_source: DataSource,
+  ) -> Result<Collab, DatabaseError> {
    let workspace_id = self
      .user
      .workspace_id()
      .map_err(|err| DatabaseError::Internal(err.into()))?;
-    let collab = self.collab_builder.build_with_config(
-      &workspace_id,
-      uid,
-      object_id,
-      object_type.clone(),
-      collab_db.clone(),
-      collab_raw_data,
-      CollabBuilderConfig::default().sync_enable(true),
-    )?;
+    let object = self
+      .collab_builder
+      .collab_object(&workspace_id, uid, object_id, object_type)?;
+    let collab = self
+      .collab_builder
+      .build_collab(&object, &collab_db, data_source)?;
    Ok(collab)
  }
 }
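
The manager now keeps the workspace database behind `ArcSwapOption<RwLock<WorkspaceDatabase>>`: replacing it on workspace switch is a lock-free pointer swap, and readers clone the `Arc` with `load_full()` before taking the inner async lock. A rough sketch of that access pattern with a placeholder `Workspace` type (not the real `WorkspaceDatabase`):

use arc_swap::ArcSwapOption;
use std::sync::Arc;
use tokio::sync::RwLock;

// Placeholder for WorkspaceDatabase; only the swap/load access pattern matters here.
struct Workspace {
  id: String,
}

struct Manager {
  workspace: ArcSwapOption<RwLock<Workspace>>,
}

impl Manager {
  fn new() -> Self {
    Self { workspace: ArcSwapOption::from(None) }
  }

  // Swap in a new workspace; `swap` returns the old one so it can be closed.
  async fn open(&self, id: &str) {
    let new_ws = Arc::new(RwLock::new(Workspace { id: id.to_string() }));
    if let Some(old) = self.workspace.swap(Some(new_ws)) {
      let old = old.read().await;
      println!("closing workspace {}", old.id);
    }
  }

  // Readers clone the Arc with `load_full`, then take the inner async lock.
  async fn current_id(&self) -> Option<String> {
    let lock = self.workspace.load_full()?;
    let ws = lock.read().await;
    Some(ws.id.clone())
  }
}

#[tokio::main]
async fn main() {
  let manager = Manager::new();
  manager.open("workspace-a").await;
  manager.open("workspace-b").await;
  assert_eq!(manager.current_id().await.as_deref(), Some("workspace-b"));
}
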


@@ -1,6 +1,5 @@
-use parking_lot::RwLock;
 use std::sync::Arc;
 
 use crate::utils::cache::AnyTypeCache;
 
-pub type CalculationsByFieldIdCache = Arc<RwLock<AnyTypeCache<String>>>;
+pub type CalculationsByFieldIdCache = Arc<AnyTypeCache<String>>;
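
Dropping the outer `parking_lot::RwLock` around `AnyTypeCache` suggests the cache is now internally synchronized, so callers share a plain `Arc`. The cache internals are not part of this diff; the following is only an illustration of that idea using a `std::sync::Mutex` hidden inside the cache:

use std::any::Any;
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Illustrative only: a cache that hides its lock inside, so callers share a plain Arc<Cache>.
struct Cache {
  inner: Mutex<HashMap<String, Arc<dyn Any + Send + Sync>>>,
}

impl Cache {
  fn new() -> Arc<Self> {
    Arc::new(Self { inner: Mutex::new(HashMap::new()) })
  }

  fn insert<T: Any + Send + Sync>(&self, key: &str, value: T) {
    self.inner.lock().unwrap().insert(key.to_string(), Arc::new(value));
  }

  fn get<T: Any + Send + Sync>(&self, key: &str) -> Option<Arc<T>> {
    let guard = self.inner.lock().unwrap();
    guard.get(key).cloned()?.downcast::<T>().ok()
  }
}

fn main() {
  let cache = Cache::new();
  cache.insert("count", 42u64);
  assert_eq!(cache.get::<u64>("count").as_deref(), Some(&42));
}
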


@@ -1,3 +1,4 @@
+use async_trait::async_trait;
 use std::str::FromStr;
 use std::sync::Arc;
@@ -7,7 +8,6 @@ use flowy_error::FlowyResult;
 use serde::{Deserialize, Serialize};
 use tokio::sync::RwLock;
 
-use lib_infra::future::Fut;
 use lib_infra::priority_task::{QualityOfService, Task, TaskContent, TaskDispatcher};
 
 use crate::entities::{
@@ -19,13 +19,14 @@ use crate::utils::cache::AnyTypeCache;
 use super::{Calculation, CalculationChangeset, CalculationsService};
 
+#[async_trait]
 pub trait CalculationsDelegate: Send + Sync + 'static {
-  fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut<Vec<Arc<RowCell>>>;
-  fn get_field(&self, field_id: &str) -> Option<Field>;
-  fn get_calculation(&self, view_id: &str, field_id: &str) -> Fut<Option<Arc<Calculation>>>;
-  fn get_all_calculations(&self, view_id: &str) -> Fut<Arc<Vec<Arc<Calculation>>>>;
-  fn update_calculation(&self, view_id: &str, calculation: Calculation);
-  fn remove_calculation(&self, view_id: &str, calculation_id: &str);
+  async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec<Arc<RowCell>>;
+  async fn get_field(&self, field_id: &str) -> Option<Field>;
+  async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option<Arc<Calculation>>;
+  async fn get_all_calculations(&self, view_id: &str) -> Arc<Vec<Arc<Calculation>>>;
+  async fn update_calculation(&self, view_id: &str, calculation: Calculation);
+  async fn remove_calculation(&self, view_id: &str, calculation_id: &str);
 }
 
 pub struct CalculationsController {
@@ -45,7 +46,7 @@ impl Drop for CalculationsController {
 }
 
 impl CalculationsController {
-  pub async fn new<T>(
+  pub fn new<T>(
     view_id: &str,
     handler_id: &str,
     delegate: T,
@@ -65,7 +66,7 @@ impl CalculationsController {
       calculations_service: CalculationsService::new(),
      notifier,
    };
-    this.update_cache(calculations).await;
+    this.update_cache(calculations);
    this
  }
@@ -130,7 +131,8 @@ impl CalculationsController {
     if let Some(calculation) = calculation {
       self
         .delegate
-        .remove_calculation(&self.view_id, &calculation.id);
+        .remove_calculation(&self.view_id, &calculation.id)
+        .await;
       let notification = CalculationChangesetNotificationPB::from_delete(
         &self.view_id,
@@ -165,7 +167,8 @@ impl CalculationsController {
       if !calc_type.is_allowed(new_field_type) {
         self
           .delegate
-          .remove_calculation(&self.view_id, &calculation.id);
+          .remove_calculation(&self.view_id, &calculation.id)
+          .await;
         let notification = CalculationChangesetNotificationPB::from_delete(
           &self.view_id,
@@ -201,7 +204,8 @@ impl CalculationsController {
     if let Some(update) = update {
       self
         .delegate
-        .update_calculation(&self.view_id, update.clone());
+        .update_calculation(&self.view_id, update.clone())
+        .await;
       let notification = CalculationChangesetNotificationPB::from_update(
         &self.view_id,
@@ -238,7 +242,10 @@ impl CalculationsController {
         let update = self.get_updated_calculation(calculation.clone()).await;
         if let Some(update) = update {
           updates.push(CalculationPB::from(&update));
-          self.delegate.update_calculation(&self.view_id, update);
+          self
+            .delegate
+            .update_calculation(&self.view_id, update)
+            .await;
         }
       }
     }
@@ -252,7 +259,10 @@ impl CalculationsController {
         if let Some(update) = update {
           updates.push(CalculationPB::from(&update));
-          self.delegate.update_calculation(&self.view_id, update);
+          self
+            .delegate
+            .update_calculation(&self.view_id, update)
+            .await;
         }
       }
     }
@@ -273,7 +283,7 @@ impl CalculationsController {
       .delegate
       .get_cells_for_field(&self.view_id, &calculation.field_id)
       .await;
-    let field = self.delegate.get_field(&calculation.field_id)?;
+    let field = self.delegate.get_field(&calculation.field_id).await?;
     let value =
       self
@@ -299,7 +309,7 @@ impl CalculationsController {
       .get_cells_for_field(&self.view_id, &insert.field_id)
       .await;
-    let field = self.delegate.get_field(&insert.field_id)?;
+    let field = self.delegate.get_field(&insert.field_id).await?;
     let value = self
       .calculations_service
@@ -331,12 +341,11 @@ impl CalculationsController {
     notification
   }
 
-  async fn update_cache(&self, calculations: Vec<Arc<Calculation>>) {
+  fn update_cache(&self, calculations: Vec<Arc<Calculation>>) {
     for calculation in calculations {
       let field_id = &calculation.field_id;
       self
         .calculations_by_field_cache
-        .write()
         .insert(field_id, calculation.clone());
     }
   }
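
`CalculationsDelegate` switches from returning boxed `Fut` futures to plain `async fn` via the `async_trait` macro, which is why every call site above gains an `.await`. A condensed sketch of what implementing such a delegate looks like, with invented placeholder types instead of the real `RowCell`/`Field`/`Calculation`:

use async_trait::async_trait;
use std::sync::Arc;

// Invented placeholder type; the real delegate works over RowCell / Field / Calculation.
struct Cell(String);

#[async_trait]
trait CalculationsDelegate: Send + Sync + 'static {
  async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec<Arc<Cell>>;
  async fn update_calculation(&self, view_id: &str, value: String);
}

struct EditorDelegate;

#[async_trait]
impl CalculationsDelegate for EditorDelegate {
  async fn get_cells_for_field(&self, _view_id: &str, field_id: &str) -> Vec<Arc<Cell>> {
    // The real code awaits the database editor here; this stub fabricates two cells.
    vec![
      Arc::new(Cell(format!("{field_id}:1"))),
      Arc::new(Cell(format!("{field_id}:2"))),
    ]
  }

  async fn update_calculation(&self, view_id: &str, value: String) {
    println!("update calculation for {view_id}: {value}");
  }
}

#[tokio::main]
async fn main() {
  let delegate: Arc<dyn CalculationsDelegate> = Arc::new(EditorDelegate);
  let cells = delegate.get_cells_for_field("view-1", "field-a").await;
  println!("first cell: {}", cells[0].0);
  delegate
    .update_calculation("view-1", format!("count = {}", cells.len()))
    .await;
}
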


@ -1,14 +1,17 @@
use anyhow::bail; use collab::preclude::encoding::serde::from_any;
use collab::core::any_map::AnyMapExtension; use collab::preclude::Any;
use collab_database::views::{CalculationMap, CalculationMapBuilder}; use collab_database::views::{CalculationMap, CalculationMapBuilder};
use serde::Deserialize;
use crate::entities::CalculationPB; use crate::entities::CalculationPB;
#[derive(Debug, Clone)] #[derive(Debug, Clone, Deserialize)]
pub struct Calculation { pub struct Calculation {
pub id: String, pub id: String,
pub field_id: String, pub field_id: String,
#[serde(default, rename = "ty")]
pub calculation_type: i64, pub calculation_type: i64,
#[serde(default, rename = "calculation_value")]
pub value: String, pub value: String,
} }
@ -19,12 +22,12 @@ const CALCULATION_VALUE: &str = "calculation_value";
impl From<Calculation> for CalculationMap { impl From<Calculation> for CalculationMap {
fn from(data: Calculation) -> Self { fn from(data: Calculation) -> Self {
CalculationMapBuilder::new() CalculationMapBuilder::from([
.insert_str_value(CALCULATION_ID, data.id) (CALCULATION_ID.into(), data.id.into()),
.insert_str_value(FIELD_ID, data.field_id) (FIELD_ID.into(), data.field_id.into()),
.insert_i64_value(CALCULATION_TYPE, data.calculation_type) (CALCULATION_TYPE.into(), data.calculation_type.into()),
.insert_str_value(CALCULATION_VALUE, data.value) (CALCULATION_VALUE.into(), data.value.into()),
.build() ])
} }
} }
@ -45,29 +48,7 @@ impl TryFrom<CalculationMap> for Calculation {
type Error = anyhow::Error; type Error = anyhow::Error;
fn try_from(calculation: CalculationMap) -> Result<Self, Self::Error> { fn try_from(calculation: CalculationMap) -> Result<Self, Self::Error> {
match ( from_any(&Any::from(calculation)).map_err(|e| e.into())
calculation.get_str_value(CALCULATION_ID),
calculation.get_str_value(FIELD_ID),
) {
(Some(id), Some(field_id)) => {
let value = calculation
.get_str_value(CALCULATION_VALUE)
.unwrap_or_default();
let calculation_type = calculation
.get_i64_value(CALCULATION_TYPE)
.unwrap_or_default();
Ok(Calculation {
id,
field_id,
calculation_type,
value,
})
},
_ => {
bail!("Invalid calculation data")
},
}
} }
} }
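
The hand-written key matching above gives way to a serde round trip: the struct derives Deserialize, `#[serde(default, rename = "...")]` maps the stored key names, and collab's `from_any` does the extraction. A minimal sketch of the same idea, using `serde_json::from_value` as a stand-in for `from_any`; the struct shape mirrors the diff, everything else here is illustrative:

```rust
use serde::Deserialize;
use serde_json::json;

// Mirrors the shape of the new `Calculation` struct: stored keys differ from
// the Rust field names, and absent keys fall back to `Default`.
#[derive(Debug, Deserialize)]
struct Calculation {
    id: String,
    field_id: String,
    #[serde(default, rename = "ty")]
    calculation_type: i64,
    #[serde(default, rename = "calculation_value")]
    value: String,
}

fn main() -> Result<(), serde_json::Error> {
    // "ty" and "calculation_value" map onto the renamed fields; a missing key
    // simply produces the default value instead of an error.
    let map = json!({
        "id": "calc-1",
        "field_id": "field-1",
        "ty": 2,
        "calculation_value": "42",
    });
    let calculation: Calculation = serde_json::from_value(map)?;
    println!("{calculation:?}");
    Ok(())
}
```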


@ -1,6 +1,5 @@
use parking_lot::RwLock;
use std::sync::Arc; use std::sync::Arc;
use crate::utils::cache::AnyTypeCache; use crate::utils::cache::AnyTypeCache;
pub type CellCache = Arc<RwLock<AnyTypeCache<u64>>>; pub type CellCache = Arc<AnyTypeCache<u64>>;
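
With parking_lot removed, `CellCache` loses its outer `RwLock`, which suggests `AnyTypeCache` now synchronizes internally, so call sites drop the explicit `.write()` (as in `update_cache` above). A rough sketch of that shape, with a hypothetical `Cache` type standing in for `AnyTypeCache`:

```rust
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

// Hypothetical stand-in for `AnyTypeCache<u64>`: the lock lives inside the
// type, so callers share a plain `Arc<Cache>` and never lock explicitly.
#[derive(Default)]
struct Cache {
    inner: RwLock<HashMap<u64, String>>,
}

impl Cache {
    fn insert(&self, key: u64, value: String) {
        // Interior locking: `&self` is enough, no outer `Arc<RwLock<...>>` needed.
        self.inner.write().unwrap().insert(key, value);
    }

    fn get(&self, key: &u64) -> Option<String> {
        self.inner.read().unwrap().get(key).cloned()
    }
}

fn main() {
    let cache: Arc<Cache> = Arc::new(Cache::default()); // was Arc<RwLock<...>>
    cache.insert(1, "cell data".to_string());
    assert_eq!(cache.get(&1), Some("cell data".to_string()));
}
```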


@ -2,7 +2,7 @@ use crate::entities::{DatabaseSyncStatePB, DidFetchRowPB, RowsChangePB};
use crate::notification::{send_notification, DatabaseNotification, DATABASE_OBSERVABLE_SOURCE}; use crate::notification::{send_notification, DatabaseNotification, DATABASE_OBSERVABLE_SOURCE};
use crate::services::database::UpdatedRow; use crate::services::database::UpdatedRow;
use collab_database::blocks::BlockEvent; use collab_database::blocks::BlockEvent;
use collab_database::database::MutexDatabase; use collab_database::database::Database;
use collab_database::fields::FieldChange; use collab_database::fields::FieldChange;
use collab_database::rows::{RowChange, RowId}; use collab_database::rows::{RowChange, RowId};
use collab_database::views::DatabaseViewChange; use collab_database::views::DatabaseViewChange;
@ -10,11 +10,12 @@ use flowy_notification::{DebounceNotificationSender, NotificationBuilder};
use futures::StreamExt; use futures::StreamExt;
use lib_dispatch::prelude::af_spawn; use lib_dispatch::prelude::af_spawn;
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::RwLock;
use tracing::{trace, warn}; use tracing::{trace, warn};
pub(crate) async fn observe_sync_state(database_id: &str, database: &Arc<MutexDatabase>) { pub(crate) async fn observe_sync_state(database_id: &str, database: &Arc<RwLock<Database>>) {
let weak_database = Arc::downgrade(database); let weak_database = Arc::downgrade(database);
let mut sync_state = database.lock().subscribe_sync_state(); let mut sync_state = database.read().await.subscribe_sync_state();
let database_id = database_id.to_string(); let database_id = database_id.to_string();
af_spawn(async move { af_spawn(async move {
while let Some(sync_state) = sync_state.next().await { while let Some(sync_state) = sync_state.next().await {
@ -35,13 +36,13 @@ pub(crate) async fn observe_sync_state(database_id: &str, database: &Arc<MutexDa
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) async fn observe_rows_change( pub(crate) async fn observe_rows_change(
database_id: &str, database_id: &str,
database: &Arc<MutexDatabase>, database: &Arc<RwLock<Database>>,
notification_sender: &Arc<DebounceNotificationSender>, notification_sender: &Arc<DebounceNotificationSender>,
) { ) {
let notification_sender = notification_sender.clone(); let notification_sender = notification_sender.clone();
let database_id = database_id.to_string(); let database_id = database_id.to_string();
let weak_database = Arc::downgrade(database); let weak_database = Arc::downgrade(database);
let mut row_change = database.lock().subscribe_row_change(); let mut row_change = database.read().await.subscribe_row_change();
af_spawn(async move { af_spawn(async move {
while let Ok(row_change) = row_change.recv().await { while let Ok(row_change) = row_change.recv().await {
if let Some(database) = weak_database.upgrade() { if let Some(database) = weak_database.upgrade() {
@ -59,7 +60,7 @@ pub(crate) async fn observe_rows_change(
let cell_id = format!("{}:{}", row_id, field_id); let cell_id = format!("{}:{}", row_id, field_id);
notify_cell(&notification_sender, &cell_id); notify_cell(&notification_sender, &cell_id);
let views = database.lock().get_all_database_views_meta(); let views = database.read().await.get_all_database_views_meta();
for view in views { for view in views {
notify_row(&notification_sender, &view.id, &field_id, &row_id); notify_row(&notification_sender, &view.id, &field_id, &row_id);
} }
@ -75,10 +76,10 @@ pub(crate) async fn observe_rows_change(
}); });
} }
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) async fn observe_field_change(database_id: &str, database: &Arc<MutexDatabase>) { pub(crate) async fn observe_field_change(database_id: &str, database: &Arc<RwLock<Database>>) {
let database_id = database_id.to_string(); let database_id = database_id.to_string();
let weak_database = Arc::downgrade(database); let weak_database = Arc::downgrade(database);
let mut field_change = database.lock().subscribe_field_change(); let mut field_change = database.read().await.subscribe_field_change();
af_spawn(async move { af_spawn(async move {
while let Ok(field_change) = field_change.recv().await { while let Ok(field_change) = field_change.recv().await {
if weak_database.upgrade().is_none() { if weak_database.upgrade().is_none() {
@ -100,10 +101,10 @@ pub(crate) async fn observe_field_change(database_id: &str, database: &Arc<Mutex
} }
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) async fn observe_view_change(database_id: &str, database: &Arc<MutexDatabase>) { pub(crate) async fn observe_view_change(database_id: &str, database: &Arc<RwLock<Database>>) {
let database_id = database_id.to_string(); let database_id = database_id.to_string();
let weak_database = Arc::downgrade(database); let weak_database = Arc::downgrade(database);
let mut view_change = database.lock().subscribe_view_change(); let mut view_change = database.read().await.subscribe_view_change();
af_spawn(async move { af_spawn(async move {
while let Ok(view_change) = view_change.recv().await { while let Ok(view_change) = view_change.recv().await {
if weak_database.upgrade().is_none() { if weak_database.upgrade().is_none() {
@ -136,10 +137,10 @@ pub(crate) async fn observe_view_change(database_id: &str, database: &Arc<MutexD
} }
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) async fn observe_block_event(database_id: &str, database: &Arc<MutexDatabase>) { pub(crate) async fn observe_block_event(database_id: &str, database: &Arc<RwLock<Database>>) {
let database_id = database_id.to_string(); let database_id = database_id.to_string();
let weak_database = Arc::downgrade(database); let weak_database = Arc::downgrade(database);
let mut block_event_rx = database.lock().subscribe_block_event(); let mut block_event_rx = database.read().await.subscribe_block_event();
af_spawn(async move { af_spawn(async move {
while let Ok(event) = block_event_rx.recv().await { while let Ok(event) = block_event_rx.recv().await {
if weak_database.upgrade().is_none() { if weak_database.upgrade().is_none() {
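
Each observer above follows the same new shape: downgrade the shared `Arc<RwLock<Database>>`, take a short-lived read guard only to subscribe, then spawn a task that stops once the database has been dropped. A self-contained sketch of that pattern, using `tokio::spawn` and a broadcast channel as stand-ins for `af_spawn` and the collab subscriptions:

```rust
use std::sync::Arc;
use tokio::sync::{broadcast, RwLock};

// Hypothetical stand-in for `Database`: it hands out a broadcast receiver,
// much like `subscribe_row_change` / `subscribe_view_change` do.
struct Database {
    tx: broadcast::Sender<String>,
}

impl Database {
    fn subscribe(&self) -> broadcast::Receiver<String> {
        self.tx.subscribe()
    }
}

async fn observe(database: &Arc<RwLock<Database>>) {
    // Downgrade first so the observer never keeps the database alive on its own.
    let weak_database = Arc::downgrade(database);
    // Hold the read guard only long enough to subscribe.
    let mut events = database.read().await.subscribe();
    tokio::spawn(async move {
        while let Ok(event) = events.recv().await {
            // Stop observing as soon as the strong references are gone.
            if weak_database.upgrade().is_none() {
                break;
            }
            println!("database event: {event}");
        }
    });
}

#[tokio::main]
async fn main() {
    let (tx, _rx) = broadcast::channel(16);
    let database = Arc::new(RwLock::new(Database { tx: tx.clone() }));
    observe(&database).await;
    tx.send("row changed".to_string()).ok();
    tokio::time::sleep(std::time::Duration::from_millis(50)).await;
}
```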


@ -1,9 +1,10 @@
use collab_database::database::{gen_field_id, MutexDatabase}; use collab_database::database::{gen_field_id, Database};
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::views::{ use collab_database::views::{
DatabaseLayout, FieldSettingsByFieldIdMap, LayoutSetting, OrderObjectPosition, DatabaseLayout, FieldSettingsByFieldIdMap, LayoutSetting, OrderObjectPosition,
}; };
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::RwLock;
use crate::entities::FieldType; use crate::entities::FieldType;
use crate::services::field::{DateTypeOption, SingleSelectTypeOption}; use crate::services::field::{DateTypeOption, SingleSelectTypeOption};
@ -15,20 +16,20 @@ use crate::services::setting::{BoardLayoutSetting, CalendarLayoutSetting};
/// view depends on a field that can be used to group rows while a calendar view /// view depends on a field that can be used to group rows while a calendar view
/// depends on a date field. /// depends on a date field.
pub struct DatabaseLayoutDepsResolver { pub struct DatabaseLayoutDepsResolver {
pub database: Arc<MutexDatabase>, pub database: Arc<RwLock<Database>>,
/// The new database layout. /// The new database layout.
pub database_layout: DatabaseLayout, pub database_layout: DatabaseLayout,
} }
impl DatabaseLayoutDepsResolver { impl DatabaseLayoutDepsResolver {
pub fn new(database: Arc<MutexDatabase>, database_layout: DatabaseLayout) -> Self { pub fn new(database: Arc<RwLock<Database>>, database_layout: DatabaseLayout) -> Self {
Self { Self {
database, database,
database_layout, database_layout,
} }
} }
pub fn resolve_deps_when_create_database_linked_view( pub async fn resolve_deps_when_create_database_linked_view(
&self, &self,
view_id: &str, view_id: &str,
) -> ( ) -> (
@ -41,9 +42,8 @@ impl DatabaseLayoutDepsResolver {
DatabaseLayout::Board => { DatabaseLayout::Board => {
let layout_settings = BoardLayoutSetting::new().into(); let layout_settings = BoardLayoutSetting::new().into();
let field = if !self let database = self.database.read().await;
.database let field = if !database
.lock()
.get_fields(None) .get_fields(None)
.into_iter() .into_iter()
.any(|field| FieldType::from(field.field_type).can_be_group()) .any(|field| FieldType::from(field.field_type).can_be_group())
@ -53,7 +53,7 @@ impl DatabaseLayoutDepsResolver {
None None
}; };
let field_settings_map = self.database.lock().get_field_settings(view_id, None); let field_settings_map = database.get_field_settings(view_id, None);
tracing::info!( tracing::info!(
"resolve_deps_when_create_database_linked_view {:?}", "resolve_deps_when_create_database_linked_view {:?}",
field_settings_map field_settings_map
@ -68,7 +68,8 @@ impl DatabaseLayoutDepsResolver {
DatabaseLayout::Calendar => { DatabaseLayout::Calendar => {
match self match self
.database .database
.lock() .read()
.await
.get_fields(None) .get_fields(None)
.into_iter() .into_iter()
.find(|field| FieldType::from(field.field_type) == FieldType::DateTime) .find(|field| FieldType::from(field.field_type) == FieldType::DateTime)
@ -89,13 +90,20 @@ impl DatabaseLayoutDepsResolver {
/// If the new layout type is a calendar and there is no date field in the database, it will add /// If the new layout type is a calendar and there is no date field in the database, it will add
/// a new date field to the database and create the corresponding layout setting. /// a new date field to the database and create the corresponding layout setting.
pub fn resolve_deps_when_update_layout_type(&self, view_id: &str) { pub async fn resolve_deps_when_update_layout_type(&self, view_id: &str) {
let fields = self.database.lock().get_fields(None); let mut database = self.database.write().await;
let fields = database.get_fields(None);
// Insert the layout setting if it does not exist // Insert the layout setting if it does not exist
match &self.database_layout { match &self.database_layout {
DatabaseLayout::Grid => {}, DatabaseLayout::Grid => {},
DatabaseLayout::Board => { DatabaseLayout::Board => {
self.create_board_layout_setting_if_need(view_id); if database
.get_layout_setting::<BoardLayoutSetting>(view_id, &self.database_layout)
.is_none()
{
let layout_setting = BoardLayoutSetting::new();
database.insert_layout_setting(view_id, &self.database_layout, layout_setting);
}
}, },
DatabaseLayout::Calendar => { DatabaseLayout::Calendar => {
let date_field_id = match fields let date_field_id = match fields
@ -106,7 +114,7 @@ impl DatabaseLayoutDepsResolver {
tracing::trace!("Create a new date field after layout type change"); tracing::trace!("Create a new date field after layout type change");
let field = self.create_date_field(); let field = self.create_date_field();
let field_id = field.id.clone(); let field_id = field.id.clone();
self.database.lock().create_field( database.create_field(
None, None,
field, field,
&OrderObjectPosition::End, &OrderObjectPosition::End,
@ -116,38 +124,14 @@ impl DatabaseLayoutDepsResolver {
}, },
Some(date_field) => date_field.id, Some(date_field) => date_field.id,
}; };
self.create_calendar_layout_setting_if_need(view_id, &date_field_id); if database
},
}
}
fn create_board_layout_setting_if_need(&self, view_id: &str) {
if self
.database
.lock()
.get_layout_setting::<BoardLayoutSetting>(view_id, &self.database_layout)
.is_none()
{
let layout_setting = BoardLayoutSetting::new();
self
.database
.lock()
.insert_layout_setting(view_id, &self.database_layout, layout_setting);
}
}
fn create_calendar_layout_setting_if_need(&self, view_id: &str, field_id: &str) {
if self
.database
.lock()
.get_layout_setting::<CalendarLayoutSetting>(view_id, &self.database_layout) .get_layout_setting::<CalendarLayoutSetting>(view_id, &self.database_layout)
.is_none() .is_none()
{ {
let layout_setting = CalendarLayoutSetting::new(field_id.to_string()); let layout_setting = CalendarLayoutSetting::new(date_field_id);
self database.insert_layout_setting(view_id, &self.database_layout, layout_setting);
.database }
.lock() },
.insert_layout_setting(view_id, &self.database_layout, layout_setting);
} }
} }
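
The resolver now takes a single `write().await` guard at the top of `resolve_deps_when_update_layout_type` and performs every read and insert through it, instead of re-locking the database per call; the two `*_if_need` helpers fold into the match arms as a result. The general shape, sketched with a made-up `Database` type:

```rust
use std::sync::Arc;
use tokio::sync::RwLock;

// Hypothetical stand-in for the database: a single layout setting to manage.
#[derive(Default)]
struct Database {
    board_setting: Option<String>,
}

impl Database {
    fn get_layout_setting(&self) -> Option<&String> {
        self.board_setting.as_ref()
    }
    fn insert_layout_setting(&mut self, setting: String) {
        self.board_setting = Some(setting);
    }
}

struct Resolver {
    database: Arc<RwLock<Database>>,
}

impl Resolver {
    // One guard for the whole operation instead of re-locking per call; this
    // keeps the check and the insert atomic with respect to other writers.
    async fn resolve(&self) {
        let mut database = self.database.write().await;
        if database.get_layout_setting().is_none() {
            database.insert_layout_setting("board layout".to_string());
        }
    }
}

#[tokio::main]
async fn main() {
    let resolver = Resolver {
        database: Arc::new(RwLock::new(Database::default())),
    };
    resolver.resolve().await;
    assert!(resolver.database.read().await.get_layout_setting().is_some());
}
```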


@ -1,8 +1,8 @@
use async_trait::async_trait;
use collab_database::fields::Field; use collab_database::fields::Field;
use std::sync::Arc; use std::sync::Arc;
use collab_database::rows::RowCell; use collab_database::rows::RowCell;
use lib_infra::future::{to_fut, Fut};
use crate::services::calculations::{ use crate::services::calculations::{
Calculation, CalculationsController, CalculationsDelegate, CalculationsTaskHandler, Calculation, CalculationsController, CalculationsDelegate, CalculationsTaskHandler,
@ -17,7 +17,7 @@ pub async fn make_calculations_controller(
delegate: Arc<dyn DatabaseViewOperation>, delegate: Arc<dyn DatabaseViewOperation>,
notifier: DatabaseViewChangedNotifier, notifier: DatabaseViewChangedNotifier,
) -> Arc<CalculationsController> { ) -> Arc<CalculationsController> {
let calculations = delegate.get_all_calculations(view_id); let calculations = delegate.get_all_calculations(view_id).await;
let task_scheduler = delegate.get_task_scheduler(); let task_scheduler = delegate.get_task_scheduler();
let calculations_delegate = DatabaseViewCalculationsDelegateImpl(delegate.clone()); let calculations_delegate = DatabaseViewCalculationsDelegateImpl(delegate.clone());
let handler_id = gen_handler_id(); let handler_id = gen_handler_id();
@ -29,8 +29,7 @@ pub async fn make_calculations_controller(
calculations, calculations,
task_scheduler.clone(), task_scheduler.clone(),
notifier, notifier,
) );
.await;
let calculations_controller = Arc::new(calculations_controller); let calculations_controller = Arc::new(calculations_controller);
task_scheduler task_scheduler
@ -45,30 +44,33 @@ pub async fn make_calculations_controller(
struct DatabaseViewCalculationsDelegateImpl(Arc<dyn DatabaseViewOperation>); struct DatabaseViewCalculationsDelegateImpl(Arc<dyn DatabaseViewOperation>);
#[async_trait]
impl CalculationsDelegate for DatabaseViewCalculationsDelegateImpl { impl CalculationsDelegate for DatabaseViewCalculationsDelegateImpl {
fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut<Vec<Arc<RowCell>>> { async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec<Arc<RowCell>> {
self.0.get_cells_for_field(view_id, field_id) self.0.get_cells_for_field(view_id, field_id).await
} }
fn get_field(&self, field_id: &str) -> Option<Field> { async fn get_field(&self, field_id: &str) -> Option<Field> {
self.0.get_field(field_id) self.0.get_field(field_id).await
} }
fn get_calculation(&self, view_id: &str, field_id: &str) -> Fut<Option<Arc<Calculation>>> { async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option<Arc<Calculation>> {
let calculation = self.0.get_calculation(view_id, field_id).map(Arc::new); self
to_fut(async move { calculation }) .0
.get_calculation(view_id, field_id)
.await
.map(Arc::new)
} }
fn update_calculation(&self, view_id: &str, calculation: Calculation) { async fn update_calculation(&self, view_id: &str, calculation: Calculation) {
self.0.update_calculation(view_id, calculation) self.0.update_calculation(view_id, calculation).await
} }
fn remove_calculation(&self, view_id: &str, calculation_id: &str) { async fn remove_calculation(&self, view_id: &str, calculation_id: &str) {
self.0.remove_calculation(view_id, calculation_id) self.0.remove_calculation(view_id, calculation_id).await
} }
fn get_all_calculations(&self, view_id: &str) -> Fut<Arc<Vec<Arc<Calculation>>>> { async fn get_all_calculations(&self, view_id: &str) -> Arc<Vec<Arc<Calculation>>> {
let calculations = Arc::new(self.0.get_all_calculations(view_id)); self.0.get_all_calculations(view_id).await.into()
to_fut(async move { calculations })
} }
} }
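
This delegate shows the mechanical pattern repeated across the crate: methods that used to return `Fut<T>` (a boxed future produced with `to_fut`) become plain `async fn`s under `#[async_trait]`, and the implementations simply await the inner call. A before/after sketch under that assumption; the `Fut` alias below is reconstructed for illustration rather than copied from lib_infra:

```rust
use async_trait::async_trait;
use std::future::Future;
use std::pin::Pin;
use std::sync::Arc;

// Roughly what the old `Fut` alias looked like (reconstructed).
type Fut<T> = Pin<Box<dyn Future<Output = T> + Send + Sync>>;

struct Field {
    id: String,
}

// Old style: async fn in traits was unavailable, so methods returned boxed futures.
#[allow(dead_code)]
trait OldDelegate: Send + Sync {
    fn get_field(&self, field_id: &str) -> Fut<Option<Field>>;
}

// New style: #[async_trait] generates the boxing; callers simply `.await`.
#[async_trait]
trait NewDelegate: Send + Sync {
    async fn get_field(&self, field_id: &str) -> Option<Field>;
}

struct DelegateImpl;

#[async_trait]
impl NewDelegate for DelegateImpl {
    async fn get_field(&self, field_id: &str) -> Option<Field> {
        Some(Field {
            id: field_id.to_string(),
        })
    }
}

#[tokio::main]
async fn main() {
    let delegate: Arc<dyn NewDelegate> = Arc::new(DelegateImpl);
    // The call site changes from polling a returned `Fut` to a direct await.
    let field = delegate.get_field("field-1").await;
    assert_eq!(field.map(|f| f.id), Some("field-1".to_string()));
}
```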


@ -156,6 +156,7 @@ impl DatabaseViewEditor {
let field = self let field = self
.delegate .delegate
.get_field(controller.get_grouping_field_id()) .get_field(controller.get_grouping_field_id())
.await
.ok_or_else(|| FlowyError::internal().with_context("Failed to get grouping field"))?; .ok_or_else(|| FlowyError::internal().with_context("Failed to get grouping field"))?;
controller.will_create_row(&mut cells, &field, &group_id); controller.will_create_row(&mut cells, &field, &group_id);
} }
@ -249,7 +250,10 @@ impl DatabaseViewEditor {
field_id: Option<String>, field_id: Option<String>,
) { ) {
if let Some(controller) = self.group_controller.write().await.as_mut() { if let Some(controller) = self.group_controller.write().await.as_mut() {
let field = self.delegate.get_field(controller.get_grouping_field_id()); let field = self
.delegate
.get_field(controller.get_grouping_field_id())
.await;
if let Some(field) = field { if let Some(field) = field {
let mut row_details = vec![Arc::new(row_detail.clone())]; let mut row_details = vec![Arc::new(row_detail.clone())];
@ -413,8 +417,11 @@ impl DatabaseViewEditor {
pub async fn v_create_group(&self, name: &str) -> FlowyResult<()> { pub async fn v_create_group(&self, name: &str) -> FlowyResult<()> {
let mut old_field: Option<Field> = None; let mut old_field: Option<Field> = None;
let result = if let Some(controller) = self.group_controller.write().await.as_mut() { let result = if let Some(controller) = self.group_controller.write().await.as_mut() {
let create_group_results = controller.create_group(name.to_string())?; let create_group_results = controller.create_group(name.to_string()).await?;
old_field = self.delegate.get_field(controller.get_grouping_field_id()); old_field = self
.delegate
.get_field(controller.get_grouping_field_id())
.await;
create_group_results create_group_results
} else { } else {
(None, None) (None, None)
@ -447,20 +454,22 @@ impl DatabaseViewEditor {
None => return Ok(RowsChangePB::default()), None => return Ok(RowsChangePB::default()),
}; };
let old_field = self.delegate.get_field(controller.get_grouping_field_id()); let old_field = self
let (row_ids, type_option_data) = controller.delete_group(group_id)?; .delegate
.get_field(controller.get_grouping_field_id())
.await;
let (row_ids, type_option_data) = controller.delete_group(group_id).await?;
drop(group_controller); drop(group_controller);
let mut changes = RowsChangePB::default(); let mut changes = RowsChangePB::default();
if let Some(field) = old_field { if let Some(field) = old_field {
let deleted_rows = row_ids for row_id in row_ids {
.iter() if let Some(row) = self.delegate.remove_row(&row_id).await {
.filter_map(|row_id| self.delegate.remove_row(row_id)) changes.deleted_rows.push(row.id.into_inner());
.map(|row| row.id.into_inner()); }
}
changes.deleted_rows.extend(deleted_rows);
if let Some(type_option) = type_option_data { if let Some(type_option) = type_option_data {
self.delegate.update_field(type_option, field).await?; self.delegate.update_field(type_option, field).await?;
@ -478,10 +487,14 @@ impl DatabaseViewEditor {
pub async fn v_update_group(&self, changeset: Vec<GroupChangeset>) -> FlowyResult<()> { pub async fn v_update_group(&self, changeset: Vec<GroupChangeset>) -> FlowyResult<()> {
let mut type_option_data = None; let mut type_option_data = None;
let (old_field, updated_groups) = let (old_field, updated_groups) = if let Some(controller) =
if let Some(controller) = self.group_controller.write().await.as_mut() { self.group_controller.write().await.as_mut()
let old_field = self.delegate.get_field(controller.get_grouping_field_id()); {
let (updated_groups, new_type_option) = controller.apply_group_changeset(&changeset)?; let old_field = self
.delegate
.get_field(controller.get_grouping_field_id())
.await;
let (updated_groups, new_type_option) = controller.apply_group_changeset(&changeset).await?;
if new_type_option.is_some() { if new_type_option.is_some() {
type_option_data = new_type_option; type_option_data = new_type_option;
@ -511,7 +524,7 @@ impl DatabaseViewEditor {
} }
pub async fn v_get_all_sorts(&self) -> Vec<Sort> { pub async fn v_get_all_sorts(&self) -> Vec<Sort> {
self.delegate.get_all_sorts(&self.view_id) self.delegate.get_all_sorts(&self.view_id).await
} }
#[tracing::instrument(level = "trace", skip(self), err)] #[tracing::instrument(level = "trace", skip(self), err)]
@ -528,7 +541,7 @@ impl DatabaseViewEditor {
condition: params.condition.into(), condition: params.condition.into(),
}; };
self.delegate.insert_sort(&self.view_id, sort.clone()); self.delegate.insert_sort(&self.view_id, sort.clone()).await;
let mut sort_controller = self.sort_controller.write().await; let mut sort_controller = self.sort_controller.write().await;
@ -549,7 +562,8 @@ impl DatabaseViewEditor {
pub async fn v_reorder_sort(&self, params: ReorderSortPayloadPB) -> FlowyResult<()> { pub async fn v_reorder_sort(&self, params: ReorderSortPayloadPB) -> FlowyResult<()> {
self self
.delegate .delegate
.move_sort(&self.view_id, &params.from_sort_id, &params.to_sort_id); .move_sort(&self.view_id, &params.from_sort_id, &params.to_sort_id)
.await;
let notification = self let notification = self
.sort_controller .sort_controller
@ -573,7 +587,10 @@ impl DatabaseViewEditor {
.apply_changeset(SortChangeset::from_delete(params.sort_id.clone())) .apply_changeset(SortChangeset::from_delete(params.sort_id.clone()))
.await; .await;
self.delegate.remove_sort(&self.view_id, &params.sort_id); self
.delegate
.remove_sort(&self.view_id, &params.sort_id)
.await;
notify_did_update_sort(notification).await; notify_did_update_sort(notification).await;
Ok(()) Ok(())
@ -583,7 +600,7 @@ impl DatabaseViewEditor {
let all_sorts = self.v_get_all_sorts().await; let all_sorts = self.v_get_all_sorts().await;
self.sort_controller.write().await.delete_all_sorts().await; self.sort_controller.write().await.delete_all_sorts().await;
self.delegate.remove_all_sorts(&self.view_id); self.delegate.remove_all_sorts(&self.view_id).await;
let mut notification = SortChangesetNotificationPB::new(self.view_id.clone()); let mut notification = SortChangesetNotificationPB::new(self.view_id.clone());
notification.delete_sorts = all_sorts.into_iter().map(SortPB::from).collect(); notification.delete_sorts = all_sorts.into_iter().map(SortPB::from).collect();
notify_did_update_sort(notification).await; notify_did_update_sort(notification).await;
@ -591,7 +608,7 @@ impl DatabaseViewEditor {
} }
pub async fn v_get_all_calculations(&self) -> Vec<Arc<Calculation>> { pub async fn v_get_all_calculations(&self) -> Vec<Arc<Calculation>> {
self.delegate.get_all_calculations(&self.view_id) self.delegate.get_all_calculations(&self.view_id).await
} }
pub async fn v_update_calculations( pub async fn v_update_calculations(
@ -620,7 +637,8 @@ impl DatabaseViewEditor {
let calculation: Calculation = Calculation::from(&insert); let calculation: Calculation = Calculation::from(&insert);
self self
.delegate .delegate
.update_calculation(&params.view_id, calculation); .update_calculation(&params.view_id, calculation)
.await;
} }
} }
@ -636,7 +654,8 @@ impl DatabaseViewEditor {
) -> FlowyResult<()> { ) -> FlowyResult<()> {
self self
.delegate .delegate
.remove_calculation(&params.view_id, &params.calculation_id); .remove_calculation(&params.view_id, &params.calculation_id)
.await;
let calculation = Calculation::none(params.calculation_id, params.field_id, None); let calculation = Calculation::none(params.calculation_id, params.field_id, None);
@ -653,11 +672,11 @@ impl DatabaseViewEditor {
} }
pub async fn v_get_all_filters(&self) -> Vec<Filter> { pub async fn v_get_all_filters(&self) -> Vec<Filter> {
self.delegate.get_all_filters(&self.view_id) self.delegate.get_all_filters(&self.view_id).await
} }
pub async fn v_get_filter(&self, filter_id: &str) -> Option<Filter> { pub async fn v_get_filter(&self, filter_id: &str) -> Option<Filter> {
self.delegate.get_filter(&self.view_id, filter_id) self.delegate.get_filter(&self.view_id, filter_id).await
} }
#[tracing::instrument(level = "trace", skip(self), err)] #[tracing::instrument(level = "trace", skip(self), err)]
@ -686,15 +705,23 @@ impl DatabaseViewEditor {
match layout_ty { match layout_ty {
DatabaseLayout::Grid => {}, DatabaseLayout::Grid => {},
DatabaseLayout::Board => { DatabaseLayout::Board => {
if let Some(value) = self.delegate.get_layout_setting(&self.view_id, layout_ty) { if let Some(value) = self
.delegate
.get_layout_setting(&self.view_id, layout_ty)
.await
{
layout_setting.board = Some(value.into()); layout_setting.board = Some(value.into());
} }
}, },
DatabaseLayout::Calendar => { DatabaseLayout::Calendar => {
if let Some(value) = self.delegate.get_layout_setting(&self.view_id, layout_ty) { if let Some(value) = self
.delegate
.get_layout_setting(&self.view_id, layout_ty)
.await
{
let calendar_setting = CalendarLayoutSetting::from(value); let calendar_setting = CalendarLayoutSetting::from(value);
// Check whether the field exists // Check whether the field exists
if let Some(field) = self.delegate.get_field(&calendar_setting.field_id) { if let Some(field) = self.delegate.get_field(&calendar_setting.field_id).await {
let field_type = FieldType::from(field.field_type); let field_type = FieldType::from(field.field_type);
// Check whether the field type is DateTime // Check whether the field type is DateTime
@ -723,27 +750,33 @@ impl DatabaseViewEditor {
DatabaseLayout::Board => { DatabaseLayout::Board => {
let layout_setting = params.board.unwrap(); let layout_setting = params.board.unwrap();
self.delegate.insert_layout_setting( self
.delegate
.insert_layout_setting(
&self.view_id, &self.view_id,
&params.layout_type, &params.layout_type,
layout_setting.clone().into(), layout_setting.clone().into(),
); )
.await;
Some(DatabaseLayoutSettingPB::from_board(layout_setting)) Some(DatabaseLayoutSettingPB::from_board(layout_setting))
}, },
DatabaseLayout::Calendar => { DatabaseLayout::Calendar => {
let layout_setting = params.calendar.unwrap(); let layout_setting = params.calendar.unwrap();
if let Some(field) = self.delegate.get_field(&layout_setting.field_id) { if let Some(field) = self.delegate.get_field(&layout_setting.field_id).await {
if FieldType::from(field.field_type) != FieldType::DateTime { if FieldType::from(field.field_type) != FieldType::DateTime {
return Err(FlowyError::unexpect_calendar_field_type()); return Err(FlowyError::unexpect_calendar_field_type());
} }
self.delegate.insert_layout_setting( self
.delegate
.insert_layout_setting(
&self.view_id, &self.view_id,
&params.layout_type, &params.layout_type,
layout_setting.clone().into(), layout_setting.clone().into(),
); )
.await;
Some(DatabaseLayoutSettingPB::from_calendar(layout_setting)) Some(DatabaseLayoutSettingPB::from_calendar(layout_setting))
} else { } else {
@ -769,10 +802,10 @@ impl DatabaseViewEditor {
let notification = self.filter_controller.apply_changeset(changeset).await; let notification = self.filter_controller.apply_changeset(changeset).await;
notify_did_update_filter(notification).await; notify_did_update_filter(notification).await;
let sorts = self.delegate.get_all_sorts(&self.view_id); let sorts = self.delegate.get_all_sorts(&self.view_id).await;
if let Some(sort) = sorts.iter().find(|sort| sort.field_id == deleted_field_id) { if let Some(sort) = sorts.iter().find(|sort| sort.field_id == deleted_field_id) {
self.delegate.remove_sort(&self.view_id, &sort.id); self.delegate.remove_sort(&self.view_id, &sort.id).await;
let notification = self let notification = self
.sort_controller .sort_controller
.write() .write()
@ -810,7 +843,7 @@ impl DatabaseViewEditor {
pub async fn v_did_update_field_type_option(&self, old_field: &Field) -> FlowyResult<()> { pub async fn v_did_update_field_type_option(&self, old_field: &Field) -> FlowyResult<()> {
let field_id = &old_field.id; let field_id = &old_field.id;
if let Some(field) = self.delegate.get_field(field_id) { if let Some(field) = self.delegate.get_field(field_id).await {
self self
.sort_controller .sort_controller
.read() .read()
@ -839,7 +872,7 @@ impl DatabaseViewEditor {
/// Called when a grouping field is updated. /// Called when a grouping field is updated.
#[tracing::instrument(level = "debug", skip_all, err)] #[tracing::instrument(level = "debug", skip_all, err)]
pub async fn v_group_by_field(&self, field_id: &str) -> FlowyResult<()> { pub async fn v_group_by_field(&self, field_id: &str) -> FlowyResult<()> {
if let Some(field) = self.delegate.get_field(field_id) { if let Some(field) = self.delegate.get_field(field_id).await {
tracing::trace!("create new group controller"); tracing::trace!("create new group controller");
let new_group_controller = new_group_controller( let new_group_controller = new_group_controller(
@ -890,7 +923,7 @@ impl DatabaseViewEditor {
let text_cell = get_cell_for_row(self.delegate.clone(), &primary_field.id, &row_id).await?; let text_cell = get_cell_for_row(self.delegate.clone(), &primary_field.id, &row_id).await?;
// Date // Date
let date_field = self.delegate.get_field(&calendar_setting.field_id)?; let date_field = self.delegate.get_field(&calendar_setting.field_id).await?;
let date_cell = get_cell_for_row(self.delegate.clone(), &date_field.id, &row_id).await?; let date_cell = get_cell_for_row(self.delegate.clone(), &date_field.id, &row_id).await?;
let title = text_cell let title = text_cell
@ -981,20 +1014,23 @@ impl DatabaseViewEditor {
} }
pub async fn v_get_layout_type(&self) -> DatabaseLayout { pub async fn v_get_layout_type(&self) -> DatabaseLayout {
self.delegate.get_layout_for_view(&self.view_id) self.delegate.get_layout_for_view(&self.view_id).await
} }
#[tracing::instrument(level = "trace", skip_all)] #[tracing::instrument(level = "trace", skip_all)]
pub async fn v_update_layout_type(&self, new_layout_type: DatabaseLayout) -> FlowyResult<()> { pub async fn v_update_layout_type(&self, new_layout_type: DatabaseLayout) -> FlowyResult<()> {
self self
.delegate .delegate
.update_layout_type(&self.view_id, &new_layout_type); .update_layout_type(&self.view_id, &new_layout_type)
.await;
// Use the {} braces to scope the resolver, because DatabaseLayoutDepsResolver // Use the {} braces to scope the resolver, because DatabaseLayoutDepsResolver
// is not Send and Sync, so it can't be moved into the async block. // is not Send and Sync, so it can't be moved into the async block.
{ {
let resolver = DatabaseLayoutDepsResolver::new(self.delegate.get_database(), new_layout_type); let resolver = DatabaseLayoutDepsResolver::new(self.delegate.get_database(), new_layout_type);
resolver.resolve_deps_when_update_layout_type(&self.view_id); resolver
.resolve_deps_when_update_layout_type(&self.view_id)
.await;
} }
// initialize the group controller if the current layout support grouping // initialize the group controller if the current layout support grouping
@ -1034,12 +1070,14 @@ impl DatabaseViewEditor {
} }
pub async fn v_get_field_settings(&self, field_ids: &[String]) -> HashMap<String, FieldSettings> { pub async fn v_get_field_settings(&self, field_ids: &[String]) -> HashMap<String, FieldSettings> {
self.delegate.get_field_settings(&self.view_id, field_ids) self
.delegate
.get_field_settings(&self.view_id, field_ids)
.await
} }
pub async fn v_update_field_settings(&self, params: FieldSettingsChangesetPB) -> FlowyResult<()> { pub async fn v_update_field_settings(&self, params: FieldSettingsChangesetPB) -> FlowyResult<()> {
self.delegate.update_field_settings(params); self.delegate.update_field_settings(params).await;
Ok(()) Ok(())
} }
@ -1053,7 +1091,7 @@ impl DatabaseViewEditor {
.await .await
.as_ref() .as_ref()
.map(|controller| controller.get_grouping_field_id().to_owned())?; .map(|controller| controller.get_grouping_field_id().to_owned())?;
let field = self.delegate.get_field(&group_field_id)?; let field = self.delegate.get_field(&group_field_id).await?;
let mut write_guard = self.group_controller.write().await; let mut write_guard = self.group_controller.write().await;
if let Some(group_controller) = &mut *write_guard { if let Some(group_controller) = &mut *write_guard {
f(group_controller, field).ok() f(group_controller, field).ok()
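
One change in `v_delete_group` above is not purely mechanical: `remove_row` used to be callable inside `filter_map`, but once it is async the iterator chain becomes a sequential `for` loop that awaits each call and collects the deleted row ids. A small stand-alone sketch of that shape, with a hypothetical delegate:

```rust
use std::sync::Arc;
use tokio::sync::Mutex;

struct Row {
    id: i64,
}

// Hypothetical async delegate: removing a row now has to be awaited.
struct Delegate {
    rows: Mutex<Vec<Row>>,
}

impl Delegate {
    async fn remove_row(&self, row_id: i64) -> Option<Row> {
        let mut rows = self.rows.lock().await;
        let index = rows.iter().position(|row| row.id == row_id)?;
        Some(rows.remove(index))
    }
}

#[tokio::main]
async fn main() {
    let delegate = Arc::new(Delegate {
        rows: Mutex::new(vec![Row { id: 1 }, Row { id: 2 }]),
    });

    let row_ids = vec![1, 2, 3];
    let mut deleted_rows = Vec::new();
    // `filter_map(|id| delegate.remove_row(id))` no longer compiles once the
    // call is async, so the closure-based chain becomes a sequential loop.
    for row_id in row_ids {
        if let Some(row) = delegate.remove_row(row_id).await {
            deleted_rows.push(row.id);
        }
    }
    assert_eq!(deleted_rows, vec![1, 2]);
}
```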


@ -1,10 +1,9 @@
use async_trait::async_trait;
use std::sync::Arc; use std::sync::Arc;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId}; use collab_database::rows::{RowDetail, RowId};
use lib_infra::future::Fut;
use crate::services::cell::CellCache; use crate::services::cell::CellCache;
use crate::services::database_view::{ use crate::services::database_view::{
gen_handler_id, DatabaseViewChangedNotifier, DatabaseViewOperation, gen_handler_id, DatabaseViewChangedNotifier, DatabaseViewOperation,
@ -43,28 +42,29 @@ pub async fn make_filter_controller(
struct DatabaseViewFilterDelegateImpl(Arc<dyn DatabaseViewOperation>); struct DatabaseViewFilterDelegateImpl(Arc<dyn DatabaseViewOperation>);
#[async_trait]
impl FilterDelegate for DatabaseViewFilterDelegateImpl { impl FilterDelegate for DatabaseViewFilterDelegateImpl {
fn get_field(&self, field_id: &str) -> Option<Field> { async fn get_field(&self, field_id: &str) -> Option<Field> {
self.0.get_field(field_id) self.0.get_field(field_id).await
} }
fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Fut<Vec<Field>> { async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field> {
self.0.get_fields(view_id, field_ids) self.0.get_fields(view_id, field_ids).await
} }
fn get_rows(&self, view_id: &str) -> Fut<Vec<Arc<RowDetail>>> { async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
self.0.get_rows(view_id) self.0.get_rows(view_id).await
} }
fn get_row(&self, view_id: &str, rows_id: &RowId) -> Fut<Option<(usize, Arc<RowDetail>)>> { async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc<RowDetail>)> {
self.0.get_row(view_id, rows_id) self.0.get_row(view_id, rows_id).await
} }
fn get_all_filters(&self, view_id: &str) -> Vec<Filter> { async fn get_all_filters(&self, view_id: &str) -> Vec<Filter> {
self.0.get_all_filters(view_id) self.0.get_all_filters(view_id).await
} }
fn save_filters(&self, view_id: &str, filters: &[Filter]) { async fn save_filters(&self, view_id: &str, filters: &[Filter]) {
self.0.save_filters(view_id, filters) self.0.save_filters(view_id, filters).await
} }
} }

View File

@ -1,10 +1,10 @@
use async_trait::async_trait;
use std::sync::Arc; use std::sync::Arc;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId}; use collab_database::rows::{RowDetail, RowId};
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
use lib_infra::future::{to_fut, Fut};
use crate::entities::FieldType; use crate::entities::FieldType;
use crate::services::database_view::DatabaseViewOperation; use crate::services::database_view::DatabaseViewOperation;
@ -21,7 +21,7 @@ pub async fn new_group_controller(
filter_controller: Arc<FilterController>, filter_controller: Arc<FilterController>,
grouping_field: Option<Field>, grouping_field: Option<Field>,
) -> FlowyResult<Option<Box<dyn GroupController>>> { ) -> FlowyResult<Option<Box<dyn GroupController>>> {
if !delegate.get_layout_for_view(&view_id).is_board() { if !delegate.get_layout_for_view(&view_id).await.is_board() {
return Ok(None); return Ok(None);
} }
@ -61,45 +61,45 @@ pub(crate) struct GroupControllerDelegateImpl {
filter_controller: Arc<FilterController>, filter_controller: Arc<FilterController>,
} }
#[async_trait]
impl GroupContextDelegate for GroupControllerDelegateImpl { impl GroupContextDelegate for GroupControllerDelegateImpl {
fn get_group_setting(&self, view_id: &str) -> Fut<Option<Arc<GroupSetting>>> { async fn get_group_setting(&self, view_id: &str) -> Option<Arc<GroupSetting>> {
let mut settings = self.delegate.get_group_setting(view_id); let mut settings = self.delegate.get_group_setting(view_id).await;
to_fut(async move {
if settings.is_empty() { if settings.is_empty() {
None None
} else { } else {
Some(Arc::new(settings.remove(0))) Some(Arc::new(settings.remove(0)))
} }
})
} }
fn get_configuration_cells(&self, view_id: &str, field_id: &str) -> Fut<Vec<RowSingleCellData>> { async fn get_configuration_cells(&self, view_id: &str, field_id: &str) -> Vec<RowSingleCellData> {
let field_id = field_id.to_owned();
let view_id = view_id.to_owned();
let delegate = self.delegate.clone(); let delegate = self.delegate.clone();
to_fut(async move { get_cells_for_field(delegate, &view_id, &field_id).await }) get_cells_for_field(delegate, view_id, field_id).await
} }
fn save_configuration(&self, view_id: &str, group_setting: GroupSetting) -> Fut<FlowyResult<()>> { async fn save_configuration(
self.delegate.insert_group_setting(view_id, group_setting); &self,
to_fut(async move { Ok(()) }) view_id: &str,
group_setting: GroupSetting,
) -> FlowyResult<()> {
self
.delegate
.insert_group_setting(view_id, group_setting)
.await;
Ok(())
} }
} }
#[async_trait]
impl GroupControllerDelegate for GroupControllerDelegateImpl { impl GroupControllerDelegate for GroupControllerDelegateImpl {
fn get_field(&self, field_id: &str) -> Option<Field> { async fn get_field(&self, field_id: &str) -> Option<Field> {
self.delegate.get_field(field_id) self.delegate.get_field(field_id).await
} }
fn get_all_rows(&self, view_id: &str) -> Fut<Vec<Arc<RowDetail>>> { async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
let view_id = view_id.to_string(); let mut row_details = self.delegate.get_rows(view_id).await;
let delegate = self.delegate.clone(); self.filter_controller.filter_rows(&mut row_details).await;
let filter_controller = self.filter_controller.clone();
to_fut(async move {
let mut row_details = delegate.get_rows(&view_id).await;
filter_controller.filter_rows(&mut row_details).await;
row_details row_details
})
} }
} }
@ -108,7 +108,7 @@ pub(crate) async fn get_cell_for_row(
field_id: &str, field_id: &str,
row_id: &RowId, row_id: &RowId,
) -> Option<RowSingleCellData> { ) -> Option<RowSingleCellData> {
let field = delegate.get_field(field_id)?; let field = delegate.get_field(field_id).await?;
let row_cell = delegate.get_cell_in_row(field_id, row_id).await; let row_cell = delegate.get_cell_in_row(field_id, row_id).await;
let field_type = FieldType::from(field.field_type); let field_type = FieldType::from(field.field_type);
let handler = delegate.get_type_option_cell_handler(&field)?; let handler = delegate.get_type_option_cell_handler(&field)?;
@ -131,7 +131,7 @@ pub(crate) async fn get_cells_for_field(
view_id: &str, view_id: &str,
field_id: &str, field_id: &str,
) -> Vec<RowSingleCellData> { ) -> Vec<RowSingleCellData> {
if let Some(field) = delegate.get_field(field_id) { if let Some(field) = delegate.get_field(field_id).await {
let field_type = FieldType::from(field.field_type); let field_type = FieldType::from(field.field_type);
if let Some(handler) = delegate.get_type_option_cell_handler(&field) { if let Some(handler) = delegate.get_type_option_cell_handler(&field) {
let cells = delegate.get_cells_for_field(view_id, field_id).await; let cells = delegate.get_cells_for_field(view_id, field_id).await;
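
A side effect of the async delegate methods, visible in `get_all_rows` above: there is no longer a need to clone `Arc`s and move them into `to_fut(async move { ... })`, because the generated future can borrow `&self` for the duration of the call. A sketch of that difference with stand-in types (none of the names below come from the actual crates):

```rust
use async_trait::async_trait;
use std::sync::Arc;

// Stand-ins for the real delegate and filter controller.
struct Inner;

impl Inner {
    async fn get_rows(&self, _view_id: &str) -> Vec<String> {
        vec!["row-1".to_string(), "hidden".to_string()]
    }
}

struct FilterController;

impl FilterController {
    async fn filter_rows(&self, rows: &mut Vec<String>) {
        rows.retain(|row| row != "hidden");
    }
}

struct GroupDelegateImpl {
    delegate: Arc<Inner>,
    filter_controller: Arc<FilterController>,
}

#[async_trait]
trait GroupDelegate: Send + Sync {
    async fn get_all_rows(&self, view_id: &str) -> Vec<String>;
}

#[async_trait]
impl GroupDelegate for GroupDelegateImpl {
    async fn get_all_rows(&self, view_id: &str) -> Vec<String> {
        // Before: clone `delegate` and `filter_controller` and move the clones
        // into `to_fut(async move { ... })`. Now the async method borrows the
        // fields of `self` directly for the duration of the call.
        let mut rows = self.delegate.get_rows(view_id).await;
        self.filter_controller.filter_rows(&mut rows).await;
        rows
    }
}

#[tokio::main]
async fn main() {
    let delegate = GroupDelegateImpl {
        delegate: Arc::new(Inner),
        filter_controller: Arc::new(FilterController),
    };
    assert_eq!(delegate.get_all_rows("view-1").await, vec!["row-1".to_string()]);
}
```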


@ -1,14 +1,14 @@
use async_trait::async_trait;
use collab_database::database::Database;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use collab_database::database::MutexDatabase;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Row, RowCell, RowDetail, RowId}; use collab_database::rows::{Row, RowCell, RowDetail, RowId};
use collab_database::views::{DatabaseLayout, DatabaseView, LayoutSetting}; use collab_database::views::{DatabaseLayout, DatabaseView, LayoutSetting};
use tokio::sync::RwLock; use tokio::sync::RwLock;
use flowy_error::FlowyError; use flowy_error::FlowyError;
use lib_infra::future::{Fut, FutureResult};
use lib_infra::priority_task::TaskDispatcher; use lib_infra::priority_task::TaskDispatcher;
use crate::entities::{FieldSettingsChangesetPB, FieldType}; use crate::entities::{FieldSettingsChangesetPB, FieldType};
@ -20,97 +20,102 @@ use crate::services::group::GroupSetting;
use crate::services::sort::Sort; use crate::services::sort::Sort;
/// Defines the operation that can be performed on a database view /// Defines the operation that can be performed on a database view
#[async_trait]
pub trait DatabaseViewOperation: Send + Sync + 'static { pub trait DatabaseViewOperation: Send + Sync + 'static {
/// Get the database that the view belongs to /// Get the database that the view belongs to
fn get_database(&self) -> Arc<MutexDatabase>; fn get_database(&self) -> Arc<RwLock<Database>>;
/// Get the view of the database with the view_id /// Get the view of the database with the view_id
fn get_view(&self, view_id: &str) -> Fut<Option<DatabaseView>>; async fn get_view(&self, view_id: &str) -> Option<DatabaseView>;
/// If the field_ids is None, then it will return all the field revisions /// If the field_ids is None, then it will return all the field revisions
fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Fut<Vec<Field>>; async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field>;
/// Returns the field with the field_id /// Returns the field with the field_id
fn get_field(&self, field_id: &str) -> Option<Field>; async fn get_field(&self, field_id: &str) -> Option<Field>;
fn create_field( async fn create_field(
&self, &self,
view_id: &str, view_id: &str,
name: &str, name: &str,
field_type: FieldType, field_type: FieldType,
type_option_data: TypeOptionData, type_option_data: TypeOptionData,
) -> Fut<Field>; ) -> Field;
fn update_field( async fn update_field(
&self, &self,
type_option_data: TypeOptionData, type_option_data: TypeOptionData,
old_field: Field, old_field: Field,
) -> FutureResult<(), FlowyError>; ) -> Result<(), FlowyError>;
fn get_primary_field(&self) -> Fut<Option<Arc<Field>>>; async fn get_primary_field(&self) -> Option<Arc<Field>>;
/// Returns the index of the row with row_id /// Returns the index of the row with row_id
fn index_of_row(&self, view_id: &str, row_id: &RowId) -> Fut<Option<usize>>; async fn index_of_row(&self, view_id: &str, row_id: &RowId) -> Option<usize>;
/// Returns the `index` and `RowRevision` with row_id /// Returns the `index` and `RowRevision` with row_id
fn get_row(&self, view_id: &str, row_id: &RowId) -> Fut<Option<(usize, Arc<RowDetail>)>>; async fn get_row(&self, view_id: &str, row_id: &RowId) -> Option<(usize, Arc<RowDetail>)>;
/// Returns all the rows in the view /// Returns all the rows in the view
fn get_rows(&self, view_id: &str) -> Fut<Vec<Arc<RowDetail>>>; async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>>;
fn remove_row(&self, row_id: &RowId) -> Option<Row>; async fn remove_row(&self, row_id: &RowId) -> Option<Row>;
fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut<Vec<Arc<RowCell>>>; async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec<Arc<RowCell>>;
fn get_cell_in_row(&self, field_id: &str, row_id: &RowId) -> Fut<Arc<RowCell>>; async fn get_cell_in_row(&self, field_id: &str, row_id: &RowId) -> Arc<RowCell>;
/// Return the database layout type for the view with given view_id /// Return the database layout type for the view with given view_id
/// The default layout type is [DatabaseLayout::Grid] /// The default layout type is [DatabaseLayout::Grid]
fn get_layout_for_view(&self, view_id: &str) -> DatabaseLayout; async fn get_layout_for_view(&self, view_id: &str) -> DatabaseLayout;
fn get_group_setting(&self, view_id: &str) -> Vec<GroupSetting>; async fn get_group_setting(&self, view_id: &str) -> Vec<GroupSetting>;
fn insert_group_setting(&self, view_id: &str, setting: GroupSetting); async fn insert_group_setting(&self, view_id: &str, setting: GroupSetting);
fn get_sort(&self, view_id: &str, sort_id: &str) -> Option<Sort>; async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option<Sort>;
fn insert_sort(&self, view_id: &str, sort: Sort); async fn insert_sort(&self, view_id: &str, sort: Sort);
fn move_sort(&self, view_id: &str, from_sort_id: &str, to_sort_id: &str); async fn move_sort(&self, view_id: &str, from_sort_id: &str, to_sort_id: &str);
fn remove_sort(&self, view_id: &str, sort_id: &str); async fn remove_sort(&self, view_id: &str, sort_id: &str);
fn get_all_sorts(&self, view_id: &str) -> Vec<Sort>; async fn get_all_sorts(&self, view_id: &str) -> Vec<Sort>;
fn remove_all_sorts(&self, view_id: &str); async fn remove_all_sorts(&self, view_id: &str);
fn get_all_calculations(&self, view_id: &str) -> Vec<Arc<Calculation>>; async fn get_all_calculations(&self, view_id: &str) -> Vec<Arc<Calculation>>;
fn get_calculation(&self, view_id: &str, field_id: &str) -> Option<Calculation>; async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option<Calculation>;
fn update_calculation(&self, view_id: &str, calculation: Calculation); async fn update_calculation(&self, view_id: &str, calculation: Calculation);
fn remove_calculation(&self, view_id: &str, calculation_id: &str); async fn remove_calculation(&self, view_id: &str, calculation_id: &str);
fn get_all_filters(&self, view_id: &str) -> Vec<Filter>; async fn get_all_filters(&self, view_id: &str) -> Vec<Filter>;
fn get_filter(&self, view_id: &str, filter_id: &str) -> Option<Filter>; async fn get_filter(&self, view_id: &str, filter_id: &str) -> Option<Filter>;
fn delete_filter(&self, view_id: &str, filter_id: &str); async fn delete_filter(&self, view_id: &str, filter_id: &str);
fn insert_filter(&self, view_id: &str, filter: Filter); async fn insert_filter(&self, view_id: &str, filter: Filter);
fn save_filters(&self, view_id: &str, filters: &[Filter]); async fn save_filters(&self, view_id: &str, filters: &[Filter]);
fn get_layout_setting(&self, view_id: &str, layout_ty: &DatabaseLayout) -> Option<LayoutSetting>; async fn get_layout_setting(
&self,
view_id: &str,
layout_ty: &DatabaseLayout,
) -> Option<LayoutSetting>;
fn insert_layout_setting( async fn insert_layout_setting(
&self, &self,
view_id: &str, view_id: &str,
layout_ty: &DatabaseLayout, layout_ty: &DatabaseLayout,
layout_setting: LayoutSetting, layout_setting: LayoutSetting,
); );
fn update_layout_type(&self, view_id: &str, layout_type: &DatabaseLayout); async fn update_layout_type(&self, view_id: &str, layout_type: &DatabaseLayout);
/// Returns a `TaskDispatcher` used to poll a `Task` /// Returns a `TaskDispatcher` used to poll a `Task`
fn get_task_scheduler(&self) -> Arc<RwLock<TaskDispatcher>>; fn get_task_scheduler(&self) -> Arc<RwLock<TaskDispatcher>>;
@ -120,11 +125,11 @@ pub trait DatabaseViewOperation: Send + Sync + 'static {
field: &Field, field: &Field,
) -> Option<Box<dyn TypeOptionCellDataHandler>>; ) -> Option<Box<dyn TypeOptionCellDataHandler>>;
fn get_field_settings( async fn get_field_settings(
&self, &self,
view_id: &str, view_id: &str,
field_ids: &[String], field_ids: &[String],
) -> HashMap<String, FieldSettings>; ) -> HashMap<String, FieldSettings>;
fn update_field_settings(&self, params: FieldSettingsChangesetPB); async fn update_field_settings(&self, params: FieldSettingsChangesetPB);
} }
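
`DatabaseViewOperation` ends up as a mixed trait: most accessors become `async fn`, while a few (`get_database`, `get_task_scheduler`, `get_type_option_cell_handler`) stay synchronous, which `#[async_trait]` supports without extra ceremony. A trimmed-down sketch of such a mix, with simplified placeholder types:

```rust
use async_trait::async_trait;
use std::sync::Arc;
use tokio::sync::RwLock;

// Simplified placeholders for Database / Field / TaskDispatcher.
struct Database {
    id: String,
}
struct Field {
    id: String,
}
struct TaskDispatcher;

#[async_trait]
trait DatabaseViewOperation: Send + Sync + 'static {
    // Still synchronous: these just hand back shared handles.
    fn get_database(&self) -> Arc<RwLock<Database>>;
    fn get_task_scheduler(&self) -> Arc<RwLock<TaskDispatcher>>;

    // Now asynchronous: the data may have to be read behind an async lock.
    async fn get_field(&self, field_id: &str) -> Option<Field>;
}

struct Operation {
    database: Arc<RwLock<Database>>,
    tasks: Arc<RwLock<TaskDispatcher>>,
}

#[async_trait]
impl DatabaseViewOperation for Operation {
    fn get_database(&self) -> Arc<RwLock<Database>> {
        self.database.clone()
    }
    fn get_task_scheduler(&self) -> Arc<RwLock<TaskDispatcher>> {
        self.tasks.clone()
    }
    async fn get_field(&self, field_id: &str) -> Option<Field> {
        Some(Field {
            id: field_id.to_string(),
        })
    }
}

#[tokio::main]
async fn main() {
    let op = Operation {
        database: Arc::new(RwLock::new(Database { id: "db".into() })),
        tasks: Arc::new(RwLock::new(TaskDispatcher)),
    };
    let db_id = op.get_database().read().await.id.clone();
    let field = op.get_field("field-1").await;
    println!("{db_id} {:?}", field.map(|f| f.id));
}
```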


@ -1,11 +1,10 @@
use async_trait::async_trait;
use std::sync::Arc; use std::sync::Arc;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::RowDetail; use collab_database::rows::RowDetail;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use lib_infra::future::{to_fut, Fut};
use crate::services::cell::CellCache; use crate::services::cell::CellCache;
use crate::services::database_view::{ use crate::services::database_view::{
gen_handler_id, DatabaseViewChangedNotifier, DatabaseViewOperation, gen_handler_id, DatabaseViewChangedNotifier, DatabaseViewOperation,
@ -23,6 +22,7 @@ pub(crate) async fn make_sort_controller(
let handler_id = gen_handler_id(); let handler_id = gen_handler_id();
let sorts = delegate let sorts = delegate
.get_all_sorts(view_id) .get_all_sorts(view_id)
.await
.into_iter() .into_iter()
.map(Arc::new) .map(Arc::new)
.collect(); .collect();
@ -53,38 +53,31 @@ struct DatabaseViewSortDelegateImpl {
filter_controller: Arc<FilterController>, filter_controller: Arc<FilterController>,
} }
#[async_trait]
impl SortDelegate for DatabaseViewSortDelegateImpl { impl SortDelegate for DatabaseViewSortDelegateImpl {
fn get_sort(&self, view_id: &str, sort_id: &str) -> Fut<Option<Arc<Sort>>> { async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option<Arc<Sort>> {
let sort = self.delegate.get_sort(view_id, sort_id).map(Arc::new); self.delegate.get_sort(view_id, sort_id).await.map(Arc::new)
to_fut(async move { sort })
} }
fn get_rows(&self, view_id: &str) -> Fut<Vec<Arc<RowDetail>>> { async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
let view_id = view_id.to_string(); let view_id = view_id.to_string();
let delegate = self.delegate.clone(); let mut row_details = self.delegate.get_rows(&view_id).await;
let filter_controller = self.filter_controller.clone(); self.filter_controller.filter_rows(&mut row_details).await;
to_fut(async move {
let mut row_details = delegate.get_rows(&view_id).await;
filter_controller.filter_rows(&mut row_details).await;
row_details row_details
})
} }
fn filter_row(&self, row_detail: &RowDetail) -> Fut<bool> { async fn filter_row(&self, row_detail: &RowDetail) -> bool {
let filter_controller = self.filter_controller.clone();
let row_detail = row_detail.clone(); let row_detail = row_detail.clone();
to_fut(async move {
let mut row_details = vec![Arc::new(row_detail)]; let mut row_details = vec![Arc::new(row_detail)];
filter_controller.filter_rows(&mut row_details).await; self.filter_controller.filter_rows(&mut row_details).await;
!row_details.is_empty() !row_details.is_empty()
})
} }
fn get_field(&self, field_id: &str) -> Option<Field> { async fn get_field(&self, field_id: &str) -> Option<Field> {
self.delegate.get_field(field_id) self.delegate.get_field(field_id).await
} }
fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Fut<Vec<Field>> { async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field> {
self.delegate.get_fields(view_id, field_ids) self.delegate.get_fields(view_id, field_ids).await
} }
} }


@ -1,11 +1,11 @@
use collab_database::database::Database;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use collab_database::database::MutexDatabase;
use nanoid::nanoid; use nanoid::nanoid;
use tokio::sync::{broadcast, RwLock}; use tokio::sync::{broadcast, RwLock};
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::FlowyResult;
use crate::services::cell::CellCache; use crate::services::cell::CellCache;
use crate::services::database::DatabaseRowEvent; use crate::services::database::DatabaseRowEvent;
@ -17,7 +17,7 @@ pub type EditorByViewId = HashMap<String, Arc<DatabaseViewEditor>>;
pub struct DatabaseViews { pub struct DatabaseViews {
#[allow(dead_code)] #[allow(dead_code)]
database: Arc<MutexDatabase>, database: Arc<RwLock<Database>>,
cell_cache: CellCache, cell_cache: CellCache,
view_operation: Arc<dyn DatabaseViewOperation>, view_operation: Arc<dyn DatabaseViewOperation>,
view_editors: Arc<RwLock<EditorByViewId>>, view_editors: Arc<RwLock<EditorByViewId>>,
@ -25,7 +25,7 @@ pub struct DatabaseViews {
impl DatabaseViews { impl DatabaseViews {
pub async fn new( pub async fn new(
database: Arc<MutexDatabase>, database: Arc<RwLock<Database>>,
cell_cache: CellCache, cell_cache: CellCache,
view_operation: Arc<dyn DatabaseViewOperation>, view_operation: Arc<dyn DatabaseViewOperation>,
view_editors: Arc<RwLock<EditorByViewId>>, view_editors: Arc<RwLock<EditorByViewId>>,
@ -59,13 +59,10 @@ impl DatabaseViews {
return Ok(editor.clone()); return Ok(editor.clone());
} }
let mut editor_map = self.view_editors.try_write().map_err(|err| { //FIXME: not thread-safe
FlowyError::internal().with_context(format!( let mut editor_map = self.view_editors.write().await;
"fail to acquire the lock of editor_by_view_id: {}", let database_id = self.database.read().await.get_database_id();
err //FIXME: that method below is not Send+Sync
))
})?;
let database_id = self.database.lock().get_database_id();
let editor = Arc::new( let editor = Arc::new(
DatabaseViewEditor::new( DatabaseViewEditor::new(
database_id, database_id,
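
The editor lookup above replaces `try_write()`, which turned a contended lock into a `FlowyError`, with `write().await`, which waits instead of failing (the FIXME comments flag the remaining concerns). The behavioural difference in isolation, using tokio's RwLock:

```rust
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;

#[tokio::main]
async fn main() {
    let editors: Arc<RwLock<HashMap<String, String>>> = Arc::new(RwLock::new(HashMap::new()));

    // Keep a read guard alive to simulate contention from another caller.
    let read_guard = editors.read().await;

    // Old approach: fail fast when the lock is already held.
    assert!(editors.try_write().is_err());

    drop(read_guard);

    // New approach: wait until the lock becomes available, then proceed.
    let mut writer = editors.write().await;
    writer.insert("view-1".to_string(), "editor".to_string());
    assert_eq!(writer.len(), 1);
}
```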


@ -1,6 +1,6 @@
use std::sync::Arc; use std::sync::Arc;
use flowy_error::FlowyResult; use flowy_error::{FlowyError, FlowyResult};
use crate::entities::FieldType; use crate::entities::FieldType;
use crate::services::database::DatabaseEditor; use crate::services::database::DatabaseEditor;
@ -11,14 +11,15 @@ pub async fn edit_field_type_option<T: TypeOption>(
editor: Arc<DatabaseEditor>, editor: Arc<DatabaseEditor>,
action: impl FnOnce(&mut T), action: impl FnOnce(&mut T),
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let get_type_option = async { let field = editor
let field = editor.get_field(field_id)?; .get_field(field_id)
.await
.ok_or_else(FlowyError::field_record_not_found)?;
let field_type = FieldType::from(field.field_type); let field_type = FieldType::from(field.field_type);
field.get_type_option::<T>(field_type) let get_type_option = field.get_type_option::<T>(field_type);
};
if let Some(mut type_option) = get_type_option.await { if let Some(mut type_option) = get_type_option {
if let Some(old_field) = editor.get_field(field_id) { if let Some(old_field) = editor.get_field(field_id).await {
action(&mut type_option); action(&mut type_option);
let type_option_data = type_option.into(); let type_option_data = type_option.into();
editor editor


@ -35,7 +35,7 @@ impl From<TypeOptionData> for CheckboxTypeOption {
impl From<CheckboxTypeOption> for TypeOptionData { impl From<CheckboxTypeOption> for TypeOptionData {
fn from(_data: CheckboxTypeOption) -> Self { fn from(_data: CheckboxTypeOption) -> Self {
TypeOptionDataBuilder::new().build() TypeOptionDataBuilder::new()
} }
} }


@ -1,7 +1,7 @@
use std::str::FromStr; use std::str::FromStr;
use bytes::Bytes; use bytes::Bytes;
use collab::core::any_map::AnyMapExtension; use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell}; use collab_database::rows::{new_cell_builder, Cell};
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
@ -21,16 +21,16 @@ impl TypeOptionCellData for CheckboxCellDataPB {
impl From<&Cell> for CheckboxCellDataPB { impl From<&Cell> for CheckboxCellDataPB {
fn from(cell: &Cell) -> Self { fn from(cell: &Cell) -> Self {
let value = cell.get_str_value(CELL_DATA).unwrap_or_default(); let value: String = cell.get_as(CELL_DATA).unwrap_or_default();
CheckboxCellDataPB::from_str(&value).unwrap_or_default() CheckboxCellDataPB::from_str(&value).unwrap_or_default()
} }
} }
impl From<CheckboxCellDataPB> for Cell { impl From<CheckboxCellDataPB> for Cell {
fn from(data: CheckboxCellDataPB) -> Self { fn from(data: CheckboxCellDataPB) -> Self {
new_cell_builder(FieldType::Checkbox) let mut cell = new_cell_builder(FieldType::Checkbox);
.insert_str_value(CELL_DATA, data.to_string()) cell.insert(CELL_DATA.into(), data.to_string().into());
.build() cell
} }
} }
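
The cell conversions move from the `AnyMapExtension` helpers (`get_str_value`, `insert_str_value`, `.build()`) to the map-like API shown in the diff: `new_cell_builder` yields a mutable map, values go in via `insert(key.into(), value.into())`, and typed reads use `get_as::<T>`. Since the collab types are not reproduced here, a plain `HashMap` stands in to show the same read/write shape (all names below are illustrative):

```rust
use std::collections::HashMap;
use std::str::FromStr;

const CELL_DATA: &str = "data";

// Loose stand-in for the collab `Cell` map used in the diff.
type Cell = HashMap<String, String>;

// Stand-in for `new_cell_builder(...)`: hands back a map the caller mutates
// directly, instead of a builder that needs a final `.build()`.
fn new_cell_builder(field_type: &str) -> Cell {
    HashMap::from([("field_type".to_string(), field_type.to_string())])
}

#[derive(Debug, Default, PartialEq)]
struct CheckboxCellData(bool);

impl FromStr for CheckboxCellData {
    type Err = std::convert::Infallible;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(CheckboxCellData(s == "Yes"))
    }
}

impl From<&Cell> for CheckboxCellData {
    fn from(cell: &Cell) -> Self {
        // Mirrors `cell.get_as::<String>(CELL_DATA).unwrap_or_default()`.
        let value: String = cell.get(CELL_DATA).cloned().unwrap_or_default();
        CheckboxCellData::from_str(&value).unwrap_or_default()
    }
}

impl From<CheckboxCellData> for Cell {
    fn from(data: CheckboxCellData) -> Self {
        // Mirrors `cell.insert(CELL_DATA.into(), value.into())`.
        let mut cell = new_cell_builder("Checkbox");
        let value = if data.0 { "Yes" } else { "No" };
        cell.insert(CELL_DATA.to_string(), value.to_string());
        cell
    }
}

fn main() {
    let cell: Cell = CheckboxCellData(true).into();
    assert_eq!(CheckboxCellData::from(&cell), CheckboxCellData(true));
}
```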


@ -31,7 +31,7 @@ impl From<TypeOptionData> for ChecklistTypeOption {
impl From<ChecklistTypeOption> for TypeOptionData { impl From<ChecklistTypeOption> for TypeOptionData {
fn from(_data: ChecklistTypeOption) -> Self { fn from(_data: ChecklistTypeOption) -> Self {
TypeOptionDataBuilder::new().build() TypeOptionDataBuilder::new()
} }
} }


@ -1,6 +1,6 @@
use crate::entities::FieldType; use crate::entities::FieldType;
use crate::services::field::{SelectOption, TypeOptionCellData, CELL_DATA}; use crate::services::field::{SelectOption, TypeOptionCellData, CELL_DATA};
use collab::core::any_map::AnyMapExtension; use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell}; use collab_database::rows::{new_cell_builder, Cell};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt::Debug; use std::fmt::Debug;
@ -64,7 +64,7 @@ impl ChecklistCellData {
impl From<&Cell> for ChecklistCellData { impl From<&Cell> for ChecklistCellData {
fn from(cell: &Cell) -> Self { fn from(cell: &Cell) -> Self {
cell cell
.get_str_value(CELL_DATA) .get_as::<String>(CELL_DATA)
.map(|data| serde_json::from_str::<ChecklistCellData>(&data).unwrap_or_default()) .map(|data| serde_json::from_str::<ChecklistCellData>(&data).unwrap_or_default())
.unwrap_or_default() .unwrap_or_default()
} }
@ -73,9 +73,9 @@ impl From<&Cell> for ChecklistCellData {
impl From<ChecklistCellData> for Cell { impl From<ChecklistCellData> for Cell {
fn from(cell_data: ChecklistCellData) -> Self { fn from(cell_data: ChecklistCellData) -> Self {
let data = serde_json::to_string(&cell_data).unwrap_or_default(); let data = serde_json::to_string(&cell_data).unwrap_or_default();
new_cell_builder(FieldType::Checklist) let mut cell = new_cell_builder(FieldType::Checklist);
.insert_str_value(CELL_DATA, data) cell.insert(CELL_DATA.into(), data.into());
.build() cell
} }
} }

View File

@ -3,7 +3,7 @@ use std::str::FromStr;
use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, NaiveTime, Offset, TimeZone}; use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, NaiveTime, Offset, TimeZone};
use chrono_tz::Tz; use chrono_tz::Tz;
use collab::core::any_map::AnyMapExtension; use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell; use collab_database::rows::Cell;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -36,14 +36,14 @@ impl TypeOption for DateTypeOption {
impl From<TypeOptionData> for DateTypeOption { impl From<TypeOptionData> for DateTypeOption {
fn from(data: TypeOptionData) -> Self { fn from(data: TypeOptionData) -> Self {
let date_format = data let date_format = data
.get_i64_value("date_format") .get_as::<i64>("date_format")
.map(DateFormat::from) .map(DateFormat::from)
.unwrap_or_default(); .unwrap_or_default();
let time_format = data let time_format = data
.get_i64_value("time_format") .get_as::<i64>("time_format")
.map(TimeFormat::from) .map(TimeFormat::from)
.unwrap_or_default(); .unwrap_or_default();
let timezone_id = data.get_str_value("timezone_id").unwrap_or_default(); let timezone_id: String = data.get_as("timezone_id").unwrap_or_default();
Self { Self {
date_format, date_format,
time_format, time_format,
@ -54,11 +54,11 @@ impl From<TypeOptionData> for DateTypeOption {
impl From<DateTypeOption> for TypeOptionData { impl From<DateTypeOption> for TypeOptionData {
fn from(data: DateTypeOption) -> Self { fn from(data: DateTypeOption) -> Self {
TypeOptionDataBuilder::new() TypeOptionDataBuilder::from([
.insert_i64_value("date_format", data.date_format.value()) ("date_format".into(), data.date_format.value().into()),
.insert_i64_value("time_format", data.time_format.value()) ("time_format".into(), data.time_format.value().into()),
.insert_str_value("timezone_id", data.timezone_id) ("timezone_id".into(), data.timezone_id.into()),
.build() ])
} }
} }
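DateTypeOption shows the replacement for the fluent insert_*_value builders: TypeOptionData is now constructed in one shot with TypeOptionDataBuilder::from over (key, value) pairs and read back with get_as::<T>. A hedged sketch of the same shape for a reduced, hypothetical option holding one i64 and one String field:

use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};

// Hypothetical two-field option, used only to illustrate the conversion shape.
struct DemoTypeOption {
  format: i64,
  timezone_id: String,
}

impl From<DemoTypeOption> for TypeOptionData {
  fn from(data: DemoTypeOption) -> Self {
    // `.into()` lifts keys into Strings and values into collab Any entries.
    TypeOptionDataBuilder::from([
      ("format".into(), data.format.into()),
      ("timezone_id".into(), data.timezone_id.into()),
    ])
  }
}

impl From<TypeOptionData> for DemoTypeOption {
  fn from(data: TypeOptionData) -> Self {
    Self {
      format: data.get_as::<i64>("format").unwrap_or_default(),
      timezone_id: data.get_as("timezone_id").unwrap_or_default(),
    }
  }
}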

View File

@ -1,7 +1,7 @@
#![allow(clippy::upper_case_acronyms)] #![allow(clippy::upper_case_acronyms)]
use bytes::Bytes; use bytes::Bytes;
use collab::core::any_map::AnyMapExtension; use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell}; use collab_database::rows::{new_cell_builder, Cell};
use serde::de::Visitor; use serde::de::Visitor;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -58,14 +58,14 @@ impl TypeOptionCellData for DateCellData {
impl From<&Cell> for DateCellData { impl From<&Cell> for DateCellData {
fn from(cell: &Cell) -> Self { fn from(cell: &Cell) -> Self {
let timestamp = cell let timestamp = cell
.get_str_value(CELL_DATA) .get_as::<String>(CELL_DATA)
.and_then(|data| data.parse::<i64>().ok()); .and_then(|data| data.parse::<i64>().ok());
let end_timestamp = cell let end_timestamp = cell
.get_str_value("end_timestamp") .get_as::<String>("end_timestamp")
.and_then(|data| data.parse::<i64>().ok()); .and_then(|data| data.parse::<i64>().ok());
let include_time = cell.get_bool_value("include_time").unwrap_or_default(); let include_time: bool = cell.get_as("include_time").unwrap_or_default();
let is_range = cell.get_bool_value("is_range").unwrap_or_default(); let is_range: bool = cell.get_as("is_range").unwrap_or_default();
let reminder_id = cell.get_str_value("reminder_id").unwrap_or_default(); let reminder_id: String = cell.get_as("reminder_id").unwrap_or_default();
Self { Self {
timestamp, timestamp,
@ -101,13 +101,16 @@ impl From<&DateCellData> for Cell {
}; };
// Most of the case, don't use these keys in other places. Otherwise, we should define // Most of the case, don't use these keys in other places. Otherwise, we should define
// constants for them. // constants for them.
new_cell_builder(FieldType::DateTime) let mut cell = new_cell_builder(FieldType::DateTime);
.insert_str_value(CELL_DATA, timestamp_string) cell.insert(CELL_DATA.into(), timestamp_string.into());
.insert_str_value("end_timestamp", end_timestamp_string) cell.insert("end_timestamp".into(), end_timestamp_string.into());
.insert_bool_value("include_time", cell_data.include_time) cell.insert("include_time".into(), cell_data.include_time.into());
.insert_bool_value("is_range", cell_data.is_range) cell.insert("is_range".into(), cell_data.is_range.into());
.insert_str_value("reminder_id", cell_data.reminder_id.to_owned()) cell.insert(
.build() "reminder_id".into(),
cell_data.reminder_id.to_owned().into(),
);
cell
} }
} }

View File

@ -1,14 +1,16 @@
use collab::preclude::encoding::serde::from_any;
use collab::preclude::Any;
use collab::util::AnyMapExt;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::default::Default; use std::default::Default;
use std::str::FromStr; use std::str::FromStr;
use collab::core::any_map::AnyMapExtension;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::{new_cell_builder, Cell}; use collab_database::rows::{new_cell_builder, Cell};
use fancy_regex::Regex; use fancy_regex::Regex;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use rust_decimal::Decimal; use rust_decimal::Decimal;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Deserializer, Serialize};
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
@ -25,12 +27,24 @@ use crate::services::sort::SortCondition;
// Number // Number
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NumberTypeOption { pub struct NumberTypeOption {
#[serde(default, deserialize_with = "number_format_from_i64")]
pub format: NumberFormat, pub format: NumberFormat,
#[serde(default)]
pub scale: u32, pub scale: u32,
#[serde(default)]
pub symbol: String, pub symbol: String,
#[serde(default)]
pub name: String, pub name: String,
} }
fn number_format_from_i64<'de, D>(deserializer: D) -> Result<NumberFormat, D::Error>
where
D: Deserializer<'de>,
{
let value = i64::deserialize(deserializer)?;
Ok(NumberFormat::from(value))
}
#[derive(Clone, Debug, Default)] #[derive(Clone, Debug, Default)]
pub struct NumberCellData(pub String); pub struct NumberCellData(pub String);
@ -42,15 +56,15 @@ impl TypeOptionCellData for NumberCellData {
impl From<&Cell> for NumberCellData { impl From<&Cell> for NumberCellData {
fn from(cell: &Cell) -> Self { fn from(cell: &Cell) -> Self {
Self(cell.get_str_value(CELL_DATA).unwrap_or_default()) Self(cell.get_as(CELL_DATA).unwrap_or_default())
} }
} }
impl From<NumberCellData> for Cell { impl From<NumberCellData> for Cell {
fn from(data: NumberCellData) -> Self { fn from(data: NumberCellData) -> Self {
new_cell_builder(FieldType::Number) let mut cell = new_cell_builder(FieldType::Number);
.insert_str_value(CELL_DATA, data.0) cell.insert(CELL_DATA.into(), data.0.into());
.build() cell
} }
} }
@ -75,30 +89,18 @@ impl TypeOption for NumberTypeOption {
impl From<TypeOptionData> for NumberTypeOption { impl From<TypeOptionData> for NumberTypeOption {
fn from(data: TypeOptionData) -> Self { fn from(data: TypeOptionData) -> Self {
let format = data from_any(&Any::from(data)).unwrap()
.get_i64_value("format")
.map(NumberFormat::from)
.unwrap_or_default();
let scale = data.get_i64_value("scale").unwrap_or_default() as u32;
let symbol = data.get_str_value("symbol").unwrap_or_default();
let name = data.get_str_value("name").unwrap_or_default();
Self {
format,
scale,
symbol,
name,
}
} }
} }
impl From<NumberTypeOption> for TypeOptionData { impl From<NumberTypeOption> for TypeOptionData {
fn from(data: NumberTypeOption) -> Self { fn from(data: NumberTypeOption) -> Self {
TypeOptionDataBuilder::new() TypeOptionDataBuilder::from([
.insert_i64_value("format", data.format.value()) ("format".into(), data.format.value().into()),
.insert_i64_value("scale", data.scale as i64) ("scale".into(), data.scale.into()),
.insert_str_value("name", data.name) ("name".into(), data.name.into()),
.insert_str_value("symbol", data.symbol) ("symbol".into(), data.symbol.into()),
.build() ])
} }
} }
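NumberTypeOption drops the field-by-field reads entirely: the TypeOptionData map is lifted into a collab Any value and deserialized through serde (collab::preclude::encoding::serde::from_any), with #[serde(default)] covering missing keys and a small custom deserializer mapping the stored i64 back onto NumberFormat. A reduced sketch of that pattern; DemoOption and DemoFormat are made-up stand-ins, and the fallback to Default is this sketch's more defensive variant of the unwrap in the diff:

use collab::preclude::encoding::serde::from_any;
use collab::preclude::Any;
use collab_database::fields::TypeOptionData;
use serde::{Deserialize, Deserializer};

// Stand-in for NumberTypeOption; the format is persisted as an i64 discriminant.
#[derive(Debug, Default, Deserialize)]
struct DemoOption {
  #[serde(default, deserialize_with = "format_from_i64")]
  format: DemoFormat,
  #[serde(default)]
  scale: u32,
}

#[derive(Debug, Default)]
enum DemoFormat {
  #[default]
  Plain,
  Currency,
}

impl From<i64> for DemoFormat {
  fn from(value: i64) -> Self {
    match value {
      1 => DemoFormat::Currency,
      _ => DemoFormat::Plain,
    }
  }
}

// Maps the stored i64 back onto the enum during serde deserialization.
fn format_from_i64<'de, D>(deserializer: D) -> Result<DemoFormat, D::Error>
where
  D: Deserializer<'de>,
{
  let value = i64::deserialize(deserializer)?;
  Ok(DemoFormat::from(value))
}

fn demo_from_type_option(data: TypeOptionData) -> DemoOption {
  // Any::from(data) turns the map into collab's Any tree; from_any then
  // drives serde over it.
  let option: DemoOption = from_any(&Any::from(data)).unwrap_or_default();
  option
}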

View File

@ -1,6 +1,6 @@
use collab::util::AnyMapExt;
use std::cmp::Ordering; use std::cmp::Ordering;
use collab::core::any_map::AnyMapExtension;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell; use collab_database::rows::Cell;
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
@ -23,16 +23,14 @@ pub struct RelationTypeOption {
impl From<TypeOptionData> for RelationTypeOption { impl From<TypeOptionData> for RelationTypeOption {
fn from(value: TypeOptionData) -> Self { fn from(value: TypeOptionData) -> Self {
let database_id = value.get_str_value("database_id").unwrap_or_default(); let database_id: String = value.get_as("database_id").unwrap_or_default();
Self { database_id } Self { database_id }
} }
} }
impl From<RelationTypeOption> for TypeOptionData { impl From<RelationTypeOption> for TypeOptionData {
fn from(value: RelationTypeOption) -> Self { fn from(value: RelationTypeOption) -> Self {
TypeOptionDataBuilder::new() TypeOptionDataBuilder::from([("database_id".into(), value.database_id.into())])
.insert_str_value("database_id", value.database_id)
.build()
} }
} }
@ -57,7 +55,7 @@ impl CellDataChangeset for RelationTypeOption {
return Ok(((&cell_data).into(), cell_data)); return Ok(((&cell_data).into(), cell_data));
} }
let cell_data: RelationCellData = cell.unwrap().as_ref().into(); let cell_data: RelationCellData = cell.as_ref().unwrap().into();
let mut row_ids = cell_data.row_ids.clone(); let mut row_ids = cell_data.row_ids.clone();
for inserted in changeset.inserted_row_ids.iter() { for inserted in changeset.inserted_row_ids.iter() {
if !row_ids.iter().any(|row_id| row_id == inserted) { if !row_ids.iter().any(|row_id| row_id == inserted) {

View File

@ -40,9 +40,9 @@ impl From<&RelationCellData> for Cell {
.map(|id| Any::String(Arc::from(id.to_string()))) .map(|id| Any::String(Arc::from(id.to_string())))
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
)); ));
new_cell_builder(FieldType::Relation) let mut cell = new_cell_builder(FieldType::Relation);
.insert_any(CELL_DATA, data) cell.insert(CELL_DATA.into(), data);
.build() cell
} }
} }

View File

@ -1,6 +1,6 @@
use collab::util::AnyMapExt;
use std::cmp::Ordering; use std::cmp::Ordering;
use collab::core::any_map::AnyMapExtension;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell; use collab_database::rows::Cell;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -33,8 +33,8 @@ impl TypeOption for MultiSelectTypeOption {
impl From<TypeOptionData> for MultiSelectTypeOption { impl From<TypeOptionData> for MultiSelectTypeOption {
fn from(data: TypeOptionData) -> Self { fn from(data: TypeOptionData) -> Self {
data data
.get_str_value("content") .get_as::<String>("content")
.map(|s| serde_json::from_str::<MultiSelectTypeOption>(&s).unwrap_or_default()) .map(|json| serde_json::from_str::<MultiSelectTypeOption>(&json).unwrap_or_default())
.unwrap_or_default() .unwrap_or_default()
} }
} }
@ -42,9 +42,7 @@ impl From<TypeOptionData> for MultiSelectTypeOption {
impl From<MultiSelectTypeOption> for TypeOptionData { impl From<MultiSelectTypeOption> for TypeOptionData {
fn from(data: MultiSelectTypeOption) -> Self { fn from(data: MultiSelectTypeOption) -> Self {
let content = serde_json::to_string(&data).unwrap_or_default(); let content = serde_json::to_string(&data).unwrap_or_default();
TypeOptionDataBuilder::new() TypeOptionDataBuilder::from([("content".into(), content.into())])
.insert_str_value("content", content)
.build()
} }
} }
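Both select type options keep their whole configuration as a JSON string under a single "content" key, so the conversion collapses to one serde_json round trip plus a single-entry TypeOptionDataBuilder::from. A hedged sketch with a made-up option struct standing in for MultiSelectTypeOption and SingleSelectTypeOption:

use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use serde::{Deserialize, Serialize};

#[derive(Default, Serialize, Deserialize)]
struct DemoSelectTypeOption {
  options: Vec<String>,
}

impl From<TypeOptionData> for DemoSelectTypeOption {
  fn from(data: TypeOptionData) -> Self {
    // Missing key or malformed JSON both fall back to the default option.
    data
      .get_as::<String>("content")
      .map(|json| serde_json::from_str::<DemoSelectTypeOption>(&json).unwrap_or_default())
      .unwrap_or_default()
  }
}

impl From<DemoSelectTypeOption> for TypeOptionData {
  fn from(data: DemoSelectTypeOption) -> Self {
    let content = serde_json::to_string(&data).unwrap_or_default();
    TypeOptionDataBuilder::from([("content".into(), content.into())])
  }
}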

View File

@ -1,6 +1,6 @@
use collab::util::AnyMapExt;
use std::str::FromStr; use std::str::FromStr;
use collab::core::any_map::AnyMapExtension;
use collab_database::rows::{new_cell_builder, Cell}; use collab_database::rows::{new_cell_builder, Cell};
use flowy_error::FlowyError; use flowy_error::FlowyError;
@ -26,9 +26,9 @@ impl SelectOptionIds {
self.0 self.0
} }
pub fn to_cell_data(&self, field_type: FieldType) -> Cell { pub fn to_cell_data(&self, field_type: FieldType) -> Cell {
new_cell_builder(field_type) let mut cell = new_cell_builder(field_type);
.insert_str_value(CELL_DATA, self.to_string()) cell.insert(CELL_DATA.into(), self.to_string().into());
.build() cell
} }
} }
@ -40,7 +40,7 @@ impl TypeOptionCellData for SelectOptionIds {
impl From<&Cell> for SelectOptionIds { impl From<&Cell> for SelectOptionIds {
fn from(cell: &Cell) -> Self { fn from(cell: &Cell) -> Self {
let value = cell.get_str_value(CELL_DATA).unwrap_or_default(); let value: String = cell.get_as(CELL_DATA).unwrap_or_default();
Self::from_str(&value).unwrap_or_default() Self::from_str(&value).unwrap_or_default()
} }
} }

View File

@ -8,7 +8,7 @@ use crate::services::field::{
SelectOptionCellChangeset, SelectOptionIds, SelectTypeOptionSharedAction, SelectOptionCellChangeset, SelectOptionIds, SelectTypeOptionSharedAction,
}; };
use crate::services::sort::SortCondition; use crate::services::sort::SortCondition;
use collab::core::any_map::AnyMapExtension; use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell; use collab_database::rows::Cell;
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
@ -32,7 +32,7 @@ impl TypeOption for SingleSelectTypeOption {
impl From<TypeOptionData> for SingleSelectTypeOption { impl From<TypeOptionData> for SingleSelectTypeOption {
fn from(data: TypeOptionData) -> Self { fn from(data: TypeOptionData) -> Self {
data data
.get_str_value("content") .get_as::<String>("content")
.map(|s| serde_json::from_str::<SingleSelectTypeOption>(&s).unwrap_or_default()) .map(|s| serde_json::from_str::<SingleSelectTypeOption>(&s).unwrap_or_default())
.unwrap_or_default() .unwrap_or_default()
} }
@ -41,9 +41,7 @@ impl From<TypeOptionData> for SingleSelectTypeOption {
impl From<SingleSelectTypeOption> for TypeOptionData { impl From<SingleSelectTypeOption> for TypeOptionData {
fn from(data: SingleSelectTypeOption) -> Self { fn from(data: SingleSelectTypeOption) -> Self {
let content = serde_json::to_string(&data).unwrap_or_default(); let content = serde_json::to_string(&data).unwrap_or_default();
TypeOptionDataBuilder::new() TypeOptionDataBuilder::from([("content".into(), content.into())])
.insert_str_value("content", content)
.build()
} }
} }

View File

@ -7,7 +7,7 @@ use crate::services::field::{
TypeOptionCellDataSerde, TypeOptionTransform, TypeOptionCellDataSerde, TypeOptionTransform,
}; };
use crate::services::sort::SortCondition; use crate::services::sort::SortCondition;
use collab::core::any_map::AnyMapExtension; use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell; use collab_database::rows::Cell;
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
@ -20,16 +20,14 @@ pub struct SummarizationTypeOption {
impl From<TypeOptionData> for SummarizationTypeOption { impl From<TypeOptionData> for SummarizationTypeOption {
fn from(value: TypeOptionData) -> Self { fn from(value: TypeOptionData) -> Self {
let auto_fill = value.get_bool_value("auto_fill").unwrap_or_default(); let auto_fill: bool = value.get_as("auto_fill").unwrap_or_default();
Self { auto_fill } Self { auto_fill }
} }
} }
impl From<SummarizationTypeOption> for TypeOptionData { impl From<SummarizationTypeOption> for TypeOptionData {
fn from(value: SummarizationTypeOption) -> Self { fn from(value: SummarizationTypeOption) -> Self {
TypeOptionDataBuilder::new() TypeOptionDataBuilder::from([("auto_fill".into(), value.auto_fill.into())])
.insert_bool_value("auto_fill", value.auto_fill)
.build()
} }
} }

View File

@ -1,6 +1,6 @@
use crate::entities::FieldType; use crate::entities::FieldType;
use crate::services::field::{TypeOptionCellData, CELL_DATA}; use crate::services::field::{TypeOptionCellData, CELL_DATA};
use collab::core::any_map::AnyMapExtension; use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell}; use collab_database::rows::{new_cell_builder, Cell};
#[derive(Default, Debug, Clone)] #[derive(Default, Debug, Clone)]
@ -21,15 +21,15 @@ impl TypeOptionCellData for SummaryCellData {
impl From<&Cell> for SummaryCellData { impl From<&Cell> for SummaryCellData {
fn from(cell: &Cell) -> Self { fn from(cell: &Cell) -> Self {
Self(cell.get_str_value(CELL_DATA).unwrap_or_default()) Self(cell.get_as::<String>(CELL_DATA).unwrap_or_default())
} }
} }
impl From<SummaryCellData> for Cell { impl From<SummaryCellData> for Cell {
fn from(data: SummaryCellData) -> Self { fn from(data: SummaryCellData) -> Self {
new_cell_builder(FieldType::Summary) let mut cell = new_cell_builder(FieldType::Summary);
.insert_str_value(CELL_DATA, data.0) cell.insert(CELL_DATA.into(), data.0.into());
.build() cell
} }
} }

View File

@ -1,6 +1,6 @@
use collab::util::AnyMapExt;
use std::cmp::Ordering; use std::cmp::Ordering;
use collab::core::any_map::AnyMapExtension;
use collab_database::fields::{Field, TypeOptionData, TypeOptionDataBuilder}; use collab_database::fields::{Field, TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::{new_cell_builder, Cell}; use collab_database::rows::{new_cell_builder, Cell};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -33,16 +33,15 @@ impl TypeOption for RichTextTypeOption {
impl From<TypeOptionData> for RichTextTypeOption { impl From<TypeOptionData> for RichTextTypeOption {
fn from(data: TypeOptionData) -> Self { fn from(data: TypeOptionData) -> Self {
let s = data.get_str_value(CELL_DATA).unwrap_or_default(); Self {
Self { inner: s } inner: data.get_as(CELL_DATA).unwrap_or_default(),
}
} }
} }
impl From<RichTextTypeOption> for TypeOptionData { impl From<RichTextTypeOption> for TypeOptionData {
fn from(data: RichTextTypeOption) -> Self { fn from(data: RichTextTypeOption) -> Self {
TypeOptionDataBuilder::new() TypeOptionDataBuilder::from([(CELL_DATA.into(), data.inner.into())])
.insert_str_value(CELL_DATA, data.inner)
.build()
} }
} }
@ -164,15 +163,15 @@ impl TypeOptionCellData for StringCellData {
impl From<&Cell> for StringCellData { impl From<&Cell> for StringCellData {
fn from(cell: &Cell) -> Self { fn from(cell: &Cell) -> Self {
Self(cell.get_str_value(CELL_DATA).unwrap_or_default()) Self(cell.get_as(CELL_DATA).unwrap_or_default())
} }
} }
impl From<StringCellData> for Cell { impl From<StringCellData> for Cell {
fn from(data: StringCellData) -> Self { fn from(data: StringCellData) -> Self {
new_cell_builder(FieldType::RichText) let mut cell = new_cell_builder(FieldType::RichText);
.insert_str_value(CELL_DATA, data.0) cell.insert(CELL_DATA.into(), data.0.into());
.build() cell
} }
} }

View File

@ -29,7 +29,7 @@ impl From<TypeOptionData> for TimeTypeOption {
impl From<TimeTypeOption> for TypeOptionData { impl From<TimeTypeOption> for TypeOptionData {
fn from(_data: TimeTypeOption) -> Self { fn from(_data: TimeTypeOption) -> Self {
TypeOptionDataBuilder::new().build() TypeOptionDataBuilder::new()
} }
} }

View File

@ -1,6 +1,6 @@
use crate::entities::FieldType; use crate::entities::FieldType;
use crate::services::field::{TypeOptionCellData, CELL_DATA}; use crate::services::field::{TypeOptionCellData, CELL_DATA};
use collab::core::any_map::AnyMapExtension; use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell}; use collab_database::rows::{new_cell_builder, Cell};
#[derive(Clone, Debug, Default)] #[derive(Clone, Debug, Default)]
@ -16,7 +16,7 @@ impl From<&Cell> for TimeCellData {
fn from(cell: &Cell) -> Self { fn from(cell: &Cell) -> Self {
Self( Self(
cell cell
.get_str_value(CELL_DATA) .get_as::<String>(CELL_DATA)
.and_then(|data| data.parse::<i64>().ok()), .and_then(|data| data.parse::<i64>().ok()),
) )
} }
@ -40,8 +40,8 @@ impl ToString for TimeCellData {
impl From<&TimeCellData> for Cell { impl From<&TimeCellData> for Cell {
fn from(data: &TimeCellData) -> Self { fn from(data: &TimeCellData) -> Self {
new_cell_builder(FieldType::Time) let mut cell = new_cell_builder(FieldType::Time);
.insert_str_value(CELL_DATA, data.to_string()) cell.insert(CELL_DATA.into(), data.to_string().into());
.build() cell
} }
} }

Some files were not shown because too many files have changed in this diff.