Mirror of https://github.com/AppFlowy-IO/AppFlowy.git (synced 2024-08-30 18:12:39 +00:00)
move to latest appflowy collab version (#5894)
* chore: move to latest appflowy collab version
* chore: filter mapping
* chore: remove mutex folder
* chore: cleanup borrow checker issues
* chore: fixed flowy user crate compilation errors
* chore: removed parking lot crate
* chore: adjusting non locking approach
* chore: remove with folder method
* chore: fix folder manager
* chore: fixed workspace database compilation errors
* chore: initialize database plugins
* chore: fix locks in flowy core
* chore: remove supabase
* chore: async traits
* chore: add mutexes in dart ffi
* chore: post rebase fixes
* chore: remove supabase dart code
* chore: fix deadlock
* chore: fix page_id is empty
* chore: use data source to init collab
* chore: fix user awareness test
* chore: fix database deadlock
* fix: initialize user awareness
* chore: fix open workspace test
* chore: fix import csv
* chore: fix update row meta deadlock
* chore: fix document size test
* fix: timestamp set/get type convert
* fix: calculation
* chore: revert Arc to Rc
* chore: attach plugin to database and database row
* chore: async get row
* chore: clippy
* chore: fix tauri build
* chore: clippy
* fix: duplicate view deadlock
* chore: fmt
* chore: tauri build

Co-authored-by: nathan <nathan@appflowy.io>
Parent: c2d7c5360d
Commit: fd5299a13d
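Several of the commit-message items above ("removed parking lot crate", "adjusting non locking approach", "async traits") describe one recurring migration pattern: replacing synchronous parking_lot locks with async-aware locks so that a guard can be awaited and, when necessary, held across await points. The sketch below is only a generic illustration of that pattern under assumed names; it is not code from this commit, and the Folder/FolderManager/FolderService types, the tokio::sync::RwLock choice, and the async-trait usage are assumptions made for the example.

// Illustrative sketch only (hypothetical types, not AppFlowy's actual code):
// a sync parking_lot::RwLock replaced with tokio::sync::RwLock behind an
// async trait, which is the general shape of the "non locking"/async changes
// listed in the commit message.
use std::sync::Arc;

use async_trait::async_trait;
use tokio::sync::RwLock; // was parking_lot::RwLock in the "before" version

#[derive(Default)]
struct Folder {
    views: Vec<String>,
}

#[async_trait]
trait FolderService {
    async fn add_view(&self, view: String);
    async fn view_count(&self) -> usize;
}

struct FolderManager {
    folder: Arc<RwLock<Folder>>,
}

#[async_trait]
impl FolderService for FolderManager {
    async fn add_view(&self, view: String) {
        // The async guard may be awaited; a parking_lot guard must never be
        // held across an await point, which is what forces this migration.
        let mut folder = self.folder.write().await;
        folder.views.push(view);
    }

    async fn view_count(&self) -> usize {
        self.folder.read().await.views.len()
    }
}

#[tokio::main]
async fn main() {
    let manager = FolderManager {
        folder: Arc::new(RwLock::new(Folder::default())),
    };
    manager.add_view("Getting started".to_string()).await;
    assert_eq!(manager.view_count().await, 1);
}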
@@ -1,93 +1,93 @@
import 'package:appflowy/env/cloud_env.dart';
import 'package:appflowy/workspace/application/settings/prelude.dart';
import 'package:appflowy/workspace/presentation/settings/pages/settings_account_view.dart';
import 'package:appflowy/workspace/presentation/settings/widgets/setting_supabase_cloud.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:integration_test/integration_test.dart';
// import 'package:appflowy/env/cloud_env.dart';
// import 'package:appflowy/workspace/application/settings/prelude.dart';
// import 'package:appflowy/workspace/presentation/settings/pages/settings_account_view.dart';
// import 'package:appflowy/workspace/presentation/settings/widgets/setting_supabase_cloud.dart';
// import 'package:flutter_test/flutter_test.dart';
// import 'package:integration_test/integration_test.dart';

import '../shared/util.dart';
// import '../shared/util.dart';

void main() {
IntegrationTestWidgetsFlutterBinding.ensureInitialized();
// void main() {
// IntegrationTestWidgetsFlutterBinding.ensureInitialized();

group('supabase auth', () {
testWidgets('sign in with supabase', (tester) async {
await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
await tester.tapGoogleLoginInButton();
await tester.expectToSeeHomePageWithGetStartedPage();
});
// group('supabase auth', () {
// testWidgets('sign in with supabase', (tester) async {
// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
// await tester.tapGoogleLoginInButton();
// await tester.expectToSeeHomePageWithGetStartedPage();
// });

testWidgets('sign out with supabase', (tester) async {
await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
await tester.tapGoogleLoginInButton();
// testWidgets('sign out with supabase', (tester) async {
// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
// await tester.tapGoogleLoginInButton();

// Open the setting page and sign out
await tester.openSettings();
await tester.openSettingsPage(SettingsPage.account);
await tester.logout();
// // Open the setting page and sign out
// await tester.openSettings();
// await tester.openSettingsPage(SettingsPage.account);
// await tester.logout();

// Go to the sign in page again
await tester.pumpAndSettle(const Duration(seconds: 1));
tester.expectToSeeGoogleLoginButton();
});
// // Go to the sign in page again
// await tester.pumpAndSettle(const Duration(seconds: 1));
// tester.expectToSeeGoogleLoginButton();
// });

testWidgets('sign in as anonymous', (tester) async {
await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
await tester.tapSignInAsGuest();
// testWidgets('sign in as anonymous', (tester) async {
// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
// await tester.tapSignInAsGuest();

// should not see the sync setting page when sign in as anonymous
await tester.openSettings();
await tester.openSettingsPage(SettingsPage.account);
// // should not see the sync setting page when sign in as anonymous
// await tester.openSettings();
// await tester.openSettingsPage(SettingsPage.account);

// Scroll to sign-out
await tester.scrollUntilVisible(
find.byType(SignInOutButton),
100,
scrollable: find.findSettingsScrollable(),
);
await tester.tapButton(find.byType(SignInOutButton));
// // Scroll to sign-out
// await tester.scrollUntilVisible(
// find.byType(SignInOutButton),
// 100,
// scrollable: find.findSettingsScrollable(),
// );
// await tester.tapButton(find.byType(SignInOutButton));

tester.expectToSeeGoogleLoginButton();
});
// tester.expectToSeeGoogleLoginButton();
// });

// testWidgets('enable encryption', (tester) async {
// await tester.initializeAppFlowy(cloudType: CloudType.supabase);
// await tester.tapGoogleLoginInButton();
// // testWidgets('enable encryption', (tester) async {
// // await tester.initializeAppFlowy(cloudType: CloudType.supabase);
// // await tester.tapGoogleLoginInButton();

// // Open the setting page and sign out
// await tester.openSettings();
// await tester.openSettingsPage(SettingsPage.cloud);
// // // Open the setting page and sign out
// // await tester.openSettings();
// // await tester.openSettingsPage(SettingsPage.cloud);

// // the switch should be off by default
// tester.assertEnableEncryptSwitchValue(false);
// await tester.toggleEnableEncrypt();
// // // the switch should be off by default
// // tester.assertEnableEncryptSwitchValue(false);
// // await tester.toggleEnableEncrypt();

// // the switch should be on after toggling
// tester.assertEnableEncryptSwitchValue(true);
// // // the switch should be on after toggling
// // tester.assertEnableEncryptSwitchValue(true);

// // the switch can not be toggled back to off
// await tester.toggleEnableEncrypt();
// tester.assertEnableEncryptSwitchValue(true);
// });
// // // the switch can not be toggled back to off
// // await tester.toggleEnableEncrypt();
// // tester.assertEnableEncryptSwitchValue(true);
// // });

testWidgets('enable sync', (tester) async {
await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
await tester.tapGoogleLoginInButton();
// testWidgets('enable sync', (tester) async {
// await tester.initializeAppFlowy(cloudType: AuthenticatorType.supabase);
// await tester.tapGoogleLoginInButton();

// Open the setting page and sign out
await tester.openSettings();
await tester.openSettingsPage(SettingsPage.cloud);
// // Open the setting page and sign out
// await tester.openSettings();
// await tester.openSettingsPage(SettingsPage.cloud);

// the switch should be on by default
tester.assertSupabaseEnableSyncSwitchValue(true);
await tester.toggleEnableSync(SupabaseEnableSync);
// // the switch should be on by default
// tester.assertSupabaseEnableSyncSwitchValue(true);
// await tester.toggleEnableSync(SupabaseEnableSync);

// the switch should be off
tester.assertSupabaseEnableSyncSwitchValue(false);
// // the switch should be off
// tester.assertSupabaseEnableSyncSwitchValue(false);

// the switch should be on after toggling
await tester.toggleEnableSync(SupabaseEnableSync);
tester.assertSupabaseEnableSyncSwitchValue(true);
});
});
}
// // the switch should be on after toggling
// await tester.toggleEnableSync(SupabaseEnableSync);
// tester.assertSupabaseEnableSyncSwitchValue(true);
// });
// });
// }
@@ -2,7 +2,6 @@ import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/user/presentation/screens/sign_in_screen/widgets/widgets.dart';
import 'package:appflowy/workspace/presentation/settings/pages/settings_account_view.dart';
import 'package:appflowy/workspace/presentation/settings/widgets/setting_appflowy_cloud.dart';
import 'package:appflowy/workspace/presentation/settings/widgets/setting_supabase_cloud.dart';
import 'package:appflowy/workspace/presentation/widgets/toggle/toggle.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter/material.dart';
@@ -52,26 +51,6 @@ extension AppFlowyAuthTest on WidgetTester {
assert(isSwitched == value);
}

void assertEnableEncryptSwitchValue(bool value) {
assertSwitchValue(
find.descendant(
of: find.byType(EnableEncrypt),
matching: find.byWidgetPredicate((widget) => widget is Switch),
),
value,
);
}

void assertSupabaseEnableSyncSwitchValue(bool value) {
assertSwitchValue(
find.descendant(
of: find.byType(SupabaseEnableSync),
matching: find.byWidgetPredicate((widget) => widget is Switch),
),
value,
);
}

void assertAppFlowyCloudEnableSyncSwitchValue(bool value) {
assertToggleValue(
find.descendant(
@@ -82,15 +61,6 @@ extension AppFlowyAuthTest on WidgetTester {
);
}

Future<void> toggleEnableEncrypt() async {
final finder = find.descendant(
of: find.byType(EnableEncrypt),
matching: find.byWidgetPredicate((widget) => widget is Switch),
);

await tapButton(finder);
}

Future<void> toggleEnableSync(Type syncButton) async {
final finder = find.descendant(
of: find.byType(syncButton),
@@ -7,7 +7,6 @@ import 'package:appflowy/startup/entry_point.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/user/application/auth/af_cloud_mock_auth_service.dart';
import 'package:appflowy/user/application/auth/auth_service.dart';
import 'package:appflowy/user/application/auth/supabase_mock_auth_service.dart';
import 'package:appflowy/user/presentation/presentation.dart';
import 'package:appflowy/user/presentation/screens/sign_in_screen/widgets/widgets.dart';
import 'package:appflowy/workspace/application/settings/prelude.dart';
@@ -55,8 +54,6 @@ extension AppFlowyTestBase on WidgetTester {
switch (cloudType) {
case AuthenticatorType.local:
break;
case AuthenticatorType.supabase:
break;
case AuthenticatorType.appflowyCloudSelfHost:
rustEnvs["GOTRUE_ADMIN_EMAIL"] = "admin@example.com";
rustEnvs["GOTRUE_ADMIN_PASSWORD"] = "password";
@@ -75,13 +72,6 @@ extension AppFlowyTestBase on WidgetTester {
case AuthenticatorType.local:
await useLocalServer();
break;
case AuthenticatorType.supabase:
await useTestSupabaseCloud();
getIt.unregister<AuthService>();
getIt.registerFactory<AuthService>(
() => SupabaseMockAuthService(),
);
break;
case AuthenticatorType.appflowyCloudSelfHost:
await useTestSelfHostedAppFlowyCloud();
getIt.unregister<AuthService>();
@@ -242,13 +232,6 @@ extension AppFlowyFinderTestBase on CommonFinders {
}
}

Future<void> useTestSupabaseCloud() async {
await useSupabaseCloud(
url: TestEnv.supabaseUrl,
anonKey: TestEnv.supabaseAnonKey,
);
}

Future<void> useTestSelfHostedAppFlowyCloud() async {
await useSelfHostedAppFlowyCloudWithURL(TestEnv.afCloudUrl);
}
@@ -174,7 +174,7 @@ SPEC CHECKSUMS:
file_picker: 09aa5ec1ab24135ccd7a1621c46c84134bfd6655
flowy_infra_ui: 0455e1fa8c51885aa1437848e361e99419f34ebc
Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7
fluttertoast: e9a18c7be5413da53898f660530c56f35edfba9c
fluttertoast: 723e187574b149e68e63ca4d39b837586b903cfa
image_picker_ios: 99dfe1854b4fa34d0364e74a78448a0151025425
integration_test: ce0a3ffa1de96d1a89ca0ac26fca7ea18a749ef4
irondash_engine_context: 3458bf979b90d616ffb8ae03a150bafe2e860cc9
@@ -196,4 +196,4 @@ SPEC CHECKSUMS:

PODFILE CHECKSUM: d0d9b4ff572d8695c38eb3f9b490f55cdfc57eca

COCOAPODS: 1.15.2
COCOAPODS: 1.11.3
@@ -13,7 +13,6 @@ class AppFlowyConfiguration {
required this.device_id,
required this.platform,
required this.authenticator_type,
required this.supabase_config,
required this.appflowy_cloud_config,
required this.envs,
});
@@ -28,41 +27,12 @@ class AppFlowyConfiguration {
final String device_id;
final String platform;
final int authenticator_type;
final SupabaseConfiguration supabase_config;
final AppFlowyCloudConfiguration appflowy_cloud_config;
final Map<String, String> envs;

Map<String, dynamic> toJson() => _$AppFlowyConfigurationToJson(this);
}

@JsonSerializable()
class SupabaseConfiguration {
SupabaseConfiguration({
required this.url,
required this.anon_key,
});

factory SupabaseConfiguration.fromJson(Map<String, dynamic> json) =>
_$SupabaseConfigurationFromJson(json);

/// Indicates whether the sync feature is enabled.
final String url;
final String anon_key;

Map<String, dynamic> toJson() => _$SupabaseConfigurationToJson(this);

static SupabaseConfiguration defaultConfig() {
return SupabaseConfiguration(
url: '',
anon_key: '',
);
}

bool get isValid {
return url.isNotEmpty && anon_key.isNotEmpty;
}
}

@JsonSerializable()
class AppFlowyCloudConfiguration {
AppFlowyCloudConfiguration({
frontend/appflowy_flutter/lib/env/cloud_env.dart (67 changes, vendored)
@ -21,9 +21,6 @@ Future<void> _setAuthenticatorType(AuthenticatorType ty) async {
|
||||
case AuthenticatorType.local:
|
||||
await getIt<KeyValueStorage>().set(KVKeys.kCloudType, 0.toString());
|
||||
break;
|
||||
case AuthenticatorType.supabase:
|
||||
await getIt<KeyValueStorage>().set(KVKeys.kCloudType, 1.toString());
|
||||
break;
|
||||
case AuthenticatorType.appflowyCloud:
|
||||
await getIt<KeyValueStorage>().set(KVKeys.kCloudType, 2.toString());
|
||||
break;
|
||||
@ -63,8 +60,6 @@ Future<AuthenticatorType> getAuthenticatorType() async {
|
||||
switch (value ?? "0") {
|
||||
case "0":
|
||||
return AuthenticatorType.local;
|
||||
case "1":
|
||||
return AuthenticatorType.supabase;
|
||||
case "2":
|
||||
return AuthenticatorType.appflowyCloud;
|
||||
case "3":
|
||||
@ -93,10 +88,6 @@ Future<AuthenticatorType> getAuthenticatorType() async {
|
||||
/// Returns `false` otherwise.
|
||||
bool get isAuthEnabled {
|
||||
final env = getIt<AppFlowyCloudSharedEnv>();
|
||||
if (env.authenticatorType == AuthenticatorType.supabase) {
|
||||
return env.supabaseConfig.isValid;
|
||||
}
|
||||
|
||||
if (env.authenticatorType.isAppFlowyCloudEnabled) {
|
||||
return env.appflowyCloudConfig.isValid;
|
||||
}
|
||||
@ -104,19 +95,6 @@ bool get isAuthEnabled {
|
||||
return false;
|
||||
}
|
||||
|
||||
/// Checks if Supabase is enabled.
|
||||
///
|
||||
/// This getter evaluates if Supabase should be enabled based on the
|
||||
/// current integration mode and cloud type setting.
|
||||
///
|
||||
/// Returns:
|
||||
/// A boolean value indicating whether Supabase is enabled. It returns `true`
|
||||
/// if the application is in release or develop mode and the current cloud type
|
||||
/// is `CloudType.supabase`. Otherwise, it returns `false`.
|
||||
bool get isSupabaseEnabled {
|
||||
return currentCloudType().isSupabaseEnabled;
|
||||
}
|
||||
|
||||
/// Determines if AppFlowy Cloud is enabled.
|
||||
bool get isAppFlowyCloudEnabled {
|
||||
return currentCloudType().isAppFlowyCloudEnabled;
|
||||
@ -124,7 +102,6 @@ bool get isAppFlowyCloudEnabled {
|
||||
|
||||
enum AuthenticatorType {
|
||||
local,
|
||||
supabase,
|
||||
appflowyCloud,
|
||||
appflowyCloudSelfHost,
|
||||
// The 'appflowyCloudDevelop' type is used for develop purposes only.
|
||||
@ -137,14 +114,10 @@ enum AuthenticatorType {
|
||||
this == AuthenticatorType.appflowyCloudDevelop ||
|
||||
this == AuthenticatorType.appflowyCloud;
|
||||
|
||||
bool get isSupabaseEnabled => this == AuthenticatorType.supabase;
|
||||
|
||||
int get value {
|
||||
switch (this) {
|
||||
case AuthenticatorType.local:
|
||||
return 0;
|
||||
case AuthenticatorType.supabase:
|
||||
return 1;
|
||||
case AuthenticatorType.appflowyCloud:
|
||||
return 2;
|
||||
case AuthenticatorType.appflowyCloudSelfHost:
|
||||
@ -158,8 +131,6 @@ enum AuthenticatorType {
|
||||
switch (value) {
|
||||
case 0:
|
||||
return AuthenticatorType.local;
|
||||
case 1:
|
||||
return AuthenticatorType.supabase;
|
||||
case 2:
|
||||
return AuthenticatorType.appflowyCloud;
|
||||
case 3:
|
||||
@ -197,25 +168,15 @@ Future<void> useLocalServer() async {
|
||||
await _setAuthenticatorType(AuthenticatorType.local);
|
||||
}
|
||||
|
||||
Future<void> useSupabaseCloud({
|
||||
required String url,
|
||||
required String anonKey,
|
||||
}) async {
|
||||
await _setAuthenticatorType(AuthenticatorType.supabase);
|
||||
await setSupabaseServer(url, anonKey);
|
||||
}
|
||||
|
||||
/// Use getIt<AppFlowyCloudSharedEnv>() to get the shared environment.
|
||||
class AppFlowyCloudSharedEnv {
|
||||
AppFlowyCloudSharedEnv({
|
||||
required AuthenticatorType authenticatorType,
|
||||
required this.appflowyCloudConfig,
|
||||
required this.supabaseConfig,
|
||||
}) : _authenticatorType = authenticatorType;
|
||||
|
||||
final AuthenticatorType _authenticatorType;
|
||||
final AppFlowyCloudConfiguration appflowyCloudConfig;
|
||||
final SupabaseConfiguration supabaseConfig;
|
||||
|
||||
AuthenticatorType get authenticatorType => _authenticatorType;
|
||||
|
||||
@ -229,10 +190,6 @@ class AppFlowyCloudSharedEnv {
|
||||
? await getAppFlowyCloudConfig(authenticatorType)
|
||||
: AppFlowyCloudConfiguration.defaultConfig();
|
||||
|
||||
final supabaseCloudConfig = authenticatorType.isSupabaseEnabled
|
||||
? await getSupabaseCloudConfig()
|
||||
: SupabaseConfiguration.defaultConfig();
|
||||
|
||||
// In the backend, the value '2' represents the use of AppFlowy Cloud. However, in the frontend,
|
||||
// we distinguish between [AuthenticatorType.appflowyCloudSelfHost] and [AuthenticatorType.appflowyCloud].
|
||||
// When the cloud type is [AuthenticatorType.appflowyCloudSelfHost] in the frontend, it should be
|
||||
@ -244,7 +201,6 @@ class AppFlowyCloudSharedEnv {
|
||||
return AppFlowyCloudSharedEnv(
|
||||
authenticatorType: authenticatorType,
|
||||
appflowyCloudConfig: appflowyCloudConfig,
|
||||
supabaseConfig: supabaseCloudConfig,
|
||||
);
|
||||
} else {
|
||||
// Using the cloud settings from the .env file.
|
||||
@ -257,7 +213,6 @@ class AppFlowyCloudSharedEnv {
|
||||
return AppFlowyCloudSharedEnv(
|
||||
authenticatorType: AuthenticatorType.fromValue(Env.authenticatorType),
|
||||
appflowyCloudConfig: appflowyCloudConfig,
|
||||
supabaseConfig: SupabaseConfiguration.defaultConfig(),
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -265,8 +220,7 @@ class AppFlowyCloudSharedEnv {
|
||||
@override
|
||||
String toString() {
|
||||
return 'authenticator: $_authenticatorType\n'
|
||||
'appflowy: ${appflowyCloudConfig.toJson()}\n'
|
||||
'supabase: ${supabaseConfig.toJson()})\n';
|
||||
'appflowy: ${appflowyCloudConfig.toJson()}\n';
|
||||
}
|
||||
}
|
||||
|
||||
@ -354,22 +308,3 @@ Future<void> setSupabaseServer(
|
||||
await getIt<KeyValueStorage>().set(KVKeys.kSupabaseAnonKey, anonKey);
|
||||
}
|
||||
}
|
||||
|
||||
Future<SupabaseConfiguration> getSupabaseCloudConfig() async {
|
||||
final url = await _getSupabaseUrl();
|
||||
final anonKey = await _getSupabaseAnonKey();
|
||||
return SupabaseConfiguration(
|
||||
url: url,
|
||||
anon_key: anonKey,
|
||||
);
|
||||
}
|
||||
|
||||
Future<String> _getSupabaseUrl() async {
|
||||
final result = await getIt<KeyValueStorage>().get(KVKeys.kSupabaseURL);
|
||||
return result ?? '';
|
||||
}
|
||||
|
||||
Future<String> _getSupabaseAnonKey() async {
|
||||
final result = await getIt<KeyValueStorage>().get(KVKeys.kSupabaseAnonKey);
|
||||
return result ?? '';
|
||||
}
|
||||
|
@@ -37,6 +37,14 @@ class RowBackendService {
return DatabaseEventCreateRow(payload).send();
}

Future<FlowyResult<void, FlowyError>> initRow(RowId rowId) async {
final payload = RowIdPB()
..viewId = viewId
..rowId = rowId;

return DatabaseEventInitRow(payload).send();
}

Future<FlowyResult<RowMetaPB, FlowyError>> createRowBefore(RowId rowId) {
return createRow(
viewId: viewId,
@@ -23,6 +23,8 @@ class RowBloc extends Bloc<RowEvent, RowState> {
}) : _rowBackendSvc = RowBackendService(viewId: viewId),
_rowController = rowController,
super(RowState.initial()) {
_rowBackendSvc.initRow(rowId);

_dispatch();
_startListening();
_init();
@ -12,7 +12,6 @@ import 'package:appflowy/startup/tasks/appflowy_cloud_task.dart';
|
||||
import 'package:appflowy/user/application/ai_service.dart';
|
||||
import 'package:appflowy/user/application/auth/af_cloud_auth_service.dart';
|
||||
import 'package:appflowy/user/application/auth/auth_service.dart';
|
||||
import 'package:appflowy/user/application/auth/supabase_auth_service.dart';
|
||||
import 'package:appflowy/user/application/prelude.dart';
|
||||
import 'package:appflowy/user/application/reminder/reminder_bloc.dart';
|
||||
import 'package:appflowy/user/application/user_listener.dart';
|
||||
@ -124,9 +123,6 @@ void _resolveUserDeps(GetIt getIt, IntegrationMode mode) {
|
||||
),
|
||||
);
|
||||
break;
|
||||
case AuthenticatorType.supabase:
|
||||
getIt.registerFactory<AuthService>(() => SupabaseAuthService());
|
||||
break;
|
||||
case AuthenticatorType.appflowyCloud:
|
||||
case AuthenticatorType.appflowyCloudSelfHost:
|
||||
case AuthenticatorType.appflowyCloudDevelop:
|
||||
|
@ -133,7 +133,6 @@ class FlowyRunner {
|
||||
// It is unable to get the device information from the test environment.
|
||||
const ApplicationInfoTask(),
|
||||
const HotKeyTask(),
|
||||
if (isSupabaseEnabled) InitSupabaseTask(),
|
||||
if (isAppFlowyCloudEnabled) InitAppFlowyCloudTask(),
|
||||
const InitAppWidgetTask(),
|
||||
const InitPlatformServiceTask(),
|
||||
|
@ -7,7 +7,6 @@ import 'package:app_links/app_links.dart';
|
||||
import 'package:appflowy/env/cloud_env.dart';
|
||||
import 'package:appflowy/startup/startup.dart';
|
||||
import 'package:appflowy/startup/tasks/app_widget.dart';
|
||||
import 'package:appflowy/startup/tasks/supabase_task.dart';
|
||||
import 'package:appflowy/user/application/auth/auth_error.dart';
|
||||
import 'package:appflowy/user/application/auth/auth_service.dart';
|
||||
import 'package:appflowy/user/application/auth/device_id.dart';
|
||||
@ -22,6 +21,8 @@ import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
|
||||
import 'package:appflowy_result/appflowy_result.dart';
|
||||
import 'package:url_protocol/url_protocol.dart';
|
||||
|
||||
const appflowyDeepLinkSchema = 'appflowy-flutter';
|
||||
|
||||
class AppFlowyCloudDeepLink {
|
||||
AppFlowyCloudDeepLink() {
|
||||
if (_deeplinkSubscription == null) {
|
||||
|
@ -12,5 +12,4 @@ export 'platform_service.dart';
|
||||
export 'recent_service_task.dart';
|
||||
export 'rust_sdk.dart';
|
||||
export 'sentry.dart';
|
||||
export 'supabase_task.dart';
|
||||
export 'windows.dart';
|
||||
|
@ -63,7 +63,6 @@ AppFlowyConfiguration _makeAppFlowyConfiguration(
|
||||
device_id: deviceId,
|
||||
platform: Platform.operatingSystem,
|
||||
authenticator_type: env.authenticatorType.value,
|
||||
supabase_config: env.supabaseConfig,
|
||||
appflowy_cloud_config: env.appflowyCloudConfig,
|
||||
envs: rustEnvs,
|
||||
);
|
||||
|
@ -1,118 +0,0 @@
|
||||
import 'dart:async';
|
||||
import 'dart:io';
|
||||
|
||||
import 'package:appflowy/env/cloud_env.dart';
|
||||
import 'package:appflowy/user/application/supabase_realtime.dart';
|
||||
import 'package:appflowy/workspace/application/settings/application_data_storage.dart';
|
||||
import 'package:flutter/foundation.dart';
|
||||
import 'package:hive_flutter/hive_flutter.dart';
|
||||
import 'package:path/path.dart' as p;
|
||||
import 'package:supabase_flutter/supabase_flutter.dart';
|
||||
import 'package:url_protocol/url_protocol.dart';
|
||||
|
||||
import '../startup.dart';
|
||||
|
||||
// ONLY supports in macOS and Windows now.
|
||||
//
|
||||
// If you need to update the schema, please update the following files:
|
||||
// - appflowy_flutter/macos/Runner/Info.plist (macOS)
|
||||
// - the callback url in Supabase dashboard
|
||||
const appflowyDeepLinkSchema = 'appflowy-flutter';
|
||||
const supabaseLoginCallback = '$appflowyDeepLinkSchema://login-callback';
|
||||
|
||||
const hiveBoxName = 'appflowy_supabase_authentication';
|
||||
|
||||
// Used to store the session of the supabase in case of the user switch the different folder.
|
||||
Supabase? supabase;
|
||||
SupabaseRealtimeService? realtimeService;
|
||||
|
||||
class InitSupabaseTask extends LaunchTask {
|
||||
@override
|
||||
Future<void> initialize(LaunchContext context) async {
|
||||
if (!isSupabaseEnabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
await supabase?.dispose();
|
||||
supabase = null;
|
||||
final initializedSupabase = await Supabase.initialize(
|
||||
url: getIt<AppFlowyCloudSharedEnv>().supabaseConfig.url,
|
||||
anonKey: getIt<AppFlowyCloudSharedEnv>().supabaseConfig.anon_key,
|
||||
debug: kDebugMode,
|
||||
authOptions: const FlutterAuthClientOptions(
|
||||
localStorage: SupabaseLocalStorage(),
|
||||
),
|
||||
);
|
||||
|
||||
if (realtimeService != null) {
|
||||
await realtimeService?.dispose();
|
||||
realtimeService = null;
|
||||
}
|
||||
realtimeService = SupabaseRealtimeService(supabase: initializedSupabase);
|
||||
|
||||
supabase = initializedSupabase;
|
||||
|
||||
if (Platform.isWindows) {
|
||||
// register deep link for Windows
|
||||
registerProtocolHandler(appflowyDeepLinkSchema);
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> dispose() async {
|
||||
await realtimeService?.dispose();
|
||||
realtimeService = null;
|
||||
await supabase?.dispose();
|
||||
supabase = null;
|
||||
}
|
||||
}
|
||||
|
||||
/// customize the supabase auth storage
|
||||
///
|
||||
/// We don't use the default one because it always save the session in the document directory.
|
||||
/// When we switch to the different folder, the session still exists.
|
||||
class SupabaseLocalStorage extends LocalStorage {
|
||||
const SupabaseLocalStorage();
|
||||
|
||||
@override
|
||||
Future<void> initialize() async {
|
||||
HiveCipher? encryptionCipher;
|
||||
|
||||
// customize the path for Hive
|
||||
final path = await getIt<ApplicationDataStorage>().getPath();
|
||||
Hive.init(p.join(path, 'supabase_auth'));
|
||||
await Hive.openBox(
|
||||
hiveBoxName,
|
||||
encryptionCipher: encryptionCipher,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Future<bool> hasAccessToken() {
|
||||
return Future.value(
|
||||
Hive.box(hiveBoxName).containsKey(
|
||||
supabasePersistSessionKey,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Future<String?> accessToken() {
|
||||
return Future.value(
|
||||
Hive.box(hiveBoxName).get(supabasePersistSessionKey) as String?,
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> removePersistedSession() {
|
||||
return Hive.box(hiveBoxName).delete(supabasePersistSessionKey);
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> persistSession(String persistSessionString) {
|
||||
return Hive.box(hiveBoxName).put(
|
||||
supabasePersistSessionKey,
|
||||
persistSessionString,
|
||||
);
|
||||
}
|
||||
}
|
@@ -20,7 +20,7 @@ class AppFlowyCloudMockAuthService implements AuthService {
final String userEmail;

final BackendAuthService _appFlowyAuthService =
BackendAuthService(AuthenticatorPB.Supabase);
BackendAuthService(AuthenticatorPB.AppFlowyCloud);

@override
Future<FlowyResult<UserProfilePB, FlowyError>> signUp({
@ -1,252 +0,0 @@
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:appflowy/startup/tasks/prelude.dart';
|
||||
import 'package:appflowy/user/application/auth/auth_service.dart';
|
||||
import 'package:appflowy/user/application/auth/backend_auth_service.dart';
|
||||
import 'package:appflowy/user/application/auth/device_id.dart';
|
||||
import 'package:appflowy/user/application/user_service.dart';
|
||||
import 'package:appflowy_backend/dispatch/dispatch.dart';
|
||||
import 'package:appflowy_backend/log.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
|
||||
import 'package:appflowy_result/appflowy_result.dart';
|
||||
import 'package:flutter/foundation.dart';
|
||||
import 'package:supabase_flutter/supabase_flutter.dart';
|
||||
|
||||
import 'auth_error.dart';
|
||||
|
||||
class SupabaseAuthService implements AuthService {
|
||||
SupabaseAuthService();
|
||||
|
||||
SupabaseClient get _client => Supabase.instance.client;
|
||||
GoTrueClient get _auth => _client.auth;
|
||||
|
||||
final BackendAuthService _backendAuthService = BackendAuthService(
|
||||
AuthenticatorPB.Supabase,
|
||||
);
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signUp({
|
||||
required String name,
|
||||
required String email,
|
||||
required String password,
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
// fetch the uuid from supabase.
|
||||
final response = await _auth.signUp(
|
||||
email: email,
|
||||
password: password,
|
||||
);
|
||||
final uuid = response.user?.id;
|
||||
if (uuid == null) {
|
||||
return FlowyResult.failure(AuthError.supabaseSignUpError);
|
||||
}
|
||||
// assign the uuid to our backend service.
|
||||
// and will transfer this logic to backend later.
|
||||
return _backendAuthService.signUp(
|
||||
name: name,
|
||||
email: email,
|
||||
password: password,
|
||||
params: {
|
||||
AuthServiceMapKeys.uuid: uuid,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signInWithEmailPassword({
|
||||
required String email,
|
||||
required String password,
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
try {
|
||||
final response = await _auth.signInWithPassword(
|
||||
email: email,
|
||||
password: password,
|
||||
);
|
||||
final uuid = response.user?.id;
|
||||
if (uuid == null) {
|
||||
return FlowyResult.failure(AuthError.supabaseSignInError);
|
||||
}
|
||||
return _backendAuthService.signInWithEmailPassword(
|
||||
email: email,
|
||||
password: password,
|
||||
params: {
|
||||
AuthServiceMapKeys.uuid: uuid,
|
||||
},
|
||||
);
|
||||
} on AuthException catch (e) {
|
||||
Log.error(e);
|
||||
return FlowyResult.failure(AuthError.supabaseSignInError);
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signUpWithOAuth({
|
||||
required String platform,
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
// Before signing in, sign out any existing users. Otherwise, the callback will be triggered even if the user doesn't click the 'Sign In' button on the website
|
||||
if (_auth.currentUser != null) {
|
||||
await _auth.signOut();
|
||||
}
|
||||
|
||||
final provider = platform.toProvider();
|
||||
final completer = supabaseLoginCompleter(
|
||||
onSuccess: (userId, userEmail) async {
|
||||
return _setupAuth(
|
||||
map: {
|
||||
AuthServiceMapKeys.uuid: userId,
|
||||
AuthServiceMapKeys.email: userEmail,
|
||||
AuthServiceMapKeys.deviceId: await getDeviceId(),
|
||||
},
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
final response = await _auth.signInWithOAuth(
|
||||
provider,
|
||||
queryParams: queryParamsForProvider(provider),
|
||||
redirectTo: supabaseLoginCallback,
|
||||
);
|
||||
if (!response) {
|
||||
completer.complete(
|
||||
FlowyResult.failure(AuthError.supabaseSignInWithOauthError),
|
||||
);
|
||||
}
|
||||
return completer.future;
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> signOut() async {
|
||||
await _auth.signOut();
|
||||
await _backendAuthService.signOut();
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signUpAsGuest({
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
// supabase don't support guest login.
|
||||
// so, just forward to our backend.
|
||||
return _backendAuthService.signUpAsGuest();
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signInWithMagicLink({
|
||||
required String email,
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
final completer = supabaseLoginCompleter(
|
||||
onSuccess: (userId, userEmail) async {
|
||||
return _setupAuth(
|
||||
map: {
|
||||
AuthServiceMapKeys.uuid: userId,
|
||||
AuthServiceMapKeys.email: userEmail,
|
||||
AuthServiceMapKeys.deviceId: await getDeviceId(),
|
||||
},
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
await _auth.signInWithOtp(
|
||||
email: email,
|
||||
emailRedirectTo: kIsWeb ? null : supabaseLoginCallback,
|
||||
);
|
||||
return completer.future;
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> getUser() async {
|
||||
return UserBackendService.getCurrentUserProfile();
|
||||
}
|
||||
|
||||
Future<FlowyResult<User, FlowyError>> getSupabaseUser() async {
|
||||
final user = _auth.currentUser;
|
||||
if (user == null) {
|
||||
return FlowyResult.failure(AuthError.supabaseGetUserError);
|
||||
}
|
||||
return FlowyResult.success(user);
|
||||
}
|
||||
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> _setupAuth({
|
||||
required Map<String, String> map,
|
||||
}) async {
|
||||
final payload = OauthSignInPB(
|
||||
authenticator: AuthenticatorPB.Supabase,
|
||||
map: map,
|
||||
);
|
||||
|
||||
return UserEventOauthSignIn(payload).send().then((value) => value);
|
||||
}
|
||||
}
|
||||
|
||||
extension on String {
|
||||
OAuthProvider toProvider() {
|
||||
switch (this) {
|
||||
case 'github':
|
||||
return OAuthProvider.github;
|
||||
case 'google':
|
||||
return OAuthProvider.google;
|
||||
case 'discord':
|
||||
return OAuthProvider.discord;
|
||||
default:
|
||||
throw UnimplementedError();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a completer that listens to Supabase authentication state changes and
|
||||
/// completes when a user signs in.
|
||||
///
|
||||
/// This function sets up a listener on Supabase's authentication state. When a user
|
||||
/// signs in, it triggers the provided [onSuccess] callback with the user's `id` and
|
||||
/// `email`. Once the [onSuccess] callback is executed and a response is received,
|
||||
/// the completer completes with the response, and the listener is canceled.
|
||||
///
|
||||
/// Parameters:
|
||||
/// - [onSuccess]: A callback function that's executed when a user signs in. It
|
||||
/// should take in a user's `id` and `email` and return a `Future` containing either
|
||||
/// a `FlowyError` or a `UserProfilePB`.
|
||||
///
|
||||
/// Returns:
|
||||
/// A completer of type `FlowyResult<UserProfilePB, FlowyError>`. This completer completes
|
||||
/// with the response from the [onSuccess] callback when a user signs in.
|
||||
Completer<FlowyResult<UserProfilePB, FlowyError>> supabaseLoginCompleter({
|
||||
required Future<FlowyResult<UserProfilePB, FlowyError>> Function(
|
||||
String userId,
|
||||
String userEmail,
|
||||
) onSuccess,
|
||||
}) {
|
||||
final completer = Completer<FlowyResult<UserProfilePB, FlowyError>>();
|
||||
late final StreamSubscription<AuthState> subscription;
|
||||
final auth = Supabase.instance.client.auth;
|
||||
|
||||
subscription = auth.onAuthStateChange.listen((event) async {
|
||||
final user = event.session?.user;
|
||||
if (event.event == AuthChangeEvent.signedIn && user != null) {
|
||||
final response = await onSuccess(
|
||||
user.id,
|
||||
user.email ?? user.newEmail ?? '',
|
||||
);
|
||||
// Only cancel the subscription if the Event is signedIn.
|
||||
await subscription.cancel();
|
||||
completer.complete(response);
|
||||
}
|
||||
});
|
||||
return completer;
|
||||
}
|
||||
|
||||
Map<String, String> queryParamsForProvider(OAuthProvider provider) {
|
||||
switch (provider) {
|
||||
case OAuthProvider.google:
|
||||
return {
|
||||
'access_type': 'offline',
|
||||
'prompt': 'consent',
|
||||
};
|
||||
case OAuthProvider.github:
|
||||
case OAuthProvider.discord:
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
}
|
@ -1,113 +0,0 @@
|
||||
import 'dart:async';
|
||||
|
||||
import 'package:appflowy/user/application/auth/auth_service.dart';
|
||||
import 'package:appflowy/user/application/auth/backend_auth_service.dart';
|
||||
import 'package:appflowy/user/application/user_service.dart';
|
||||
import 'package:appflowy_backend/dispatch/dispatch.dart';
|
||||
import 'package:appflowy_backend/log.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
|
||||
import 'package:appflowy_result/appflowy_result.dart';
|
||||
import 'package:supabase_flutter/supabase_flutter.dart';
|
||||
|
||||
import 'auth_error.dart';
|
||||
|
||||
/// Only used for testing.
|
||||
class SupabaseMockAuthService implements AuthService {
|
||||
SupabaseMockAuthService();
|
||||
static OauthSignInPB? signInPayload;
|
||||
|
||||
SupabaseClient get _client => Supabase.instance.client;
|
||||
GoTrueClient get _auth => _client.auth;
|
||||
|
||||
final BackendAuthService _appFlowyAuthService =
|
||||
BackendAuthService(AuthenticatorPB.Supabase);
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signUp({
|
||||
required String name,
|
||||
required String email,
|
||||
required String password,
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
throw UnimplementedError();
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signInWithEmailPassword({
|
||||
required String email,
|
||||
required String password,
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
throw UnimplementedError();
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signUpWithOAuth({
|
||||
required String platform,
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
const password = "AppFlowyTest123!";
|
||||
const email = "supabase_integration_test@appflowy.io";
|
||||
try {
|
||||
if (_auth.currentSession == null) {
|
||||
try {
|
||||
await _auth.signInWithPassword(
|
||||
password: password,
|
||||
email: email,
|
||||
);
|
||||
} catch (e) {
|
||||
Log.error(e);
|
||||
return FlowyResult.failure(AuthError.supabaseSignUpError);
|
||||
}
|
||||
}
|
||||
// Check if the user is already logged in.
|
||||
final session = _auth.currentSession!;
|
||||
final uuid = session.user.id;
|
||||
|
||||
// Create the OAuth sign-in payload.
|
||||
final payload = OauthSignInPB(
|
||||
authenticator: AuthenticatorPB.Supabase,
|
||||
map: {
|
||||
AuthServiceMapKeys.uuid: uuid,
|
||||
AuthServiceMapKeys.email: email,
|
||||
AuthServiceMapKeys.deviceId: 'MockDeviceId',
|
||||
},
|
||||
);
|
||||
|
||||
// Send the sign-in event and handle the response.
|
||||
return UserEventOauthSignIn(payload).send().then((value) => value);
|
||||
} on AuthException catch (e) {
|
||||
Log.error(e);
|
||||
return FlowyResult.failure(AuthError.supabaseSignInError);
|
||||
}
|
||||
}
|
||||
|
||||
@override
|
||||
Future<void> signOut() async {
|
||||
// await _auth.signOut();
|
||||
await _appFlowyAuthService.signOut();
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signUpAsGuest({
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
// supabase don't support guest login.
|
||||
// so, just forward to our backend.
|
||||
return _appFlowyAuthService.signUpAsGuest();
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> signInWithMagicLink({
|
||||
required String email,
|
||||
Map<String, String> params = const {},
|
||||
}) async {
|
||||
throw UnimplementedError();
|
||||
}
|
||||
|
||||
@override
|
||||
Future<FlowyResult<UserProfilePB, FlowyError>> getUser() async {
|
||||
return UserBackendService.getCurrentUserProfile();
|
||||
}
|
||||
}
|
@@ -90,7 +90,6 @@ class SettingsDialogBloc
]) async {
if ([
AuthenticatorPB.Local,
AuthenticatorPB.Supabase,
].contains(userProfile.authenticator)) {
return false;
}
@ -1,103 +0,0 @@
|
||||
import 'package:appflowy/env/backend_env.dart';
|
||||
import 'package:appflowy/env/cloud_env.dart';
|
||||
import 'package:appflowy/plugins/database/application/defines.dart';
|
||||
import 'package:appflowy/startup/startup.dart';
|
||||
import 'package:appflowy_backend/dispatch/dispatch.dart';
|
||||
import 'package:appflowy_backend/log.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
|
||||
import 'package:appflowy_result/appflowy_result.dart';
|
||||
import 'package:flutter_bloc/flutter_bloc.dart';
|
||||
import 'package:freezed_annotation/freezed_annotation.dart';
|
||||
|
||||
import 'cloud_setting_listener.dart';
|
||||
|
||||
part 'supabase_cloud_setting_bloc.freezed.dart';
|
||||
|
||||
class SupabaseCloudSettingBloc
|
||||
extends Bloc<SupabaseCloudSettingEvent, SupabaseCloudSettingState> {
|
||||
SupabaseCloudSettingBloc({
|
||||
required CloudSettingPB setting,
|
||||
}) : _listener = UserCloudConfigListener(),
|
||||
super(SupabaseCloudSettingState.initial(setting)) {
|
||||
_dispatch();
|
||||
}
|
||||
|
||||
final UserCloudConfigListener _listener;
|
||||
|
||||
@override
|
||||
Future<void> close() async {
|
||||
await _listener.stop();
|
||||
return super.close();
|
||||
}
|
||||
|
||||
void _dispatch() {
|
||||
on<SupabaseCloudSettingEvent>(
|
||||
(event, emit) async {
|
||||
await event.when(
|
||||
initial: () async {
|
||||
_listener.start(
|
||||
onSettingChanged: (result) {
|
||||
if (isClosed) {
|
||||
return;
|
||||
}
|
||||
result.fold(
|
||||
(setting) =>
|
||||
add(SupabaseCloudSettingEvent.didReceiveSetting(setting)),
|
||||
(error) => Log.error(error),
|
||||
);
|
||||
},
|
||||
);
|
||||
},
|
||||
enableSync: (bool enable) async {
|
||||
final update = UpdateCloudConfigPB.create()..enableSync = enable;
|
||||
await updateCloudConfig(update);
|
||||
},
|
||||
didReceiveSetting: (CloudSettingPB setting) {
|
||||
emit(
|
||||
state.copyWith(
|
||||
setting: setting,
|
||||
loadingState: LoadingState.finish(FlowyResult.success(null)),
|
||||
),
|
||||
);
|
||||
},
|
||||
enableEncrypt: (bool enable) {
|
||||
final update = UpdateCloudConfigPB.create()..enableEncrypt = enable;
|
||||
updateCloudConfig(update);
|
||||
emit(state.copyWith(loadingState: const LoadingState.loading()));
|
||||
},
|
||||
);
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
Future<void> updateCloudConfig(UpdateCloudConfigPB setting) async {
|
||||
await UserEventSetCloudConfig(setting).send();
|
||||
}
|
||||
}
|
||||
|
||||
@freezed
|
||||
class SupabaseCloudSettingEvent with _$SupabaseCloudSettingEvent {
|
||||
const factory SupabaseCloudSettingEvent.initial() = _Initial;
|
||||
const factory SupabaseCloudSettingEvent.didReceiveSetting(
|
||||
CloudSettingPB setting,
|
||||
) = _DidSyncSupabaseConfig;
|
||||
const factory SupabaseCloudSettingEvent.enableSync(bool enable) = _EnableSync;
|
||||
const factory SupabaseCloudSettingEvent.enableEncrypt(bool enable) =
|
||||
_EnableEncrypt;
|
||||
}
|
||||
|
||||
@freezed
|
||||
class SupabaseCloudSettingState with _$SupabaseCloudSettingState {
|
||||
const factory SupabaseCloudSettingState({
|
||||
required LoadingState loadingState,
|
||||
required SupabaseConfiguration config,
|
||||
required CloudSettingPB setting,
|
||||
}) = _SupabaseCloudSettingState;
|
||||
|
||||
factory SupabaseCloudSettingState.initial(CloudSettingPB setting) =>
|
||||
SupabaseCloudSettingState(
|
||||
loadingState: LoadingState.finish(FlowyResult.success(null)),
|
||||
setting: setting,
|
||||
config: getIt<AppFlowyCloudSharedEnv>().supabaseConfig,
|
||||
);
|
||||
}
|
@ -1,128 +0,0 @@
|
||||
import 'package:appflowy/env/backend_env.dart';
|
||||
import 'package:appflowy/env/cloud_env.dart';
|
||||
import 'package:appflowy/generated/locale_keys.g.dart';
|
||||
import 'package:appflowy/startup/startup.dart';
|
||||
import 'package:appflowy_backend/dispatch/dispatch.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
|
||||
import 'package:easy_localization/easy_localization.dart';
|
||||
import 'package:flutter_bloc/flutter_bloc.dart';
|
||||
import 'package:freezed_annotation/freezed_annotation.dart';
|
||||
|
||||
import 'appflowy_cloud_setting_bloc.dart';
|
||||
|
||||
part 'supabase_cloud_urls_bloc.freezed.dart';
|
||||
|
||||
class SupabaseCloudURLsBloc
|
||||
extends Bloc<SupabaseCloudURLsEvent, SupabaseCloudURLsState> {
|
||||
SupabaseCloudURLsBloc() : super(SupabaseCloudURLsState.initial()) {
|
||||
on<SupabaseCloudURLsEvent>((event, emit) async {
|
||||
await event.when(
|
||||
updateUrl: (String url) {
|
||||
emit(
|
||||
state.copyWith(
|
||||
updatedUrl: url,
|
||||
showRestartHint: url.isNotEmpty && state.upatedAnonKey.isNotEmpty,
|
||||
urlError: null,
|
||||
),
|
||||
);
|
||||
},
|
||||
updateAnonKey: (String anonKey) {
|
||||
emit(
|
||||
state.copyWith(
|
||||
upatedAnonKey: anonKey,
|
||||
showRestartHint:
|
||||
anonKey.isNotEmpty && state.updatedUrl.isNotEmpty,
|
||||
anonKeyError: null,
|
||||
),
|
||||
);
|
||||
},
|
||||
confirmUpdate: () async {
|
||||
if (state.updatedUrl.isEmpty) {
|
||||
emit(
|
||||
state.copyWith(
|
||||
urlError:
|
||||
LocaleKeys.settings_menu_cloudSupabaseUrlCanNotBeEmpty.tr(),
|
||||
anonKeyError: null,
|
||||
restartApp: false,
|
||||
),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (state.upatedAnonKey.isEmpty) {
|
||||
emit(
|
||||
state.copyWith(
|
||||
urlError: null,
|
||||
anonKeyError: LocaleKeys
|
||||
.settings_menu_cloudSupabaseAnonKeyCanNotBeEmpty
|
||||
.tr(),
|
||||
restartApp: false,
|
||||
),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
validateUrl(state.updatedUrl).fold(
|
||||
(_) async {
|
||||
await useSupabaseCloud(
|
||||
url: state.updatedUrl,
|
||||
anonKey: state.upatedAnonKey,
|
||||
);
|
||||
|
||||
add(const SupabaseCloudURLsEvent.didSaveConfig());
|
||||
},
|
||||
(error) => emit(state.copyWith(urlError: error)),
|
||||
);
|
||||
},
|
||||
didSaveConfig: () {
|
||||
emit(
|
||||
state.copyWith(
|
||||
urlError: null,
|
||||
anonKeyError: null,
|
||||
restartApp: true,
|
||||
),
|
||||
);
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
Future<void> updateCloudConfig(UpdateCloudConfigPB setting) async {
|
||||
await UserEventSetCloudConfig(setting).send();
|
||||
}
|
||||
}
|
||||
|
||||
@freezed
|
||||
class SupabaseCloudURLsEvent with _$SupabaseCloudURLsEvent {
|
||||
const factory SupabaseCloudURLsEvent.updateUrl(String text) = _UpdateUrl;
|
||||
const factory SupabaseCloudURLsEvent.updateAnonKey(String text) =
|
||||
_UpdateAnonKey;
|
||||
const factory SupabaseCloudURLsEvent.confirmUpdate() = _UpdateConfig;
|
||||
const factory SupabaseCloudURLsEvent.didSaveConfig() = _DidSaveConfig;
|
||||
}
|
||||
|
||||
@freezed
|
||||
class SupabaseCloudURLsState with _$SupabaseCloudURLsState {
|
||||
const factory SupabaseCloudURLsState({
|
||||
required SupabaseConfiguration config,
|
||||
required String updatedUrl,
|
||||
required String upatedAnonKey,
|
||||
required String? urlError,
|
||||
required String? anonKeyError,
|
||||
required bool restartApp,
|
||||
required bool showRestartHint,
|
||||
}) = _SupabaseCloudURLsState;
|
||||
|
||||
factory SupabaseCloudURLsState.initial() {
|
||||
final config = getIt<AppFlowyCloudSharedEnv>().supabaseConfig;
|
||||
return SupabaseCloudURLsState(
|
||||
updatedUrl: config.url,
|
||||
upatedAnonKey: config.anon_key,
|
||||
urlError: null,
|
||||
anonKeyError: null,
|
||||
restartApp: false,
|
||||
showRestartHint: config.url.isNotEmpty && config.anon_key.isNotEmpty,
|
||||
config: config,
|
||||
);
|
||||
}
|
||||
}
|
@ -22,7 +22,6 @@ import 'package:flutter_bloc/flutter_bloc.dart';
|
||||
import 'package:go_router/go_router.dart';
|
||||
|
||||
import 'setting_appflowy_cloud.dart';
|
||||
import 'setting_supabase_cloud.dart';
|
||||
|
||||
class SettingCloud extends StatelessWidget {
|
||||
const SettingCloud({required this.restartAppFlowy, super.key});
|
||||
@ -80,8 +79,6 @@ class SettingCloud extends StatelessWidget {
|
||||
switch (cloudType) {
|
||||
case AuthenticatorType.local:
|
||||
return SettingLocalCloud(restartAppFlowy: restartAppFlowy);
|
||||
case AuthenticatorType.supabase:
|
||||
return SettingSupabaseCloudView(restartAppFlowy: restartAppFlowy);
|
||||
case AuthenticatorType.appflowyCloud:
|
||||
return AppFlowyCloudViewSetting(restartAppFlowy: restartAppFlowy);
|
||||
case AuthenticatorType.appflowyCloudSelfHost:
|
||||
@ -112,9 +109,6 @@ class CloudTypeSwitcher extends StatelessWidget {
|
||||
// Only show the appflowyCloudDevelop in develop mode
|
||||
final values = AuthenticatorType.values.where((element) {
|
||||
// Supabase will going to be removed in the future
|
||||
if (element == AuthenticatorType.supabase) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return isDevelopMode || element != AuthenticatorType.appflowyCloudDevelop;
|
||||
}).toList();
|
||||
@ -218,8 +212,6 @@ String titleFromCloudType(AuthenticatorType cloudType) {
|
||||
switch (cloudType) {
|
||||
case AuthenticatorType.local:
|
||||
return LocaleKeys.settings_menu_cloudLocal.tr();
|
||||
case AuthenticatorType.supabase:
|
||||
return LocaleKeys.settings_menu_cloudSupabase.tr();
|
||||
case AuthenticatorType.appflowyCloud:
|
||||
return LocaleKeys.settings_menu_cloudAppFlowy.tr();
|
||||
case AuthenticatorType.appflowyCloudSelfHost:
|
||||
|
@ -1,339 +0,0 @@
|
||||
import 'package:flutter/gestures.dart';
|
||||
import 'package:flutter/material.dart';
|
||||
import 'package:flutter/services.dart';
|
||||
|
||||
import 'package:appflowy/core/helpers/url_launcher.dart';
|
||||
import 'package:appflowy/generated/locale_keys.g.dart';
|
||||
import 'package:appflowy/workspace/application/settings/supabase_cloud_setting_bloc.dart';
|
||||
import 'package:appflowy/workspace/application/settings/supabase_cloud_urls_bloc.dart';
|
||||
import 'package:appflowy/workspace/presentation/home/toast.dart';
|
||||
import 'package:appflowy/workspace/presentation/settings/widgets/_restart_app_button.dart';
|
||||
import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
|
||||
import 'package:appflowy_backend/dispatch/dispatch.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
|
||||
import 'package:appflowy_backend/protobuf/flowy-user/user_setting.pb.dart';
|
||||
import 'package:appflowy_result/appflowy_result.dart';
|
||||
import 'package:easy_localization/easy_localization.dart';
|
||||
import 'package:flowy_infra/size.dart';
|
||||
import 'package:flowy_infra/theme_extension.dart';
|
||||
import 'package:flowy_infra_ui/flowy_infra_ui.dart';
|
||||
import 'package:flowy_infra_ui/widget/error_page.dart';
|
||||
import 'package:flutter_bloc/flutter_bloc.dart';
|
||||
|
||||
class SettingSupabaseCloudView extends StatelessWidget {
|
||||
const SettingSupabaseCloudView({required this.restartAppFlowy, super.key});
|
||||
|
||||
final VoidCallback restartAppFlowy;
|
||||
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
    return FutureBuilder<FlowyResult<CloudSettingPB, FlowyError>>(
      future: UserEventGetCloudConfig().send(),
      builder: (context, snapshot) {
        if (snapshot.data != null &&
            snapshot.connectionState == ConnectionState.done) {
          return snapshot.data!.fold(
            (setting) {
              return BlocProvider(
                create: (context) => SupabaseCloudSettingBloc(
                  setting: setting,
                )..add(const SupabaseCloudSettingEvent.initial()),
                child: Column(
                  children: [
                    BlocBuilder<SupabaseCloudSettingBloc,
                        SupabaseCloudSettingState>(
                      builder: (context, state) {
                        return const Column(
                          children: [
                            SupabaseEnableSync(),
                            EnableEncrypt(),
                          ],
                        );
                      },
                    ),
                    const VSpace(40),
                    const SupabaseSelfhostTip(),
                    SupabaseCloudURLs(
                      didUpdateUrls: restartAppFlowy,
                    ),
                  ],
                ),
              );
            },
            (err) {
              return FlowyErrorPage.message(err.toString(), howToFix: "");
            },
          );
        } else {
          return const Center(
            child: CircularProgressIndicator(),
          );
        }
      },
    );
  }
}

class SupabaseCloudURLs extends StatelessWidget {
  const SupabaseCloudURLs({super.key, required this.didUpdateUrls});

  final VoidCallback didUpdateUrls;

  @override
  Widget build(BuildContext context) {
    return BlocProvider(
      create: (context) => SupabaseCloudURLsBloc(),
      child: BlocListener<SupabaseCloudURLsBloc, SupabaseCloudURLsState>(
        listener: (context, state) async {
          if (state.restartApp) {
            didUpdateUrls();
          }
        },
        child: BlocBuilder<SupabaseCloudURLsBloc, SupabaseCloudURLsState>(
          builder: (context, state) {
            return Column(
              children: [
                SupabaseInput(
                  title: LocaleKeys.settings_menu_cloudSupabaseUrl.tr(),
                  url: state.config.url,
                  hint: LocaleKeys.settings_menu_cloudURLHint.tr(),
                  onChanged: (text) {
                    context
                        .read<SupabaseCloudURLsBloc>()
                        .add(SupabaseCloudURLsEvent.updateUrl(text));
                  },
                  error: state.urlError,
                ),
                SupabaseInput(
                  title: LocaleKeys.settings_menu_cloudSupabaseAnonKey.tr(),
                  url: state.config.anon_key,
                  hint: LocaleKeys.settings_menu_cloudURLHint.tr(),
                  onChanged: (text) {
                    context
                        .read<SupabaseCloudURLsBloc>()
                        .add(SupabaseCloudURLsEvent.updateAnonKey(text));
                  },
                  error: state.anonKeyError,
                ),
                const VSpace(20),
                RestartButton(
                  onClick: () => _restartApp(context),
                  showRestartHint: state.showRestartHint,
                ),
              ],
            );
          },
        ),
      ),
    );
  }

  void _restartApp(BuildContext context) {
    NavigatorAlertDialog(
      title: LocaleKeys.settings_menu_restartAppTip.tr(),
      confirm: () => context
          .read<SupabaseCloudURLsBloc>()
          .add(const SupabaseCloudURLsEvent.confirmUpdate()),
    ).show(context);
  }
}

class EnableEncrypt extends StatelessWidget {
  const EnableEncrypt({super.key});

  @override
  Widget build(BuildContext context) {
    return BlocBuilder<SupabaseCloudSettingBloc, SupabaseCloudSettingState>(
      builder: (context, state) {
        final indicator = state.loadingState.when(
          loading: () => const CircularProgressIndicator.adaptive(),
          finish: (successOrFail) => const SizedBox.shrink(),
          idle: () => const SizedBox.shrink(),
        );

        return Column(
          children: [
            Row(
              children: [
                FlowyText.medium(LocaleKeys.settings_menu_enableEncrypt.tr()),
                const Spacer(),
                indicator,
                const HSpace(3),
                Switch.adaptive(
                  activeColor: Theme.of(context).colorScheme.primary,
                  onChanged: state.setting.enableEncrypt
                      ? null
                      : (bool value) {
                          context.read<SupabaseCloudSettingBloc>().add(
                                SupabaseCloudSettingEvent.enableEncrypt(value),
                              );
                        },
                  value: state.setting.enableEncrypt,
                ),
              ],
            ),
            Column(
              crossAxisAlignment: CrossAxisAlignment.start,
              children: [
                IntrinsicHeight(
                  child: Opacity(
                    opacity: 0.6,
                    child: FlowyText.medium(
                      LocaleKeys.settings_menu_enableEncryptPrompt.tr(),
                      maxLines: 13,
                    ),
                  ),
                ),
                const VSpace(6),
                SizedBox(
                  height: 40,
                  child: FlowyTooltip(
                    message: LocaleKeys.settings_menu_clickToCopySecret.tr(),
                    child: FlowyButton(
                      disable: !state.setting.enableEncrypt,
                      decoration: BoxDecoration(
                        borderRadius: Corners.s5Border,
                        border: Border.all(
                          color: Theme.of(context).colorScheme.secondary,
                        ),
                      ),
                      text: FlowyText.medium(state.setting.encryptSecret),
                      onTap: () async {
                        await Clipboard.setData(
                          ClipboardData(text: state.setting.encryptSecret),
                        );
                        showMessageToast(LocaleKeys.message_copy_success.tr());
                      },
                    ),
                  ),
                ),
              ],
            ),
          ],
        );
      },
    );
  }
}

class SupabaseEnableSync extends StatelessWidget {
  const SupabaseEnableSync({super.key});

  @override
  Widget build(BuildContext context) {
    return BlocBuilder<SupabaseCloudSettingBloc, SupabaseCloudSettingState>(
      builder: (context, state) {
        return Row(
          children: [
            FlowyText.medium(LocaleKeys.settings_menu_enableSync.tr()),
            const Spacer(),
            Switch.adaptive(
              activeColor: Theme.of(context).colorScheme.primary,
              onChanged: (bool value) {
                context.read<SupabaseCloudSettingBloc>().add(
                      SupabaseCloudSettingEvent.enableSync(value),
                    );
              },
              value: state.setting.enableSync,
            ),
          ],
        );
      },
    );
  }
}

@visibleForTesting
class SupabaseInput extends StatefulWidget {
  const SupabaseInput({
    super.key,
    required this.title,
    required this.url,
    required this.hint,
    required this.error,
    required this.onChanged,
  });

  final String title;
  final String url;
  final String hint;
  final String? error;
  final Function(String) onChanged;

  @override
  SupabaseInputState createState() => SupabaseInputState();
}

class SupabaseInputState extends State<SupabaseInput> {
  late final _controller = TextEditingController(text: widget.url);

  @override
  void dispose() {
    _controller.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return TextField(
      controller: _controller,
      style: const TextStyle(fontSize: 12.0),
      decoration: InputDecoration(
        contentPadding: const EdgeInsets.symmetric(vertical: 6),
        labelText: widget.title,
        labelStyle: Theme.of(context)
            .textTheme
            .titleMedium!
            .copyWith(fontWeight: FontWeight.w400, fontSize: 16),
        enabledBorder: UnderlineInputBorder(
          borderSide:
              BorderSide(color: AFThemeExtension.of(context).onBackground),
        ),
        focusedBorder: UnderlineInputBorder(
          borderSide: BorderSide(color: Theme.of(context).colorScheme.primary),
        ),
        hintText: widget.hint,
        errorText: widget.error,
      ),
      onChanged: widget.onChanged,
    );
  }
}

class SupabaseSelfhostTip extends StatelessWidget {
  const SupabaseSelfhostTip({super.key});

  final url =
      "https://docs.appflowy.io/docs/guides/appflowy/self-hosting-appflowy-using-supabase";

  @override
  Widget build(BuildContext context) {
    return Opacity(
      opacity: 0.6,
      child: RichText(
        text: TextSpan(
          children: <TextSpan>[
            TextSpan(
              text: LocaleKeys.settings_menu_selfHostStart.tr(),
              style: Theme.of(context).textTheme.bodySmall!,
            ),
            TextSpan(
              text: " ${LocaleKeys.settings_menu_selfHostContent.tr()} ",
              style: Theme.of(context).textTheme.bodyMedium!.copyWith(
                    fontSize: FontSizes.s14,
                    color: Theme.of(context).colorScheme.primary,
                    decoration: TextDecoration.underline,
                  ),
              recognizer: TapGestureRecognizer()
                ..onTap = () => afLaunchUrlString(url),
            ),
            TextSpan(
              text: LocaleKeys.settings_menu_selfHostEnd.tr(),
              style: Theme.of(context).textTheme.bodySmall!,
            ),
          ],
        ),
      ),
    );
  }
}
frontend/appflowy_tauri/src-tauri/Cargo.lock (generated, 75 changed lines — condensed)

[Generated lockfile churn omitted. The visible hunks repin every AppFlowy-Cloud crate (app-error, appflowy-ai-client, client-api, client-api-entity, client-websocket, collab-rt-entity, collab-rt-protocol, database-entity, gotrue, gotrue-entity, infra, shared-entity) from rev 7878a018a18553e3d8201e572a0c066c14ba3b35 to rev d503905 (d5039059313804103f34eee49ee9844c255a99c0), repin every AppFlowy-Collab crate (collab, collab-database, collab-document, collab-entity, collab-folder, collab-plugins, collab-user) from rev 6adf750 to rev d03bd474e551ab5583780abe051a85b8063e6aa9, and adjust the per-crate dependency lists accordingly (parking_lot 0.12.1 dropped; arc-swap and dashmap 6.0.1 added where needed).]
@@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"]
 # To switch to the local path, run:
 # scripts/tool/update_collab_source.sh
 # ⚠️⚠️⚠️️
-collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
 
 # Working directory: frontend
 # To update the commit ID, run:
@@ -1,9 +1,9 @@
use dotenv::dotenv;
use flowy_core::config::AppFlowyCoreConfig;
use flowy_core::{AppFlowyCore, MutexAppFlowyCore, DEFAULT_NAME};
use flowy_core::{AppFlowyCore, DEFAULT_NAME};
use lib_dispatch::runtime::AFPluginRuntime;
use std::rc::Rc;

use dotenv::dotenv;
use std::sync::Mutex;

pub fn read_env() {
  dotenv().ok();
@@ -25,7 +25,7 @@ pub fn read_env() {
  }
}

pub fn init_flowy_core() -> MutexAppFlowyCore {
pub(crate) fn init_appflowy_core() -> MutexAppFlowyCore {
  let config_json = include_str!("../tauri.conf.json");
  let config: tauri_utils::config::Config = serde_json::from_str(config_json).unwrap();

@@ -67,3 +67,13 @@ pub fn init_flowy_core() -> MutexAppFlowyCore {
    MutexAppFlowyCore::new(AppFlowyCore::new(config, cloned_runtime, None).await)
  })
}

pub struct MutexAppFlowyCore(pub Rc<Mutex<AppFlowyCore>>);

impl MutexAppFlowyCore {
  fn new(appflowy_core: AppFlowyCore) -> Self {
    Self(Rc::new(Mutex::new(appflowy_core)))
  }
}
unsafe impl Sync for MutexAppFlowyCore {}
unsafe impl Send for MutexAppFlowyCore {}
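The init.rs hunk above stops importing `MutexAppFlowyCore` from `flowy_core` and redefines it locally as a newtype over `Rc<Mutex<AppFlowyCore>>`, then asserts `Send`/`Sync` with `unsafe impl`. A minimal sketch of that wrapper pattern, with a hypothetical `Core` type standing in for the real `AppFlowyCore`; the safety argument is an assumption mirrored from the diff, not a general guarantee:

```rust
use std::rc::Rc;
use std::sync::Mutex;

// Hypothetical stand-in for AppFlowyCore: it owns `Rc` internals, so it is !Send.
struct Core {
  name: Rc<String>,
}

// Same shape as the MutexAppFlowyCore newtype introduced in this hunk.
pub struct MutexCore(pub Rc<Mutex<Core>>);

impl MutexCore {
  fn new(core: Core) -> Self {
    Self(Rc::new(Mutex::new(core)))
  }
}

// SAFETY (assumed, as in the diff): the host only ever reaches the wrapper
// through the mutex from one thread at a time, so no unsynchronized access to
// the non-Send internals occurs. Without these impls, a state container that
// requires Send + Sync would reject the type.
unsafe impl Sync for MutexCore {}
unsafe impl Send for MutexCore {}

fn main() {
  let core = MutexCore::new(Core { name: Rc::new("appflowy".to_string()) });
  // std::sync::Mutex::lock returns a Result, hence the unwrap.
  println!("{}", core.0.lock().unwrap().name);
}
```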
@@ -11,17 +11,18 @@ mod init;
mod notification;
mod request;

use crate::init::init_appflowy_core;
use crate::request::invoke_request;
use flowy_notification::{register_notification_sender, unregister_all_notification_sender};
use init::*;
use notification::*;
use request::*;
use tauri::Manager;

extern crate dotenv;

fn main() {
  tauri_plugin_deep_link::prepare(DEEP_LINK_SCHEME);

  let flowy_core = init_flowy_core();
  let flowy_core = init_appflowy_core();
  tauri::Builder::default()
    .invoke_handler(tauri::generate_handler![invoke_request])
    .manage(flowy_core)
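main.rs hands the core to Tauri with `.manage(...)`, and request.rs later pulls it back out as `State<MutexAppFlowyCore>`. A self-contained sketch of that register/retrieve pattern, assuming a Tauri 1.x project scaffold; `AppCore` and `bump` are hypothetical stand-ins (the real handler is `invoke_request`):

```rust
use std::sync::Mutex;
use tauri::State;

// Hypothetical managed state; the commit manages MutexAppFlowyCore instead.
struct AppCore {
  counter: Mutex<u64>,
}

// Hypothetical command: Tauri injects the managed value by type.
#[tauri::command]
fn bump(state: State<AppCore>) -> u64 {
  let mut counter = state.counter.lock().unwrap();
  *counter += 1;
  *counter
}

fn main() {
  tauri::Builder::default()
    // `.manage` stores the value in Tauri's state container (Send + Sync required)...
    .manage(AppCore { counter: Mutex::new(0) })
    // ...and any #[tauri::command] can receive it again as `State<AppCore>`.
    .invoke_handler(tauri::generate_handler![bump])
    .run(tauri::generate_context!())
    .expect("error while running tauri application");
}
```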
@@ -1,4 +1,4 @@
use flowy_core::MutexAppFlowyCore;
use crate::init::MutexAppFlowyCore;
use lib_dispatch::prelude::{
  AFPluginDispatcher, AFPluginEventResponse, AFPluginRequest, StatusCode,
};
@@ -39,7 +39,7 @@ pub async fn invoke_request(
) -> AFTauriResponse {
  let request: AFPluginRequest = request.into();
  let state: State<MutexAppFlowyCore> = app_handler.state();
  let dispatcher = state.0.lock().dispatcher();
  let dispatcher = state.0.lock().unwrap().dispatcher();
  let response = AFPluginDispatcher::sync_send(dispatcher, request);
  response.into()
}
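The one-line change in `invoke_request` (`lock()` becoming `lock().unwrap()`) follows from removing parking_lot: `std::sync::Mutex::lock` returns a `Result` because of lock poisoning, whereas parking_lot's `lock` returns the guard directly. A small illustration:

```rust
use std::sync::Mutex;

fn main() {
  // std::sync::Mutex::lock returns Result<MutexGuard<T>, PoisonError<...>>,
  // so call sites must unwrap (or otherwise handle) a poisoned lock.
  let value = Mutex::new(41);
  let mut guard = value.lock().unwrap();
  *guard += 1;
  assert_eq!(*guard, 42);

  // With parking_lot (the dependency this commit removes), lock() hands back
  // the guard directly and never poisons, which is why the old call site had
  // no unwrap():
  //
  //   let value = parking_lot::Mutex::new(41);
  //   let mut guard = value.lock(); // no Result here
  //   *guard += 1;
}
```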
frontend/appflowy_web_app/src-tauri/Cargo.lock (generated, 75 changed lines — condensed)

[Generated lockfile churn omitted; the hunks mirror the appflowy_tauri lockfile above: AppFlowy-Cloud crates repinned from rev 7878a018a18553e3d8201e572a0c066c14ba3b35 to rev d503905 (d5039059313804103f34eee49ee9844c255a99c0), AppFlowy-Collab crates repinned from rev 6adf750 to rev d03bd474e551ab5583780abe051a85b8063e6aa9, parking_lot 0.12.1 dropped, arc-swap and dashmap 6.0.1 added where needed.]
@@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"]
 # To switch to the local path, run:
 # scripts/tool/update_collab_source.sh
 # ⚠️⚠️⚠️️
-collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
-collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
+collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
+collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
 
 # Working directory: frontend
 # To update the commit ID, run:
@@ -1,9 +1,9 @@
use dotenv::dotenv;
use flowy_core::config::AppFlowyCoreConfig;
use flowy_core::{AppFlowyCore, MutexAppFlowyCore, DEFAULT_NAME};
use flowy_core::{AppFlowyCore, DEFAULT_NAME};
use lib_dispatch::runtime::AFPluginRuntime;
use std::rc::Rc;

use dotenv::dotenv;
use std::sync::Mutex;

pub fn read_env() {
  dotenv().ok();
@@ -25,7 +25,7 @@ pub fn read_env() {
  }
}

pub fn init_flowy_core() -> MutexAppFlowyCore {
pub fn init_appflowy_core() -> MutexAppFlowyCore {
  let config_json = include_str!("../tauri.conf.json");
  let config: tauri_utils::config::Config = serde_json::from_str(config_json).unwrap();

@@ -67,3 +67,13 @@ pub fn init_flowy_core() -> MutexAppFlowyCore {
    MutexAppFlowyCore::new(AppFlowyCore::new(config, cloned_runtime, None).await)
  })
}

pub struct MutexAppFlowyCore(pub Rc<Mutex<AppFlowyCore>>);

impl MutexAppFlowyCore {
  pub(crate) fn new(appflowy_core: AppFlowyCore) -> Self {
    Self(Rc::new(Mutex::new(appflowy_core)))
  }
}
unsafe impl Sync for MutexAppFlowyCore {}
unsafe impl Send for MutexAppFlowyCore {}
@@ -21,7 +21,7 @@ extern crate dotenv;
fn main() {
  tauri_plugin_deep_link::prepare(DEEP_LINK_SCHEME);

  let flowy_core = init_flowy_core();
  let flowy_core = init_appflowy_core();
  tauri::Builder::default()
    .invoke_handler(tauri::generate_handler![invoke_request])
    .manage(flowy_core)
@@ -1,4 +1,4 @@
use flowy_core::MutexAppFlowyCore;
use crate::init::MutexAppFlowyCore;
use lib_dispatch::prelude::{
  AFPluginDispatcher, AFPluginEventResponse, AFPluginRequest, StatusCode,
};
@@ -39,7 +39,7 @@ pub async fn invoke_request(
) -> AFTauriResponse {
  let request: AFPluginRequest = request.into();
  let state: State<MutexAppFlowyCore> = app_handler.state();
  let dispatcher = state.0.lock().dispatcher();
  let dispatcher = state.0.lock().unwrap().dispatcher();
  let response = AFPluginDispatcher::sync_send(dispatcher, request);
  response.into()
}
frontend/rust-lib/Cargo.lock (generated, 77 changed lines — condensed)

[Generated lockfile churn omitted; same pattern as the two Tauri lockfiles above: AppFlowy-Cloud crates repinned from rev 7878a018a18553e3d8201e572a0c066c14ba3b35 to rev d503905 (d5039059313804103f34eee49ee9844c255a99c0), AppFlowy-Collab crates repinned from rev 6adf750 to rev d03bd474e551ab5583780abe051a85b8063e6aa9, parking_lot 0.12.1 dropped from the workspace crates, arc-swap and dashmap 6.0.1 added where needed.]
@ -68,6 +68,7 @@ flowy-date = { workspace = true, path = "flowy-date" }
flowy-ai = { workspace = true, path = "flowy-ai" }
flowy-ai-pub = { workspace = true, path = "flowy-ai-pub" }
anyhow = "1.0"
arc-swap = "1.7"
tracing = "0.1.40"
bytes = "1.5.0"
serde_json = "1.0.108"
@ -76,7 +77,6 @@ protobuf = { version = "2.28.0" }
diesel = { version = "2.1.0", features = ["sqlite", "chrono", "r2d2", "serde_json"] }
uuid = { version = "1.5.0", features = ["serde", "v4", "v5"] }
serde_repr = "0.1"
parking_lot = "0.12"
futures = "0.3.29"
tokio = "1.38.0"
tokio-stream = "0.1.14"
@ -100,8 +100,8 @@ dashmap = "6.0.1"
# Run the script.add_workspace_members:
# scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "7878a018a18553e3d8201e572a0c066c14ba3b35" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "7878a018a18553e3d8201e572a0c066c14ba3b35" }
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "d503905" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "d503905" }

[profile.dev]
opt-level = 0
@ -136,13 +136,13 @@ rocksdb = { git = "https://github.com/rust-rocksdb/rust-rocksdb", rev = "1710120
# To switch to the local path, run:
# scripts/tool/update_collab_source.sh
# ⚠️⚠️⚠️️
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6adf750" }
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d03bd474e551ab5583780abe051a85b8063e6aa9" }

# Working directory: frontend
# To update the commit ID, run:
@ -11,15 +11,19 @@ crate-type = ["cdylib", "rlib"]
collab = { workspace = true }
collab-plugins = { workspace = true }
collab-entity = { workspace = true }
collab-document = { workspace = true }
collab-folder = { workspace = true }
collab-user = { workspace = true }
collab-database = { workspace = true }
serde.workspace = true
serde_json.workspace = true
anyhow.workspace = true
tracing.workspace = true
parking_lot.workspace = true
async-trait.workspace = true
tokio = { workspace = true, features = ["sync"] }
lib-infra = { workspace = true }
futures = "0.3"
arc-swap = "1.7"

[features]
default = []
@ -1,11 +1,18 @@
use std::borrow::BorrowMut;
use std::fmt::{Debug, Display};
use std::sync::{Arc, Weak};

use crate::CollabKVDB;
use anyhow::Error;
use collab::core::collab::{DataSource, MutexCollab};
use collab::preclude::CollabBuilder;
use arc_swap::{ArcSwap, ArcSwapOption};
use collab::core::collab::DataSource;
use collab::core::collab_plugin::CollabPersistence;
use collab::preclude::{Collab, CollabBuilder};
use collab_database::workspace_database::{DatabaseCollabService, WorkspaceDatabase};
use collab_document::blocks::DocumentData;
use collab_document::document::Document;
use collab_entity::{CollabObject, CollabType};
use collab_folder::{Folder, FolderData, FolderNotify};
use collab_plugins::connect_state::{CollabConnectReachability, CollabConnectState};
use collab_plugins::local_storage::kv::snapshot::SnapshotPersistence;
if_native! {
@ -17,17 +24,19 @@ use collab_plugins::local_storage::indexeddb::IndexeddbDiskPlugin;
}

pub use crate::plugin_provider::CollabCloudPluginProvider;
use collab_plugins::local_storage::kv::doc::CollabKVAction;
use collab_plugins::local_storage::kv::KVTransactionDB;
use collab_plugins::local_storage::CollabPersistenceConfig;
use collab_user::core::{UserAwareness, UserAwarenessNotifier};
use tokio::sync::RwLock;

use lib_infra::{if_native, if_wasm};
use parking_lot::{Mutex, RwLock};
use tracing::{instrument, trace};
use tracing::{error, instrument, trace, warn};

#[derive(Clone, Debug)]
pub enum CollabPluginProviderType {
Local,
AppFlowyCloud,
Supabase,
}

pub enum CollabPluginProviderContext {
@ -35,13 +44,7 @@ pub enum CollabPluginProviderContext {
AppFlowyCloud {
uid: i64,
collab_object: CollabObject,
local_collab: Weak<MutexCollab>,
},
Supabase {
uid: i64,
collab_object: CollabObject,
local_collab: Weak<MutexCollab>,
local_collab_db: Weak<CollabKVDB>,
local_collab: Weak<RwLock<dyn BorrowMut<Collab> + Send + Sync + 'static>>,
},
}

@ -52,13 +55,7 @@ impl Display for CollabPluginProviderContext {
CollabPluginProviderContext::AppFlowyCloud {
uid: _,
collab_object,
local_collab: _,
} => collab_object.to_string(),
CollabPluginProviderContext::Supabase {
uid: _,
collab_object,
local_collab: _,
local_collab_db: _,
..
} => collab_object.to_string(),
};
write!(f, "{}", str)
@ -72,10 +69,10 @@ pub trait WorkspaceCollabIntegrate: Send + Sync {

pub struct AppFlowyCollabBuilder {
network_reachability: CollabConnectReachability,
plugin_provider: RwLock<Arc<dyn CollabCloudPluginProvider>>,
snapshot_persistence: Mutex<Option<Arc<dyn SnapshotPersistence>>>,
plugin_provider: ArcSwap<Arc<dyn CollabCloudPluginProvider>>,
snapshot_persistence: ArcSwapOption<Arc<dyn SnapshotPersistence + 'static>>,
#[cfg(not(target_arch = "wasm32"))]
rocksdb_backup: Mutex<Option<Arc<dyn RocksdbBackup>>>,
rocksdb_backup: ArcSwapOption<Arc<dyn RocksdbBackup>>,
workspace_integrate: Arc<dyn WorkspaceCollabIntegrate>,
}

@ -86,7 +83,7 @@ impl AppFlowyCollabBuilder {
) -> Self {
Self {
network_reachability: CollabConnectReachability::new(),
plugin_provider: RwLock::new(Arc::new(storage_provider)),
plugin_provider: ArcSwap::new(Arc::new(Arc::new(storage_provider))),
snapshot_persistence: Default::default(),
#[cfg(not(target_arch = "wasm32"))]
rocksdb_backup: Default::default(),
@ -95,12 +92,14 @@ impl AppFlowyCollabBuilder {
}

pub fn set_snapshot_persistence(&self, snapshot_persistence: Arc<dyn SnapshotPersistence>) {
*self.snapshot_persistence.lock() = Some(snapshot_persistence);
self
.snapshot_persistence
.store(Some(snapshot_persistence.into()));
}

#[cfg(not(target_arch = "wasm32"))]
pub fn set_rocksdb_backup(&self, rocksdb_backup: Arc<dyn RocksdbBackup>) {
*self.rocksdb_backup.lock() = Some(rocksdb_backup);
self.rocksdb_backup.store(Some(rocksdb_backup.into()));
}

pub fn update_network(&self, reachable: bool) {
@ -115,12 +114,23 @@ impl AppFlowyCollabBuilder {
}
}

fn collab_object(
pub fn collab_object(
&self,
workspace_id: &str,
uid: i64,
object_id: &str,
collab_type: CollabType,
) -> Result<CollabObject, Error> {
// Compare the workspace_id with the currently opened workspace_id. Return an error if they do not match.
// This check is crucial in asynchronous code contexts where the workspace_id might change during operation.
let actual_workspace_id = self.workspace_integrate.workspace_id()?;
if workspace_id != actual_workspace_id {
return Err(anyhow::anyhow!(
"workspace_id not match when build collab. expect workspace_id: {}, actual workspace_id: {}",
workspace_id,
actual_workspace_id
));
}
let device_id = self.workspace_integrate.device_id()?;
let workspace_id = self.workspace_integrate.workspace_id()?;
Ok(CollabObject::new(
@ -132,170 +142,155 @@ impl AppFlowyCollabBuilder {
))
}

/// Creates a new collaboration builder with the default configuration.
///
/// This function will initiate the creation of a [MutexCollab] object if it does not already exist.
/// To check for the existence of the object prior to creation, you should utilize a transaction
/// returned by the [read_txn] method of the [CollabKVDB]. Then, invoke the [is_exist] method
/// to confirm the object's presence.
///
/// # Parameters
/// - `uid`: The user ID associated with the collaboration.
/// - `object_id`: A string reference representing the ID of the object.
/// - `object_type`: The type of the collaboration, defined by the [CollabType] enum.
/// - `raw_data`: The raw data of the collaboration object, defined by the [CollabDocState] type.
/// - `collab_db`: A weak reference to the [CollabKVDB].
///
#[allow(clippy::too_many_arguments)]
pub async fn build(
#[instrument(
level = "trace",
skip(self, data_source, collab_db, builder_config, data)
)]
pub fn create_document(
&self,
workspace_id: &str,
uid: i64,
object_id: &str,
object_type: CollabType,
collab_doc_state: DataSource,
object: CollabObject,
data_source: DataSource,
collab_db: Weak<CollabKVDB>,
build_config: CollabBuilderConfig,
) -> Result<Arc<MutexCollab>, Error> {
self.build_with_config(
workspace_id,
uid,
object_id,
object_type,
collab_db,
collab_doc_state,
build_config,
)
builder_config: CollabBuilderConfig,
data: Option<DocumentData>,
) -> Result<Arc<RwLock<Document>>, Error> {
assert_eq!(object.collab_type, CollabType::Document);
let collab = self.build_collab(&object, &collab_db, data_source)?;
let document = Document::open_with(collab, data)?;
let document = Arc::new(RwLock::new(document));
self.finalize(object, builder_config, document)
}

/// Creates a new collaboration builder with the custom configuration.
///
/// This function will initiate the creation of a [MutexCollab] object if it does not already exist.
/// To check for the existence of the object prior to creation, you should utilize a transaction
/// returned by the [read_txn] method of the [CollabKVDB]. Then, invoke the [is_exist] method
/// to confirm the object's presence.
///
/// # Parameters
/// - `uid`: The user ID associated with the collaboration.
/// - `object_id`: A string reference representing the ID of the object.
/// - `object_type`: The type of the collaboration, defined by the [CollabType] enum.
/// - `raw_data`: The raw data of the collaboration object, defined by the [CollabDocState] type.
/// - `collab_db`: A weak reference to the [CollabKVDB].
///
#[allow(clippy::too_many_arguments)]
#[instrument(level = "trace", skip(self, collab_db, collab_doc_state, build_config))]
pub fn build_with_config(
#[instrument(
level = "trace",
skip(self, object, doc_state, collab_db, builder_config, folder_notifier)
)]
pub fn create_folder(
&self,
workspace_id: &str,
uid: i64,
object_id: &str,
object_type: CollabType,
object: CollabObject,
doc_state: DataSource,
collab_db: Weak<CollabKVDB>,
collab_doc_state: DataSource,
build_config: CollabBuilderConfig,
) -> Result<Arc<MutexCollab>, Error> {
let collab = CollabBuilder::new(uid, object_id)
.with_doc_state(collab_doc_state)
builder_config: CollabBuilderConfig,
folder_notifier: Option<FolderNotify>,
folder_data: Option<FolderData>,
) -> Result<Arc<RwLock<Folder>>, Error> {
assert_eq!(object.collab_type, CollabType::Folder);
let collab = self.build_collab(&object, &collab_db, doc_state)?;
let folder = Folder::open_with(object.uid, collab, folder_notifier, folder_data);
let folder = Arc::new(RwLock::new(folder));
self.finalize(object, builder_config, folder)
}
#[allow(clippy::too_many_arguments)]
#[instrument(
level = "trace",
skip(self, object, doc_state, collab_db, builder_config, notifier)
)]
pub fn create_user_awareness(
&self,
object: CollabObject,
doc_state: DataSource,
collab_db: Weak<CollabKVDB>,
builder_config: CollabBuilderConfig,
notifier: Option<UserAwarenessNotifier>,
) -> Result<Arc<RwLock<UserAwareness>>, Error> {
assert_eq!(object.collab_type, CollabType::UserAwareness);
let collab = self.build_collab(&object, &collab_db, doc_state)?;
let user_awareness = UserAwareness::open(collab, notifier);
let user_awareness = Arc::new(RwLock::new(user_awareness));
self.finalize(object, builder_config, user_awareness)
}

#[allow(clippy::too_many_arguments)]
#[instrument(
level = "trace",
skip(self, object, doc_state, collab_db, builder_config, collab_service)
)]
pub fn create_workspace_database(
&self,
object: CollabObject,
doc_state: DataSource,
collab_db: Weak<CollabKVDB>,
builder_config: CollabBuilderConfig,
collab_service: impl DatabaseCollabService,
) -> Result<Arc<RwLock<WorkspaceDatabase>>, Error> {
assert_eq!(object.collab_type, CollabType::WorkspaceDatabase);
let collab = self.build_collab(&object, &collab_db, doc_state)?;
let workspace = WorkspaceDatabase::open(object.uid, collab, collab_db.clone(), collab_service);
let workspace = Arc::new(RwLock::new(workspace));
self.finalize(object, builder_config, workspace)
}

pub fn build_collab(
&self,
object: &CollabObject,
collab_db: &Weak<CollabKVDB>,
data_source: DataSource,
) -> Result<Collab, Error> {
let collab = CollabBuilder::new(object.uid, &object.object_id, data_source)
.with_device_id(self.workspace_integrate.device_id()?)
.build()?;

// Compare the workspace_id with the currently opened workspace_id. Return an error if they do not match.
// This check is crucial in asynchronous code contexts where the workspace_id might change during operation.
let actual_workspace_id = self.workspace_integrate.workspace_id()?;
if workspace_id != actual_workspace_id {
return Err(anyhow::anyhow!(
"workspace_id not match when build collab. expect workspace_id: {}, actual workspace_id: {}",
workspace_id,
actual_workspace_id
));
}
let persistence_config = CollabPersistenceConfig::default();
let db_plugin = RocksdbDiskPlugin::new_with_config(
object.uid,
object.object_id.to_string(),
object.collab_type.clone(),
collab_db.clone(),
persistence_config.clone(),
);
collab.add_plugin(Box::new(db_plugin));

#[cfg(target_arch = "wasm32")]
{
collab.lock().add_plugin(Box::new(IndexeddbDiskPlugin::new(
uid,
object_id.to_string(),
object_type.clone(),
collab_db.clone(),
)));
Ok(collab)
}

pub fn finalize<T>(
&self,
object: CollabObject,
build_config: CollabBuilderConfig,
collab: Arc<RwLock<T>>,
) -> Result<Arc<RwLock<T>>, Error>
where
T: BorrowMut<Collab> + Send + Sync + 'static,
{
let mut write_collab = collab.try_write()?;
if !write_collab.borrow().get_state().is_uninitialized() {
drop(write_collab);
return Ok(collab);
}
trace!("🚀finalize collab:{}", object);
if build_config.sync_enable {
let plugin_provider = self.plugin_provider.load_full();
let provider_type = plugin_provider.provider_type();
let span =
tracing::span!(tracing::Level::TRACE, "collab_builder", object_id = %object.object_id);
let _enter = span.enter();
match provider_type {
CollabPluginProviderType::AppFlowyCloud => {
let local_collab = Arc::downgrade(&collab);
let plugins = plugin_provider.get_plugins(CollabPluginProviderContext::AppFlowyCloud {
uid: object.uid,
collab_object: object,
local_collab,
});

#[cfg(not(target_arch = "wasm32"))]
{
collab
.lock()
.add_plugin(Box::new(RocksdbDiskPlugin::new_with_config(
uid,
object_id.to_string(),
object_type.clone(),
collab_db.clone(),
persistence_config.clone(),
None,
)));
}

let arc_collab = Arc::new(collab);

{
let collab_object = self.collab_object(uid, object_id, object_type.clone())?;
if build_config.sync_enable {
let provider_type = self.plugin_provider.read().provider_type();
let span = tracing::span!(tracing::Level::TRACE, "collab_builder", object_id = %object_id);
let _enter = span.enter();
match provider_type {
CollabPluginProviderType::AppFlowyCloud => {
let local_collab = Arc::downgrade(&arc_collab);
let plugins =
self
.plugin_provider
.read()
.get_plugins(CollabPluginProviderContext::AppFlowyCloud {
uid,
collab_object,
local_collab,
});

for plugin in plugins {
arc_collab.lock().add_plugin(plugin);
}
},
CollabPluginProviderType::Supabase => {
#[cfg(not(target_arch = "wasm32"))]
{
trace!("init supabase collab plugins");
let local_collab = Arc::downgrade(&arc_collab);
let local_collab_db = collab_db.clone();
let plugins =
self
.plugin_provider
.read()
.get_plugins(CollabPluginProviderContext::Supabase {
uid,
collab_object,
local_collab,
local_collab_db,
});
for plugin in plugins {
arc_collab.lock().add_plugin(plugin);
}
}
},
CollabPluginProviderType::Local => {},
}
// at the moment when we get the lock, the collab object is not yet exposed outside
for plugin in plugins {
write_collab.borrow().add_plugin(plugin);
}
},
CollabPluginProviderType::Local => {},
}
}

if build_config.auto_initialize {
#[cfg(target_arch = "wasm32")]
futures::executor::block_on(arc_collab.lock().initialize());

#[cfg(not(target_arch = "wasm32"))]
arc_collab.lock().initialize();
// at the moment when we get the lock, the collab object is not yet exposed outside
(*write_collab).borrow_mut().initialize();
}

trace!("collab initialized: {}:{}", object_type, object_id);
Ok(arc_collab)
drop(write_collab);
Ok(collab)
}
}
@ -328,3 +323,39 @@ impl CollabBuilderConfig {
self
}
}

pub struct KVDBCollabPersistenceImpl {
pub db: Weak<CollabKVDB>,
pub uid: i64,
}

impl KVDBCollabPersistenceImpl {
pub fn new(db: Weak<CollabKVDB>, uid: i64) -> Self {
Self { db, uid }
}

pub fn into_data_source(self) -> DataSource {
DataSource::Disk(Some(Box::new(self)))
}
}

impl CollabPersistence for KVDBCollabPersistenceImpl {
fn load_collab(&self, collab: &mut Collab) {
if let Some(collab_db) = self.db.upgrade() {
let object_id = collab.object_id().to_string();
let rocksdb_read = collab_db.read_txn();

if rocksdb_read.is_exist(self.uid, &object_id) {
let mut txn = collab.transact_mut();
if let Err(err) = rocksdb_read.load_doc_with_txn(self.uid, &object_id, &mut txn) {
error!("🔴 load doc:{} failed: {}", object_id, err);
}
drop(rocksdb_read);
txn.commit();
drop(txn);
}
} else {
warn!("collab_db is dropped");
}
}
}
@ -1,4 +1,3 @@
pub use collab::core::collab::MutexCollab;
pub use collab::preclude::Snapshot;
pub use collab_plugins::local_storage::CollabPersistenceConfig;
pub use collab_plugins::CollabKVDB;
@ -1,6 +1,7 @@
use crate::collab_builder::{CollabPluginProviderContext, CollabPluginProviderType};
use collab::preclude::CollabPlugin;

use crate::collab_builder::{CollabPluginProviderContext, CollabPluginProviderType};

#[cfg(target_arch = "wasm32")]
pub trait CollabCloudPluginProvider: 'static {
fn provider_type(&self) -> CollabPluginProviderType;
@ -11,9 +12,9 @@ pub trait CollabCloudPluginProvider: 'static {
}

#[cfg(target_arch = "wasm32")]
impl<T> CollabCloudPluginProvider for std::rc::Rc<T>
impl<U> CollabCloudPluginProvider for std::rc::Rc<U>
where
T: CollabCloudPluginProvider,
U: CollabCloudPluginProvider,
{
fn provider_type(&self) -> CollabPluginProviderType {
(**self).provider_type()
@ -38,9 +39,9 @@ pub trait CollabCloudPluginProvider: Send + Sync + 'static {
}

#[cfg(not(target_arch = "wasm32"))]
impl<T> CollabCloudPluginProvider for std::sync::Arc<T>
impl<U> CollabCloudPluginProvider for std::sync::Arc<U>
where
T: CollabCloudPluginProvider,
U: CollabCloudPluginProvider,
{
fn provider_type(&self) -> CollabPluginProviderType {
(**self).provider_type()
@ -22,7 +22,6 @@ serde_json.workspace = true
|
||||
bytes.workspace = true
|
||||
crossbeam-utils = "0.8.15"
|
||||
lazy_static = "1.4.0"
|
||||
parking_lot.workspace = true
|
||||
tracing.workspace = true
|
||||
lib-log.workspace = true
|
||||
semver = "1.0.22"
|
||||
|
@ -3,7 +3,6 @@ use std::collections::HashMap;
|
||||
use serde::Deserialize;
|
||||
|
||||
use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
|
||||
use flowy_server_pub::supabase_config::SupabaseConfiguration;
|
||||
use flowy_server_pub::AuthenticatorType;
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
@ -17,7 +16,7 @@ pub struct AppFlowyDartConfiguration {
|
||||
pub device_id: String,
|
||||
pub platform: String,
|
||||
pub authenticator_type: AuthenticatorType,
|
||||
pub(crate) supabase_config: SupabaseConfiguration,
|
||||
//pub(crate) supabase_config: SupabaseConfiguration,
|
||||
pub(crate) appflowy_cloud_config: AFCloudConfiguration,
|
||||
#[serde(default)]
|
||||
pub(crate) envs: HashMap<String, String>,
|
||||
@ -31,7 +30,7 @@ impl AppFlowyDartConfiguration {
|
||||
pub fn write_env(&self) {
|
||||
self.authenticator_type.write_env();
|
||||
self.appflowy_cloud_config.write_env();
|
||||
self.supabase_config.write_env();
|
||||
//self.supabase_config.write_env();
|
||||
|
||||
for (k, v) in self.envs.iter() {
|
||||
std::env::set_var(k, v);
|
||||
|
@ -2,10 +2,9 @@
|
||||
|
||||
use allo_isolate::Isolate;
|
||||
use lazy_static::lazy_static;
|
||||
use parking_lot::Mutex;
|
||||
use semver::Version;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::{ffi::CStr, os::raw::c_char};
|
||||
use tracing::{debug, error, info, trace, warn};
|
||||
|
||||
@ -38,6 +37,10 @@ lazy_static! {
|
||||
static ref LOG_STREAM_ISOLATE: Mutex<Option<Isolate>> = Mutex::new(None);
|
||||
}
|
||||
|
||||
unsafe impl Send for MutexAppFlowyCore {}
|
||||
unsafe impl Sync for MutexAppFlowyCore {}
|
||||
|
||||
///FIXME: I'm pretty sure that there's a better way to do this
|
||||
struct MutexAppFlowyCore(Rc<Mutex<Option<AppFlowyCore>>>);
|
||||
|
||||
impl MutexAppFlowyCore {
|
||||
@ -46,15 +49,12 @@ impl MutexAppFlowyCore {
|
||||
}
|
||||
|
||||
fn dispatcher(&self) -> Option<Rc<AFPluginDispatcher>> {
|
||||
let binding = self.0.lock();
|
||||
let binding = self.0.lock().unwrap();
|
||||
let core = binding.as_ref();
|
||||
core.map(|core| core.event_dispatcher.clone())
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl Sync for MutexAppFlowyCore {}
|
||||
unsafe impl Send for MutexAppFlowyCore {}
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 {
|
||||
// and sent it the `Rust's` result
|
||||
@ -87,7 +87,7 @@ pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 {
|
||||
|
||||
// Ensure that the database is closed before initialization. Also, verify that the init_sdk function can be called
|
||||
// multiple times (is reentrant). Currently, only the database resource is exclusive.
|
||||
if let Some(core) = &*APPFLOWY_CORE.0.lock() {
|
||||
if let Some(core) = &*APPFLOWY_CORE.0.lock().unwrap() {
|
||||
core.close_db();
|
||||
}
|
||||
|
||||
@ -96,11 +96,12 @@ pub extern "C" fn init_sdk(_port: i64, data: *mut c_char) -> i64 {
|
||||
|
||||
let log_stream = LOG_STREAM_ISOLATE
|
||||
.lock()
|
||||
.unwrap()
|
||||
.take()
|
||||
.map(|isolate| Arc::new(LogStreamSenderImpl { isolate }) as Arc<dyn StreamLogSender>);
|
||||
|
||||
// let isolate = allo_isolate::Isolate::new(port);
|
||||
*APPFLOWY_CORE.0.lock() = runtime.block_on(async move {
|
||||
*APPFLOWY_CORE.0.lock().unwrap() = runtime.block_on(async move {
|
||||
Some(AppFlowyCore::new(config, cloned_runtime, log_stream).await)
|
||||
// isolate.post("".to_string());
|
||||
});
|
||||
@ -168,7 +169,7 @@ pub extern "C" fn set_stream_port(notification_port: i64) -> i32 {
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn set_log_stream_port(port: i64) -> i32 {
|
||||
*LOG_STREAM_ISOLATE.lock() = Some(Isolate::new(port));
|
||||
*LOG_STREAM_ISOLATE.lock().unwrap() = Some(Isolate::new(port));
|
||||
|
||||
0
|
||||
}
|
||||
|
@ -37,7 +37,6 @@ thread-id = "3.3.0"
|
||||
bytes.workspace = true
|
||||
nanoid = "0.4.0"
|
||||
tracing.workspace = true
|
||||
parking_lot.workspace = true
|
||||
uuid.workspace = true
|
||||
collab = { workspace = true }
|
||||
collab-document = { workspace = true }
|
||||
|
@ -24,7 +24,7 @@ impl EventIntegrationTest {
|
||||
self
|
||||
.appflowy_core
|
||||
.database_manager
|
||||
.get_database_with_view_id(database_view_id)
|
||||
.get_database_editor_with_view_id(database_view_id)
|
||||
.await
|
||||
.unwrap()
|
||||
.export_csv(CSVFormat::Original)
|
||||
|
@ -42,10 +42,10 @@ impl DocumentEventTest {
|
||||
.event_test
|
||||
.appflowy_core
|
||||
.document_manager
|
||||
.get_opened_document(doc_id)
|
||||
.editable_document(doc_id)
|
||||
.await
|
||||
.unwrap();
|
||||
let guard = doc.lock();
|
||||
let guard = doc.read().await;
|
||||
guard.encode_collab().unwrap()
|
||||
}
|
||||
|
||||
|
@ -1,6 +1,3 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use collab::core::collab::MutexCollab;
|
||||
use collab::core::origin::CollabOrigin;
|
||||
use collab::preclude::updates::decoder::Decode;
|
||||
use collab::preclude::{Collab, Update};
|
||||
@ -107,17 +104,13 @@ impl EventIntegrationTest {
|
||||
}
|
||||
|
||||
pub fn assert_document_data_equal(doc_state: &[u8], doc_id: &str, expected: DocumentData) {
|
||||
let collab = MutexCollab::new(Collab::new_with_origin(
|
||||
CollabOrigin::Server,
|
||||
doc_id,
|
||||
vec![],
|
||||
false,
|
||||
));
|
||||
collab.lock().with_origin_transact_mut(|txn| {
|
||||
let mut collab = Collab::new_with_origin(CollabOrigin::Server, doc_id, vec![], false);
|
||||
{
|
||||
let update = Update::decode_v1(doc_state).unwrap();
|
||||
let mut txn = collab.transact_mut();
|
||||
txn.apply_update(update);
|
||||
});
|
||||
let document = Document::open(Arc::new(collab)).unwrap();
|
||||
};
|
||||
let document = Document::open(collab).unwrap();
|
||||
let actual = document.get_document_data().unwrap();
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
|
||||
use crate::EventIntegrationTest;
|
||||
use flowy_user::errors::{internal_error, FlowyError};
|
||||
use lib_dispatch::prelude::{
|
||||
AFPluginDispatcher, AFPluginEventResponse, AFPluginFromBytes, AFPluginRequest, ToBytes, *,
|
||||
@ -9,8 +10,6 @@ use std::{
|
||||
hash::Hash,
|
||||
};
|
||||
|
||||
use crate::EventIntegrationTest;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct EventBuilder {
|
||||
context: TestContext,
|
||||
|
@ -166,10 +166,14 @@ impl EventIntegrationTest {
|
||||
.await;
|
||||
}
|
||||
|
||||
pub fn get_folder_data(&self) -> FolderData {
|
||||
let mutex_folder = self.appflowy_core.folder_manager.get_mutex_folder().clone();
|
||||
let folder_lock_guard = mutex_folder.read();
|
||||
let folder = folder_lock_guard.as_ref().unwrap();
|
||||
pub async fn get_folder_data(&self) -> FolderData {
|
||||
let mutex_folder = self
|
||||
.appflowy_core
|
||||
.folder_manager
|
||||
.get_mutex_folder()
|
||||
.clone()
|
||||
.unwrap();
|
||||
let folder = mutex_folder.read().await;
|
||||
let workspace_id = self.appflowy_core.user_manager.workspace_id().unwrap();
|
||||
folder.get_folder_data(&workspace_id).clone().unwrap()
|
||||
}
|
||||
|
@ -6,11 +6,11 @@ use collab_entity::CollabType;
|
||||
use std::env::temp_dir;
|
||||
use std::path::PathBuf;
|
||||
use std::rc::Rc;
|
||||
use std::sync::atomic::{AtomicBool, AtomicU8, Ordering};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use nanoid::nanoid;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use semver::Version;
|
||||
use tokio::select;
|
||||
use tokio::time::sleep;
|
||||
@ -35,10 +35,10 @@ pub mod user_event;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct EventIntegrationTest {
|
||||
pub authenticator: Arc<RwLock<AuthenticatorPB>>,
|
||||
pub authenticator: Arc<AtomicU8>,
|
||||
pub appflowy_core: AppFlowyCore,
|
||||
#[allow(dead_code)]
|
||||
cleaner: Arc<Mutex<Cleaner>>,
|
||||
cleaner: Arc<Cleaner>,
|
||||
pub notification_sender: TestNotificationSender,
|
||||
}
|
||||
|
||||
@ -57,7 +57,7 @@ impl EventIntegrationTest {
|
||||
let clean_path = config.storage_path.clone();
|
||||
let inner = init_core(config).await;
|
||||
let notification_sender = TestNotificationSender::new();
|
||||
let authenticator = Arc::new(RwLock::new(AuthenticatorPB::Local));
|
||||
let authenticator = Arc::new(AtomicU8::new(AuthenticatorPB::Local as u8));
|
||||
register_notification_sender(notification_sender.clone());
|
||||
|
||||
// In case of dropping the runtime that runs the core, we need to forget the dispatcher
|
||||
@ -66,7 +66,7 @@ impl EventIntegrationTest {
|
||||
appflowy_core: inner,
|
||||
authenticator,
|
||||
notification_sender,
|
||||
cleaner: Arc::new(Mutex::new(Cleaner::new(PathBuf::from(clean_path)))),
|
||||
cleaner: Arc::new(Cleaner::new(PathBuf::from(clean_path))),
|
||||
}
|
||||
}
|
||||
|
||||
@ -93,7 +93,7 @@ impl EventIntegrationTest {
|
||||
}
|
||||
|
||||
pub fn skip_clean(&mut self) {
|
||||
self.cleaner.lock().should_clean = false;
|
||||
self.cleaner.should_clean.store(false, Ordering::Release);
|
||||
}
|
||||
|
||||
pub fn instance_name(&self) -> String {
|
||||
@ -154,7 +154,7 @@ pub fn document_data_from_document_doc_state(doc_id: &str, doc_state: Vec<u8>) -
|
||||
}
|
||||
|
||||
pub fn document_from_document_doc_state(doc_id: &str, doc_state: Vec<u8>) -> Document {
|
||||
Document::from_doc_state(
|
||||
Document::open_with_options(
|
||||
CollabOrigin::Empty,
|
||||
DataSource::DocStateV1(doc_state),
|
||||
doc_id,
|
||||
@ -177,17 +177,16 @@ impl std::ops::Deref for EventIntegrationTest {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Cleaner {
|
||||
dir: PathBuf,
|
||||
should_clean: bool,
|
||||
should_clean: AtomicBool,
|
||||
}
|
||||
|
||||
impl Cleaner {
|
||||
pub fn new(dir: PathBuf) -> Self {
|
||||
Self {
|
||||
dir,
|
||||
should_clean: true,
|
||||
should_clean: AtomicBool::new(true),
|
||||
}
|
||||
}
|
||||
|
||||
@ -198,7 +197,7 @@ impl Cleaner {
|
||||
|
||||
impl Drop for Cleaner {
|
||||
fn drop(&mut self) {
|
||||
if self.should_clean {
|
||||
if self.should_clean.load(Ordering::Acquire) {
|
||||
Self::cleanup(&self.dir)
|
||||
}
|
||||
}
|
||||
|
@ -1,11 +1,12 @@
|
||||
use std::collections::HashMap;
|
||||
use std::convert::TryFrom;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
|
||||
use bytes::Bytes;
|
||||
|
||||
use flowy_folder::entities::{RepeatedViewPB, WorkspacePB};
|
||||
use nanoid::nanoid;
|
||||
|
||||
use protobuf::ProtobufError;
|
||||
use tokio::sync::broadcast::{channel, Sender};
|
||||
use tracing::error;
|
||||
@ -101,21 +102,6 @@ impl EventIntegrationTest {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn supabase_party_sign_up(&self) -> UserProfilePB {
|
||||
let map = third_party_sign_up_param(Uuid::new_v4().to_string());
|
||||
let payload = OauthSignInPB {
|
||||
map,
|
||||
authenticator: AuthenticatorPB::Supabase,
|
||||
};
|
||||
|
||||
EventBuilder::new(self.clone())
|
||||
.event(UserEvent::OauthSignIn)
|
||||
.payload(payload)
|
||||
.async_send()
|
||||
.await
|
||||
.parse::<UserProfilePB>()
|
||||
}
|
||||
|
||||
pub async fn sign_out(&self) {
|
||||
EventBuilder::new(self.clone())
|
||||
.event(UserEvent::SignOut)
|
||||
@ -124,7 +110,7 @@ impl EventIntegrationTest {
|
||||
}
|
||||
|
||||
pub fn set_auth_type(&self, auth_type: AuthenticatorPB) {
|
||||
*self.authenticator.write() = auth_type;
|
||||
self.authenticator.store(auth_type as u8, Ordering::Release);
|
||||
}
|
||||
|
||||
pub async fn init_anon_user(&self) -> UserProfilePB {
|
||||
@ -178,33 +164,6 @@ impl EventIntegrationTest {
|
||||
Ok(user_profile)
|
||||
}
|
||||
|
||||
pub async fn supabase_sign_up_with_uuid(
|
||||
&self,
|
||||
uuid: &str,
|
||||
email: Option<String>,
|
||||
) -> FlowyResult<UserProfilePB> {
|
||||
let mut map = HashMap::new();
|
||||
map.insert(USER_UUID.to_string(), uuid.to_string());
|
||||
map.insert(USER_DEVICE_ID.to_string(), uuid.to_string());
|
||||
map.insert(
|
||||
USER_EMAIL.to_string(),
|
||||
email.unwrap_or_else(|| format!("{}@appflowy.io", nanoid!(10))),
|
||||
);
|
||||
let payload = OauthSignInPB {
|
||||
map,
|
||||
authenticator: AuthenticatorPB::Supabase,
|
||||
};
|
||||
|
||||
let user_profile = EventBuilder::new(self.clone())
|
||||
.event(UserEvent::OauthSignIn)
|
||||
.payload(payload)
|
||||
.async_send()
|
||||
.await
|
||||
.try_parse::<UserProfilePB>()?;
|
||||
|
||||
Ok(user_profile)
|
||||
}
|
||||
|
||||
pub async fn import_appflowy_data(
|
||||
&self,
|
||||
path: String,
|
||||
|
@ -207,6 +207,22 @@ impl FolderTest {
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// pub async fn duplicate_view(&self, view_id: &str) {
|
||||
// let payload = DuplicateViewPayloadPB {
|
||||
// view_id: view_id.to_string(),
|
||||
// open_after_duplicate: false,
|
||||
// include_children: false,
|
||||
// parent_view_id: None,
|
||||
// suffix: None,
|
||||
// sync_after_create: false,
|
||||
// };
|
||||
// EventBuilder::new(self.sdk.clone())
|
||||
// .event(DuplicateView)
|
||||
// .payload(payload)
|
||||
// .async_send()
|
||||
// .await;
|
||||
// }
|
||||
}
|
||||
pub async fn create_workspace(sdk: &EventIntegrationTest, name: &str, desc: &str) -> WorkspacePB {
|
||||
let request = CreateWorkspacePayloadPB {
|
||||
|
@ -125,7 +125,7 @@ async fn af_cloud_open_workspace_test() {
|
||||
assert_eq!(views[2].name, "D");
|
||||
|
||||
// simulate open workspace and check if the views are correct
|
||||
for i in 0..30 {
|
||||
for i in 0..10 {
|
||||
if i % 2 == 0 {
|
||||
test.open_workspace(&first_workspace.id).await;
|
||||
sleep(Duration::from_millis(300)).await;
|
||||
@ -142,16 +142,16 @@ async fn af_cloud_open_workspace_test() {
|
||||
}
|
||||
|
||||
test.open_workspace(&first_workspace.id).await;
|
||||
let views = test.get_all_workspace_views().await;
|
||||
assert_eq!(views[0].name, default_document_name);
|
||||
assert_eq!(views[1].name, "A");
|
||||
assert_eq!(views[2].name, "B");
|
||||
let views_1 = test.get_all_workspace_views().await;
|
||||
assert_eq!(views_1[0].name, default_document_name);
|
||||
assert_eq!(views_1[1].name, "A");
|
||||
assert_eq!(views_1[2].name, "B");
|
||||
|
||||
test.open_workspace(&second_workspace.id).await;
|
||||
let views = test.get_all_workspace_views().await;
|
||||
assert_eq!(views[0].name, default_document_name);
|
||||
assert_eq!(views[1].name, "C");
|
||||
assert_eq!(views[2].name, "D");
|
||||
let views_2 = test.get_all_workspace_views().await;
|
||||
assert_eq!(views_2[0].name, default_document_name);
|
||||
assert_eq!(views_2[1].name, "C");
|
||||
assert_eq!(views_2[2].name, "D");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
@ -240,7 +240,7 @@ async fn af_cloud_different_open_same_workspace_test() {
|
||||
// Retrieve and verify the views associated with the workspace.
|
||||
let views = folder.get_views_belong_to(&shared_workspace_id);
|
||||
let folder_workspace_id = folder.get_workspace_id();
|
||||
assert_eq!(folder_workspace_id, shared_workspace_id);
|
||||
assert_eq!(folder_workspace_id, Some(shared_workspace_id));
|
||||
|
||||
assert_eq!(views.len(), 1, "only get: {:?}", views); // Expecting two views.
|
||||
assert_eq!(views[0].name, "Getting started");
|
||||
|
@ -2,16 +2,11 @@ use std::fs::{create_dir_all, File, OpenOptions};
|
||||
use std::io::copy;
|
||||
use std::ops::Deref;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use std::{fs, io};
|
||||
|
||||
use anyhow::Error;
|
||||
use collab_folder::FolderData;
|
||||
use collab_plugins::cloud_storage::RemoteCollabStorage;
|
||||
use nanoid::nanoid;
|
||||
use tokio::sync::mpsc::Receiver;
|
||||
|
||||
use tokio::time::timeout;
|
||||
use uuid::Uuid;
|
||||
use walkdir::WalkDir;
|
||||
@ -21,22 +16,9 @@ use zip::{CompressionMethod, ZipArchive, ZipWriter};
|
||||
use event_integration_test::event_builder::EventBuilder;
|
||||
use event_integration_test::Cleaner;
|
||||
use event_integration_test::EventIntegrationTest;
|
||||
use flowy_database_pub::cloud::DatabaseCloudService;
|
||||
use flowy_folder_pub::cloud::{FolderCloudService, FolderSnapshot};
|
||||
use flowy_server::supabase::api::*;
|
||||
use flowy_server::{AppFlowyEncryption, EncryptionImpl};
|
||||
use flowy_server_pub::supabase_config::SupabaseConfiguration;
|
||||
use flowy_user::entities::{AuthenticatorPB, UpdateUserProfilePayloadPB};
|
||||
use flowy_user::entities::UpdateUserProfilePayloadPB;
|
||||
use flowy_user::errors::FlowyError;
|
||||
|
||||
use flowy_user::event_map::UserEvent::*;
|
||||
use flowy_user_pub::cloud::UserCloudService;
|
||||
use flowy_user_pub::entities::Authenticator;
|
||||
|
||||
pub fn get_supabase_config() -> Option<SupabaseConfiguration> {
|
||||
dotenv::from_path(".env.ci").ok()?;
|
||||
SupabaseConfiguration::from_env().ok()
|
||||
}
|
||||
|
||||
pub struct FlowySupabaseTest {
|
||||
event_test: EventIntegrationTest,
|
||||
@ -44,13 +26,7 @@ pub struct FlowySupabaseTest {
|
||||
|
||||
impl FlowySupabaseTest {
|
||||
pub async fn new() -> Option<Self> {
|
||||
let _ = get_supabase_config()?;
|
||||
let event_test = EventIntegrationTest::new().await;
|
||||
event_test.set_auth_type(AuthenticatorPB::Supabase);
|
||||
event_test
|
||||
.server_provider
|
||||
.set_authenticator(Authenticator::Supabase);
|
||||
|
||||
Some(Self { event_test })
|
||||
}
|
||||
|
||||
@ -79,93 +55,6 @@ pub async fn receive_with_timeout<T>(mut receiver: Receiver<T>, duration: Durati
|
||||
timeout(duration, receiver.recv()).await.ok()?
|
||||
}
|
||||
|
||||
pub fn get_supabase_ci_config() -> Option<SupabaseConfiguration> {
|
||||
dotenv::from_filename("./.env.ci").ok()?;
|
||||
SupabaseConfiguration::from_env().ok()
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn get_supabase_dev_config() -> Option<SupabaseConfiguration> {
|
||||
dotenv::from_filename("./.env.dev").ok()?;
|
||||
SupabaseConfiguration::from_env().ok()
|
||||
}
|
||||
|
||||
pub fn collab_service() -> Arc<dyn RemoteCollabStorage> {
|
||||
let (server, encryption_impl) = appflowy_server(None);
|
||||
Arc::new(SupabaseCollabStorageImpl::new(
|
||||
server,
|
||||
None,
|
||||
Arc::downgrade(&encryption_impl),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn database_service() -> Arc<dyn DatabaseCloudService> {
|
||||
let (server, _encryption_impl) = appflowy_server(None);
|
||||
Arc::new(SupabaseDatabaseServiceImpl::new(server))
|
||||
}
|
||||
|
||||
pub fn user_auth_service() -> Arc<dyn UserCloudService> {
|
||||
let (server, _encryption_impl) = appflowy_server(None);
|
||||
Arc::new(SupabaseUserServiceImpl::new(server, vec![], None))
|
||||
}
|
||||
|
||||
pub fn folder_service() -> Arc<dyn FolderCloudService> {
|
||||
let (server, _encryption_impl) = appflowy_server(None);
|
||||
Arc::new(SupabaseFolderServiceImpl::new(server))
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub fn encryption_folder_service(
|
||||
secret: Option<String>,
|
||||
) -> (Arc<dyn FolderCloudService>, Arc<dyn AppFlowyEncryption>) {
|
||||
let (server, encryption_impl) = appflowy_server(secret);
|
||||
let service = Arc::new(SupabaseFolderServiceImpl::new(server));
|
||||
(service, encryption_impl)
|
||||
}
|
||||
|
||||
pub fn encryption_collab_service(
|
||||
secret: Option<String>,
|
||||
) -> (Arc<dyn RemoteCollabStorage>, Arc<dyn AppFlowyEncryption>) {
|
||||
let (server, encryption_impl) = appflowy_server(secret);
|
||||
let service = Arc::new(SupabaseCollabStorageImpl::new(
|
||||
server,
|
||||
None,
|
||||
Arc::downgrade(&encryption_impl),
|
||||
));
|
||||
(service, encryption_impl)
|
||||
}
|
||||
|
||||
pub async fn get_folder_data_from_server(
|
||||
uid: &i64,
|
||||
folder_id: &str,
|
||||
encryption_secret: Option<String>,
|
||||
) -> Result<Option<FolderData>, Error> {
|
||||
let (cloud_service, _encryption) = encryption_folder_service(encryption_secret);
|
||||
cloud_service.get_folder_data(folder_id, uid).await
|
||||
}
|
||||
|
||||
pub async fn get_folder_snapshots(
|
||||
folder_id: &str,
|
||||
encryption_secret: Option<String>,
|
||||
) -> Vec<FolderSnapshot> {
|
||||
let (cloud_service, _encryption) = encryption_folder_service(encryption_secret);
|
||||
cloud_service
|
||||
.get_folder_snapshots(folder_id, 10)
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub fn appflowy_server(
|
||||
encryption_secret: Option<String>,
|
||||
) -> (SupabaseServerServiceImpl, Arc<dyn AppFlowyEncryption>) {
|
||||
let config = SupabaseConfiguration::from_env().unwrap();
|
||||
let encryption_impl: Arc<dyn AppFlowyEncryption> =
|
||||
Arc::new(EncryptionImpl::new(encryption_secret));
|
||||
let encryption = Arc::downgrade(&encryption_impl);
|
||||
let server = Arc::new(RESTfulPostgresServer::new(config, encryption));
|
||||
(SupabaseServerServiceImpl::new(server), encryption_impl)
|
||||
}
|
||||
|
||||
/// zip the asset to the destination
|
||||
/// Zips the specified directory into a zip file.
|
||||
///
|
||||
|
@ -19,6 +19,7 @@ uuid.workspace = true
|
||||
strum_macros = "0.21"
|
||||
protobuf.workspace = true
|
||||
bytes.workspace = true
|
||||
arc-swap.workspace = true
|
||||
validator = { workspace = true, features = ["derive"] }
|
||||
lib-infra = { workspace = true, features = ["isolate_flutter"] }
|
||||
flowy-ai-pub.workspace = true
|
||||
@ -33,7 +34,6 @@ serde_json = { workspace = true }
|
||||
anyhow = "1.0.86"
|
||||
tokio-stream = "0.1.15"
|
||||
tokio-util = { workspace = true, features = ["full"] }
|
||||
parking_lot.workspace = true
|
||||
appflowy-local-ai = { version = "0.1.0", features = ["verbose"] }
|
||||
appflowy-plugin = { version = "0.1.0" }
|
||||
reqwest = "0.11.27"
|
||||
|
@ -67,7 +67,8 @@ impl AIManager {
|
||||
}
|
||||
|
||||
pub async fn initialize(&self, _workspace_id: &str) -> Result<(), FlowyError> {
|
||||
self.local_ai_controller.refresh().await?;
|
||||
// Ignore following error
|
||||
let _ = self.local_ai_controller.refresh().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -17,8 +17,8 @@ use lib_infra::async_trait::async_trait;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::stream_message::StreamMessage;
|
||||
use arc_swap::ArcSwapOption;
|
||||
use futures_util::SinkExt;
|
||||
use parking_lot::Mutex;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use std::ops::Deref;
|
||||
@ -47,7 +47,7 @@ const LOCAL_AI_SETTING_KEY: &str = "appflowy_local_ai_setting:v0";
|
||||
pub struct LocalAIController {
|
||||
local_ai: Arc<AppFlowyLocalAI>,
|
||||
local_ai_resource: Arc<LocalAIResourceController>,
|
||||
current_chat_id: Mutex<Option<String>>,
|
||||
current_chat_id: ArcSwapOption<String>,
|
||||
store_preferences: Arc<KVStorePreferences>,
|
||||
user_service: Arc<dyn AIUserService>,
|
||||
}
|
||||
@ -80,7 +80,7 @@ impl LocalAIController {
|
||||
res_impl,
|
||||
tx,
|
||||
));
|
||||
let current_chat_id = Mutex::new(None);
|
||||
let current_chat_id = ArcSwapOption::default();
|
||||
|
||||
let mut running_state_rx = local_ai.subscribe_running_state();
|
||||
let cloned_llm_res = llm_res.clone();
|
||||
@ -205,12 +205,14 @@ impl LocalAIController {
|
||||
|
||||
// Only keep one chat open at a time. Since loading multiple models at the same time will cause
|
||||
// memory issues.
|
||||
if let Some(current_chat_id) = self.current_chat_id.lock().as_ref() {
|
||||
if let Some(current_chat_id) = self.current_chat_id.load().as_ref() {
|
||||
debug!("[AI Plugin] close previous chat: {}", current_chat_id);
|
||||
self.close_chat(current_chat_id);
|
||||
}
|
||||
|
||||
*self.current_chat_id.lock() = Some(chat_id.to_string());
|
||||
self
|
||||
.current_chat_id
|
||||
.store(Some(Arc::new(chat_id.to_string())));
|
||||
let chat_id = chat_id.to_string();
|
||||
let weak_ctrl = Arc::downgrade(&self.local_ai);
|
||||
tokio::spawn(async move {
|
||||
@ -534,7 +536,7 @@ impl LLMResourceService for LLMResourceServiceImpl {
|
||||
fn store_setting(&self, setting: LLMSetting) -> Result<(), Error> {
|
||||
self
|
||||
.store_preferences
|
||||
.set_object(LOCAL_AI_SETTING_KEY, setting)?;
|
||||
.set_object(LOCAL_AI_SETTING_KEY, &setting)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
@ -9,8 +9,8 @@ use flowy_error::{ErrorCode, FlowyError, FlowyResult};
|
||||
use futures::Sink;
|
||||
use futures_util::SinkExt;
|
||||
use lib_infra::async_trait::async_trait;
|
||||
use parking_lot::RwLock;
|
||||
|
||||
use arc_swap::ArcSwapOption;
|
||||
use lib_infra::util::{get_operating_system, OperatingSystem};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
@ -64,10 +64,10 @@ impl DownloadTask {
|
||||
pub struct LocalAIResourceController {
|
||||
user_service: Arc<dyn AIUserService>,
|
||||
resource_service: Arc<dyn LLMResourceService>,
|
||||
llm_setting: RwLock<Option<LLMSetting>>,
|
||||
llm_setting: ArcSwapOption<LLMSetting>,
|
||||
// The ai_config will be set when user try to get latest local ai config from server
|
||||
ai_config: RwLock<Option<LocalAIConfig>>,
|
||||
download_task: Arc<RwLock<Option<DownloadTask>>>,
|
||||
ai_config: ArcSwapOption<LocalAIConfig>,
|
||||
download_task: Arc<ArcSwapOption<DownloadTask>>,
|
||||
resource_notify: tokio::sync::mpsc::Sender<()>,
|
||||
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
|
||||
#[allow(dead_code)]
|
||||
@ -82,7 +82,7 @@ impl LocalAIResourceController {
|
||||
resource_notify: tokio::sync::mpsc::Sender<()>,
|
||||
) -> Self {
|
||||
let (offline_app_state_sender, _) = tokio::sync::broadcast::channel(1);
|
||||
let llm_setting = RwLock::new(resource_service.retrieve_setting());
|
||||
let llm_setting = resource_service.retrieve_setting().map(Arc::new);
|
||||
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
|
||||
let mut offline_app_disk_watch: Option<WatchContext> = None;
|
||||
|
||||
@ -109,7 +109,7 @@ impl LocalAIResourceController {
|
||||
Self {
|
||||
user_service,
|
||||
resource_service: Arc::new(resource_service),
|
||||
llm_setting,
|
||||
llm_setting: ArcSwapOption::new(llm_setting),
|
||||
ai_config: Default::default(),
|
||||
download_task: Default::default(),
|
||||
resource_notify,
|
||||
@ -125,7 +125,7 @@ impl LocalAIResourceController {
|
||||
}
|
||||
|
||||
fn set_llm_setting(&self, llm_setting: LLMSetting) {
|
||||
*self.llm_setting.write() = Some(llm_setting);
|
||||
self.llm_setting.store(Some(llm_setting.into()));
|
||||
}
|
||||
|
||||
/// Returns true when all resources are downloaded and ready to use.
|
||||
@ -153,7 +153,7 @@ impl LocalAIResourceController {
|
||||
return Err(FlowyError::local_ai().with_context("No model found"));
|
||||
}
|
||||
|
||||
*self.ai_config.write() = Some(ai_config.clone());
|
||||
self.ai_config.store(Some(ai_config.clone().into()));
|
||||
let selected_model = self.select_model(&ai_config)?;
|
||||
|
||||
let llm_setting = LLMSetting {
|
||||
@ -173,7 +173,7 @@ impl LocalAIResourceController {
|
||||
pub fn use_local_llm(&self, llm_id: i64) -> FlowyResult<LocalModelResourcePB> {
|
||||
let (app, llm_model) = self
|
||||
.ai_config
|
||||
.read()
|
||||
.load()
|
||||
.as_ref()
|
||||
.and_then(|config| {
|
||||
config
|
||||
@ -209,7 +209,7 @@ impl LocalAIResourceController {
|
||||
|
||||
let pending_resources = self.calculate_pending_resources().ok()?;
|
||||
let is_ready = pending_resources.is_empty();
|
||||
let is_downloading = self.download_task.read().is_some();
|
||||
let is_downloading = self.download_task.load().is_some();
|
||||
let pending_resources: Vec<_> = pending_resources
|
||||
.into_iter()
|
||||
.flat_map(|res| match res {
|
||||
@ -243,7 +243,7 @@ impl LocalAIResourceController {
|
||||
|
||||
/// Returns true when all resources are downloaded and ready to use.
|
||||
pub fn calculate_pending_resources(&self) -> FlowyResult<Vec<PendingResource>> {
|
||||
match self.llm_setting.read().as_ref() {
|
||||
match self.llm_setting.load().as_ref() {
|
||||
None => Err(FlowyError::local_ai().with_context("Can't find any llm config")),
|
||||
Some(llm_setting) => {
|
||||
let mut resources = vec![];
|
||||
@ -296,7 +296,7 @@ impl LocalAIResourceController {
|
||||
info!("notify download finish, need to reload resources");
|
||||
let _ = resource_notify.send(()).await;
|
||||
if let Some(download_task) = weak_download_task.upgrade() {
|
||||
if let Some(task) = download_task.write().take() {
|
||||
if let Some(task) = download_task.swap(None) {
|
||||
task.cancel();
|
||||
}
|
||||
}
|
||||
@ -307,25 +307,27 @@ impl LocalAIResourceController {
|
||||
};
|
||||
|
||||
// return immediately if download task already exists
|
||||
if let Some(download_task) = self.download_task.read().as_ref() {
|
||||
trace!(
|
||||
"Download task already exists, return the task id: {}",
|
||||
task_id
|
||||
);
|
||||
progress_notify(download_task.tx.subscribe());
|
||||
return Ok(task_id);
|
||||
{
|
||||
let guard = self.download_task.load();
|
||||
if let Some(download_task) = &*guard {
|
||||
trace!(
|
||||
"Download task already exists, return the task id: {}",
|
||||
task_id
|
||||
);
|
||||
progress_notify(download_task.tx.subscribe());
return Ok(task_id);
}
}

// If download task is not exists, create a new download task.
info!("[LLM Resource] Start new download task");
let llm_setting = self
.llm_setting
.read()
.clone()
.load_full()
.ok_or_else(|| FlowyError::local_ai().with_context("No local ai config found"))?;

let download_task = DownloadTask::new();
*self.download_task.write() = Some(download_task.clone());
let download_task = Arc::new(DownloadTask::new());
self.download_task.store(Some(download_task.clone()));
progress_notify(download_task.tx.subscribe());

let model_dir = self.user_model_folder()?;
@ -339,15 +341,15 @@ impl LocalAIResourceController {
// After download the plugin, start downloading models
let chat_model_file = (
model_dir.join(&llm_setting.llm_model.chat_model.file_name),
llm_setting.llm_model.chat_model.file_name,
llm_setting.llm_model.chat_model.name,
llm_setting.llm_model.chat_model.download_url,
&llm_setting.llm_model.chat_model.file_name,
&llm_setting.llm_model.chat_model.name,
&llm_setting.llm_model.chat_model.download_url,
);
let embedding_model_file = (
model_dir.join(&llm_setting.llm_model.embedding_model.file_name),
llm_setting.llm_model.embedding_model.file_name,
llm_setting.llm_model.embedding_model.name,
llm_setting.llm_model.embedding_model.download_url,
&llm_setting.llm_model.embedding_model.file_name,
&llm_setting.llm_model.embedding_model.name,
&llm_setting.llm_model.embedding_model.download_url,
);
for (file_path, file_name, model_name, url) in [chat_model_file, embedding_model_file] {
if file_path.exists() {
@ -370,9 +372,9 @@ impl LocalAIResourceController {
}
});
match download_model(
&url,
url,
&model_dir,
&file_name,
file_name,
Some(progress),
Some(download_task.cancel_token.clone()),
)
@ -400,7 +402,7 @@ impl LocalAIResourceController {
}

pub fn cancel_download(&self) -> FlowyResult<()> {
if let Some(cancel_token) = self.download_task.write().take() {
if let Some(cancel_token) = self.download_task.swap(None) {
info!("[LLM Resource] Cancel download");
cancel_token.cancel();
}
@ -416,9 +418,7 @@ impl LocalAIResourceController {

let llm_setting = self
.llm_setting
.read()
.as_ref()
.cloned()
.load_full()
.ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?;

let model_dir = self.user_model_folder()?;
@ -475,16 +475,14 @@ impl LocalAIResourceController {
}

pub fn get_selected_model(&self) -> Option<LLMModel> {
self
.llm_setting
.read()
.as_ref()
.map(|setting| setting.llm_model.clone())
let setting = self.llm_setting.load();
Some(setting.as_ref()?.llm_model.clone())
}

/// Selects the appropriate model based on the current settings or defaults to the first model.
fn select_model(&self, ai_config: &LocalAIConfig) -> FlowyResult<LLMModel> {
let selected_model = match self.llm_setting.read().as_ref() {
let llm_setting = self.llm_setting.load();
let selected_model = match &*llm_setting {
None => ai_config.models[0].clone(),
Some(llm_setting) => {
match ai_config

@ -44,8 +44,9 @@ bytes.workspace = true
tokio = { workspace = true, features = ["full"] }
tokio-stream = { workspace = true, features = ["sync"] }
console-subscriber = { version = "0.2", optional = true }
parking_lot.workspace = true
anyhow.workspace = true
dashmap.workspace = true
arc-swap.workspace = true
base64 = "0.21.5"

lib-infra = { workspace = true }

@ -6,7 +6,6 @@ use semver::Version;
use tracing::{error, info};

use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_user::services::entities::URL_SAFE_ENGINE;
use lib_infra::file_util::copy_dir_recursive;
use lib_infra::util::OperatingSystem;
@ -85,13 +84,7 @@ impl AppFlowyCoreConfig {
) -> Self {
let cloud_config = AFCloudConfiguration::from_env().ok();
let storage_path = match &cloud_config {
None => {
let supabase_config = SupabaseConfiguration::from_env().ok();
match &supabase_config {
None => custom_application_path,
Some(config) => make_user_data_folder(&custom_application_path, &config.url),
}
},
None => custom_application_path,
Some(config) => make_user_data_folder(&custom_application_path, &config.base_url),
};
let log_filter = create_log_filter("info".to_owned(), vec![], OperatingSystem::from(&platform));

@ -11,7 +11,7 @@ use flowy_database2::DatabaseManager;
use flowy_document::entities::DocumentDataPB;
use flowy_document::manager::DocumentManager;
use flowy_document::parser::json::parser::JsonToDocumentParser;
use flowy_error::FlowyError;
use flowy_error::{FlowyError, FlowyResult};
use flowy_folder::entities::{CreateViewParams, ViewLayoutPB};
use flowy_folder::manager::{FolderManager, FolderUser};
use flowy_folder::share::ImportType;
@ -26,7 +26,6 @@ use flowy_sqlite::kv::KVStorePreferences;
use flowy_user::services::authenticate_user::AuthenticateUser;
use flowy_user::services::data_import::{load_collab_by_object_id, load_collab_by_object_ids};
use lib_dispatch::prelude::ToBytes;

use std::collections::HashMap;
use std::convert::TryFrom;
use std::sync::{Arc, Weak};
@ -111,6 +110,10 @@ impl FolderUser for FolderUserImpl {
fn collab_db(&self, uid: i64) -> Result<Weak<CollabKVDB>, FlowyError> {
self.upgrade_user()?.get_collab_db(uid)
}

fn is_folder_exist_on_disk(&self, uid: i64, workspace_id: &str) -> FlowyResult<bool> {
self.upgrade_user()?.is_collab_on_disk(uid, workspace_id)
}
}

struct DocumentFolderOperation(Arc<DocumentManager>);

@ -1,18 +1,17 @@
use std::collections::HashMap;
use arc_swap::ArcSwapOption;
use dashmap::DashMap;
use std::fmt::{Display, Formatter};
use std::sync::atomic::{AtomicBool, AtomicU8, Ordering};
use std::sync::{Arc, Weak};

use parking_lot::RwLock;
use serde_repr::*;

use flowy_error::{FlowyError, FlowyResult};
use flowy_server::af_cloud::define::ServerUser;
use flowy_server::af_cloud::AppFlowyCloudServer;
use flowy_server::local_server::{LocalServer, LocalServerDB};
use flowy_server::supabase::SupabaseServer;
use flowy_server::{AppFlowyEncryption, AppFlowyServer, EncryptionImpl};
use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_server_pub::AuthenticatorType;
use flowy_sqlite::kv::KVStorePreferences;
use flowy_user_pub::entities::*;
@ -26,12 +25,8 @@ pub enum Server {
/// Offline mode, no user authentication and the data is stored locally.
Local = 0,
/// AppFlowy Cloud server provider.
/// The [AppFlowy-Server](https://github.com/AppFlowy-IO/AppFlowy-Cloud) is still a work in
/// progress.
/// See: https://github.com/AppFlowy-IO/AppFlowy-Cloud
AppFlowyCloud = 1,
/// Supabase server provider.
/// It uses supabase postgresql database to store data and user authentication.
Supabase = 2,
}

impl Server {
@ -45,7 +40,6 @@ impl Display for Server {
match self {
Server::Local => write!(f, "Local"),
Server::AppFlowyCloud => write!(f, "AppFlowyCloud"),
Server::Supabase => write!(f, "Supabase"),
}
}
}
@ -56,16 +50,16 @@ impl Display for Server {
/// Each server implements the [AppFlowyServer] trait, which provides the [UserCloudService], etc.
pub struct ServerProvider {
config: AppFlowyCoreConfig,
providers: RwLock<HashMap<Server, Arc<dyn AppFlowyServer>>>,
pub(crate) encryption: RwLock<Arc<dyn AppFlowyEncryption>>,
providers: DashMap<Server, Arc<dyn AppFlowyServer>>,
pub(crate) encryption: Arc<dyn AppFlowyEncryption>,
#[allow(dead_code)]
pub(crate) store_preferences: Weak<KVStorePreferences>,
pub(crate) user_enable_sync: RwLock<bool>,
pub(crate) user_enable_sync: AtomicBool,

/// The authenticator type of the user.
authenticator: RwLock<Authenticator>,
authenticator: AtomicU8,
user: Arc<dyn ServerUser>,
pub(crate) uid: Arc<RwLock<Option<i64>>>,
pub(crate) uid: Arc<ArcSwapOption<i64>>,
}

impl ServerProvider {
@ -79,10 +73,10 @@ impl ServerProvider {
let encryption = EncryptionImpl::new(None);
Self {
config,
providers: RwLock::new(HashMap::new()),
user_enable_sync: RwLock::new(true),
authenticator: RwLock::new(Authenticator::from(server)),
encryption: RwLock::new(Arc::new(encryption)),
providers: DashMap::new(),
user_enable_sync: AtomicBool::new(true),
authenticator: AtomicU8::new(Authenticator::from(server) as u8),
encryption: Arc::new(encryption),
store_preferences,
uid: Default::default(),
user,
@ -90,33 +84,34 @@ impl ServerProvider {
}

pub fn get_server_type(&self) -> Server {
match &*self.authenticator.read() {
match Authenticator::from(self.authenticator.load(Ordering::Acquire) as i32) {
Authenticator::Local => Server::Local,
Authenticator::AppFlowyCloud => Server::AppFlowyCloud,
Authenticator::Supabase => Server::Supabase,
}
}

pub fn set_authenticator(&self, authenticator: Authenticator) {
let old_server_type = self.get_server_type();
*self.authenticator.write() = authenticator;
self
.authenticator
.store(authenticator as u8, Ordering::Release);
let new_server_type = self.get_server_type();

if old_server_type != new_server_type {
self.providers.write().remove(&old_server_type);
self.providers.remove(&old_server_type);
}
}

pub fn get_authenticator(&self) -> Authenticator {
self.authenticator.read().clone()
Authenticator::from(self.authenticator.load(Ordering::Acquire) as i32)
}

/// Returns a [AppFlowyServer] trait implementation base on the provider_type.
pub fn get_server(&self) -> FlowyResult<Arc<dyn AppFlowyServer>> {
let server_type = self.get_server_type();

if let Some(provider) = self.providers.read().get(&server_type) {
return Ok(provider.clone());
if let Some(provider) = self.providers.get(&server_type) {
return Ok(provider.value().clone());
}

let server = match server_type {
@ -131,7 +126,7 @@ impl ServerProvider {
let config = AFCloudConfiguration::from_env()?;
let server = Arc::new(AppFlowyCloudServer::new(
config,
*self.user_enable_sync.read(),
self.user_enable_sync.load(Ordering::Acquire),
self.config.device_id.clone(),
self.config.app_version.clone(),
self.user.clone(),
@ -139,25 +134,9 @@ impl ServerProvider {

Ok::<Arc<dyn AppFlowyServer>, FlowyError>(server)
},
Server::Supabase => {
let config = SupabaseConfiguration::from_env()?;
let uid = self.uid.clone();
tracing::trace!("🔑Supabase config: {:?}", config);
let encryption = Arc::downgrade(&*self.encryption.read());
Ok::<Arc<dyn AppFlowyServer>, FlowyError>(Arc::new(SupabaseServer::new(
uid,
config,
*self.user_enable_sync.read(),
self.config.device_id.clone(),
encryption,
)))
},
}?;

self
.providers
.write()
.insert(server_type.clone(), server.clone());
self.providers.insert(server_type.clone(), server.clone());
Ok(server)
}
}
@ -167,7 +146,6 @@ impl From<Authenticator> for Server {
match auth_provider {
Authenticator::Local => Server::Local,
Authenticator::AppFlowyCloud => Server::AppFlowyCloud,
Authenticator::Supabase => Server::Supabase,
}
}
}
@ -177,7 +155,6 @@ impl From<Server> for Authenticator {
match ty {
Server::Local => Authenticator::Local,
Server::AppFlowyCloud => Authenticator::AppFlowyCloud,
Server::Supabase => Authenticator::Supabase,
}
}
}
@ -190,7 +167,6 @@ impl From<&Authenticator> for Server {
pub fn current_server_type() -> Server {
match AuthenticatorType::from_env() {
AuthenticatorType::Local => Server::Local,
AuthenticatorType::Supabase => Server::Supabase,
AuthenticatorType::AppFlowyCloud => Server::AppFlowyCloud,
}
}

@ -2,6 +2,7 @@ use client_api::entity::search_dto::SearchDocumentResponseItem
use flowy_search_pub::cloud::SearchCloudService;
use std::collections::HashMap;
use std::path::Path;
use std::sync::atomic::Ordering;
use std::sync::Arc;

use anyhow::Error;
@ -9,10 +10,9 @@ use client_api::collab_sync::{SinkConfig, SyncObject, SyncPlugin};
use client_api::entity::ai_dto::{CompletionType, RepeatedRelatedQuestion};
use client_api::entity::ChatMessageType;
use collab::core::origin::{CollabClient, CollabOrigin};

use collab::entity::EncodedCollab;
use collab::preclude::CollabPlugin;
use collab_entity::CollabType;
use collab_plugins::cloud_storage::postgres::SupabaseDBPlugin;
use serde_json::Value;
use tokio_stream::wrappers::WatchStream;
use tracing::{debug, info};
@ -25,8 +25,8 @@ use flowy_ai_pub::cloud::{
RepeatedChatMessage, StreamAnswer, StreamComplete,
};
use flowy_database_pub::cloud::{
CollabDocStateByOid, DatabaseAIService, DatabaseCloudService, DatabaseSnapshot,
SummaryRowContent, TranslateRowContent, TranslateRowResponse,
DatabaseAIService, DatabaseCloudService, DatabaseSnapshot, EncodeCollabByOid, SummaryRowContent,
TranslateRowContent, TranslateRowResponse,
};
use flowy_document::deps::DocumentData;
use flowy_document_pub::cloud::{DocumentCloudService, DocumentSnapshot};
@ -36,13 +36,11 @@ use flowy_folder_pub::cloud::{
};
use flowy_folder_pub::entities::{PublishInfoResponse, PublishPayload};
use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_server_pub::supabase_config::SupabaseConfiguration;
use flowy_storage_pub::cloud::{ObjectIdentity, ObjectValue, StorageCloudService};
use flowy_storage_pub::storage::{CompletedPartRequest, CreateUploadResponse, UploadPartResponse};
use flowy_user_pub::cloud::{UserCloudService, UserCloudServiceProvider};
use flowy_user_pub::entities::{Authenticator, UserTokenState};
use lib_infra::async_trait::async_trait;
use lib_infra::future::FutureResult;

use crate::integrate::server::{Server, ServerProvider};

@ -168,8 +166,8 @@ impl UserCloudServiceProvider for ServerProvider {
fn set_enable_sync(&self, uid: i64, enable_sync: bool) {
if let Ok(server) = self.get_server() {
server.set_enable_sync(uid, enable_sync);
*self.user_enable_sync.write() = enable_sync;
*self.uid.write() = Some(uid);
self.user_enable_sync.store(enable_sync, Ordering::Release);
self.uid.store(Some(uid.into()));
}
}

@ -195,7 +193,7 @@ impl UserCloudServiceProvider for ServerProvider {

fn set_encrypt_secret(&self, secret: String) {
tracing::info!("🔑Set encrypt secret");
self.encryption.write().set_secret(secret);
self.encryption.set_secret(secret);
}

/// Returns the [UserCloudService] base on the current [Server].
@ -211,93 +209,87 @@ impl UserCloudServiceProvider for ServerProvider {
Server::AppFlowyCloud => AFCloudConfiguration::from_env()
.map(|config| config.base_url)
.unwrap_or_default(),
Server::Supabase => SupabaseConfiguration::from_env()
.map(|config| config.url)
.unwrap_or_default(),
}
}
}

#[async_trait]
impl FolderCloudService for ServerProvider {
fn create_workspace(&self, uid: i64, name: &str) -> FutureResult<Workspace, Error> {
let server = self.get_server();
async fn create_workspace(&self, uid: i64, name: &str) -> Result<Workspace, Error> {
let server = self.get_server()?;
let name = name.to_string();
FutureResult::new(async move { server?.folder_service().create_workspace(uid, &name).await })
server.folder_service().create_workspace(uid, &name).await
}

fn open_workspace(&self, workspace_id: &str) -> FutureResult<(), Error> {
async fn open_workspace(&self, workspace_id: &str) -> Result<(), Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
FutureResult::new(async move { server?.folder_service().open_workspace(&workspace_id).await })
let server = self.get_server()?;
server.folder_service().open_workspace(&workspace_id).await
}

fn get_all_workspace(&self) -> FutureResult<Vec<WorkspaceRecord>, Error> {
let server = self.get_server();
FutureResult::new(async move { server?.folder_service().get_all_workspace().await })
async fn get_all_workspace(&self) -> Result<Vec<WorkspaceRecord>, Error> {
let server = self.get_server()?;
server.folder_service().get_all_workspace().await
}

fn get_folder_data(
async fn get_folder_data(
&self,
workspace_id: &str,
uid: &i64,
) -> FutureResult<Option<FolderData>, Error> {
) -> Result<Option<FolderData>, Error> {
let uid = *uid;
let server = self.get_server();
let server = self.get_server()?;
let workspace_id = workspace_id.to_string();
FutureResult::new(async move {
server?
.folder_service()
.get_folder_data(&workspace_id, &uid)
.await
})

server
.folder_service()
.get_folder_data(&workspace_id, &uid)
.await
}

fn get_folder_snapshots(
async fn get_folder_snapshots(
&self,
workspace_id: &str,
limit: usize,
) -> FutureResult<Vec<FolderSnapshot>, Error> {
) -> Result<Vec<FolderSnapshot>, Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
FutureResult::new(async move {
server?
.folder_service()
.get_folder_snapshots(&workspace_id, limit)
.await
})
let server = self.get_server()?;

server
.folder_service()
.get_folder_snapshots(&workspace_id, limit)
.await
}

fn get_folder_doc_state(
async fn get_folder_doc_state(
&self,
workspace_id: &str,
uid: i64,
collab_type: CollabType,
object_id: &str,
) -> FutureResult<Vec<u8>, Error> {
) -> Result<Vec<u8>, Error> {
let object_id = object_id.to_string();
let workspace_id = workspace_id.to_string();
let server = self.get_server();
FutureResult::new(async move {
server?
.folder_service()
.get_folder_doc_state(&workspace_id, uid, collab_type, &object_id)
.await
})
let server = self.get_server()?;

server
.folder_service()
.get_folder_doc_state(&workspace_id, uid, collab_type, &object_id)
.await
}

fn batch_create_folder_collab_objects(
async fn batch_create_folder_collab_objects(
&self,
workspace_id: &str,
objects: Vec<FolderCollabParams>,
) -> FutureResult<(), Error> {
) -> Result<(), Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
FutureResult::new(async move {
server?
.folder_service()
.batch_create_folder_collab_objects(&workspace_id, objects)
.await
})
let server = self.get_server()?;

server
.folder_service()
.batch_create_folder_collab_objects(&workspace_id, objects)
.await
}

fn service_name(&self) -> String {
@ -307,114 +299,106 @@ impl FolderCloudService for ServerProvider {
.unwrap_or_default()
}

fn publish_view(
async fn publish_view(
&self,
workspace_id: &str,
payload: Vec<PublishPayload>,
) -> FutureResult<(), Error> {
) -> Result<(), Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
FutureResult::new(async move {
server?
.folder_service()
.publish_view(&workspace_id, payload)
.await
})
let server = self.get_server()?;

server
.folder_service()
.publish_view(&workspace_id, payload)
.await
}

fn unpublish_views(&self, workspace_id: &str, view_ids: Vec<String>) -> FutureResult<(), Error> {
async fn unpublish_views(&self, workspace_id: &str, view_ids: Vec<String>) -> Result<(), Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
FutureResult::new(async move {
server?
.folder_service()
.unpublish_views(&workspace_id, view_ids)
.await
})
let server = self.get_server()?;

server
.folder_service()
.unpublish_views(&workspace_id, view_ids)
.await
}

fn get_publish_info(&self, view_id: &str) -> FutureResult<PublishInfoResponse, Error> {
async fn get_publish_info(&self, view_id: &str) -> Result<PublishInfoResponse, Error> {
let view_id = view_id.to_string();
let server = self.get_server();
FutureResult::new(async move { server?.folder_service().get_publish_info(&view_id).await })
let server = self.get_server()?;
server.folder_service().get_publish_info(&view_id).await
}

fn set_publish_namespace(
async fn set_publish_namespace(
&self,
workspace_id: &str,
new_namespace: &str,
) -> FutureResult<(), Error> {
) -> Result<(), Error> {
let workspace_id = workspace_id.to_string();
let new_namespace = new_namespace.to_string();
let server = self.get_server();
FutureResult::new(async move {
server?
.folder_service()
.set_publish_namespace(&workspace_id, &new_namespace)
.await
})
let server = self.get_server()?;

server
.folder_service()
.set_publish_namespace(&workspace_id, &new_namespace)
.await
}

fn get_publish_namespace(&self, workspace_id: &str) -> FutureResult<String, Error> {
async fn get_publish_namespace(&self, workspace_id: &str) -> Result<String, Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
FutureResult::new(async move {
server?
.folder_service()
.get_publish_namespace(&workspace_id)
.await
})
let server = self.get_server()?;

server
.folder_service()
.get_publish_namespace(&workspace_id)
.await
}
}

#[async_trait]
impl DatabaseCloudService for ServerProvider {
fn get_database_object_doc_state(
async fn get_database_encode_collab(
&self,
object_id: &str,
collab_type: CollabType,
workspace_id: &str,
) -> FutureResult<Option<Vec<u8>>, Error> {
) -> Result<Option<EncodedCollab>, Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
let server = self.get_server()?;
let database_id = object_id.to_string();
FutureResult::new(async move {
server?
.database_service()
.get_database_object_doc_state(&database_id, collab_type, &workspace_id)
.await
})
server
.database_service()
.get_database_encode_collab(&database_id, collab_type, &workspace_id)
.await
}

fn batch_get_database_object_doc_state(
async fn batch_get_database_encode_collab(
&self,
object_ids: Vec<String>,
object_ty: CollabType,
workspace_id: &str,
) -> FutureResult<CollabDocStateByOid, Error> {
) -> Result<EncodeCollabByOid, Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
FutureResult::new(async move {
server?
.database_service()
.batch_get_database_object_doc_state(object_ids, object_ty, &workspace_id)
.await
})
let server = self.get_server()?;

server
.database_service()
.batch_get_database_encode_collab(object_ids, object_ty, &workspace_id)
.await
}

fn get_database_collab_object_snapshots(
async fn get_database_collab_object_snapshots(
&self,
object_id: &str,
limit: usize,
) -> FutureResult<Vec<DatabaseSnapshot>, Error> {
let server = self.get_server();
) -> Result<Vec<DatabaseSnapshot>, Error> {
let server = self.get_server()?;
let database_id = object_id.to_string();
FutureResult::new(async move {
server?
.database_service()
.get_database_collab_object_snapshots(&database_id, limit)
.await
})

server
.database_service()
.get_database_collab_object_snapshots(&database_id, limit)
.await
}
}

@ -449,54 +433,52 @@ impl DatabaseAIService for ServerProvider {
}
}

#[async_trait]
impl DocumentCloudService for ServerProvider {
fn get_document_doc_state(
async fn get_document_doc_state(
&self,
document_id: &str,
workspace_id: &str,
) -> FutureResult<Vec<u8>, FlowyError> {
) -> Result<Vec<u8>, FlowyError> {
let workspace_id = workspace_id.to_string();
let document_id = document_id.to_string();
let server = self.get_server();
FutureResult::new(async move {
server?
.document_service()
.get_document_doc_state(&document_id, &workspace_id)
.await
})
let server = self.get_server()?;

server
.document_service()
.get_document_doc_state(&document_id, &workspace_id)
.await
}

fn get_document_snapshots(
async fn get_document_snapshots(
&self,
document_id: &str,
limit: usize,
workspace_id: &str,
) -> FutureResult<Vec<DocumentSnapshot>, Error> {
) -> Result<Vec<DocumentSnapshot>, Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
let server = self.get_server()?;
let document_id = document_id.to_string();
FutureResult::new(async move {
server?
.document_service()
.get_document_snapshots(&document_id, limit, &workspace_id)
.await
})

server
.document_service()
.get_document_snapshots(&document_id, limit, &workspace_id)
.await
}

fn get_document_data(
async fn get_document_data(
&self,
document_id: &str,
workspace_id: &str,
) -> FutureResult<Option<DocumentData>, Error> {
) -> Result<Option<DocumentData>, Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_server();
let server = self.get_server()?;
let document_id = document_id.to_string();
FutureResult::new(async move {
server?
.document_service()
.get_document_data(&document_id, &workspace_id)
.await
})

server
.document_service()
.get_document_data(&document_id, &workspace_id)
.await
}
}

@ -563,34 +545,11 @@ impl CollabCloudPluginProvider for ServerProvider {
vec![]
}
},
CollabPluginProviderContext::Supabase {
uid,
collab_object,
local_collab,
local_collab_db,
} => {
let mut plugins: Vec<Box<dyn CollabPlugin>> = vec![];
if let Some(remote_collab_storage) = self
.get_server()
.ok()
.and_then(|provider| provider.collab_storage(&collab_object))
{
plugins.push(Box::new(SupabaseDBPlugin::new(
uid,
collab_object,
local_collab,
1,
remote_collab_storage,
local_collab_db,
)));
}
plugins
},
}
}

fn is_sync_enabled(&self) -> bool {
*self.user_enable_sync.read()
self.user_enable_sync.load(Ordering::Acquire)
}
}

@ -131,21 +131,12 @@ impl UserStatusCallback for UserStatusCallbackImpl {
create_if_not_exist: true,
},
Server::AppFlowyCloud => FolderInitDataSource::Cloud(doc_state),
Server::Supabase => {
if is_new_user {
FolderInitDataSource::LocalDisk {
create_if_not_exist: true,
}
} else {
FolderInitDataSource::Cloud(doc_state)
}
},
},
Err(err) => match server_type {
Server::Local => FolderInitDataSource::LocalDisk {
create_if_not_exist: true,
},
Server::AppFlowyCloud | Server::Supabase => {
Server::AppFlowyCloud => {
return Err(FlowyError::from(err));
},
},

@ -2,7 +2,6 @@

use flowy_search::folder::indexer::FolderIndexManagerImpl;
use flowy_search::services::manager::SearchManager;
use parking_lot::Mutex;
use std::rc::Rc;
use std::sync::{Arc, Weak};
use std::time::Duration;
@ -302,7 +301,6 @@ impl From<Server> for CollabPluginProviderType {
match server_type {
Server::Local => CollabPluginProviderType::Local,
Server::AppFlowyCloud => CollabPluginProviderType::AppFlowyCloud,
Server::Supabase => CollabPluginProviderType::Supabase,
}
}
}
@ -323,13 +321,3 @@ impl ServerUser for ServerUserImpl {
self.upgrade_user()?.workspace_id()
}
}

pub struct MutexAppFlowyCore(pub Rc<Mutex<AppFlowyCore>>);

impl MutexAppFlowyCore {
pub fn new(appflowy_core: AppFlowyCore) -> Self {
Self(Rc::new(Mutex::new(appflowy_core)))
}
}
unsafe impl Sync for MutexAppFlowyCore {}
unsafe impl Send for MutexAppFlowyCore {}

@ -1,13 +1,12 @@
use anyhow::Error;
pub use client_api::entity::ai_dto::{TranslateItem, TranslateRowResponse};
use collab::core::collab::DataSource;
use collab::entity::EncodedCollab;
use collab_entity::CollabType;
use flowy_error::FlowyError;
use lib_infra::async_trait::async_trait;
use lib_infra::future::FutureResult;
use std::collections::HashMap;

pub type CollabDocStateByOid = HashMap<String, DataSource>;
pub type EncodeCollabByOid = HashMap<String, EncodedCollab>;
pub type SummaryRowContent = HashMap<String, String>;
pub type TranslateRowContent = Vec<TranslateItem>;

@ -41,25 +40,25 @@ pub trait DatabaseAIService: Send + Sync {
///
#[async_trait]
pub trait DatabaseCloudService: Send + Sync {
fn get_database_object_doc_state(
async fn get_database_encode_collab(
&self,
object_id: &str,
collab_type: CollabType,
workspace_id: &str,
) -> FutureResult<Option<Vec<u8>>, Error>;
) -> Result<Option<EncodedCollab>, Error>;

fn batch_get_database_object_doc_state(
async fn batch_get_database_encode_collab(
&self,
object_ids: Vec<String>,
object_ty: CollabType,
workspace_id: &str,
) -> FutureResult<CollabDocStateByOid, Error>;
) -> Result<EncodeCollabByOid, Error>;

fn get_database_collab_object_snapshots(
async fn get_database_collab_object_snapshots(
&self,
object_id: &str,
limit: usize,
) -> FutureResult<Vec<DatabaseSnapshot>, Error>;
) -> Result<Vec<DatabaseSnapshot>, Error>;
}

pub struct DatabaseSnapshot {

@ -15,7 +15,6 @@ flowy-database-pub = { workspace = true }

flowy-derive.workspace = true
flowy-notification = { workspace = true }
parking_lot.workspace = true
protobuf.workspace = true
flowy-error = { path = "../flowy-error", features = [
"impl_from_dispatch_error",
@ -29,6 +28,7 @@ tracing.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_repr.workspace = true
arc-swap.workspace = true
lib-infra = { workspace = true }
chrono = { workspace = true, default-features = false, features = ["clock"] }
rust_decimal = "1.28.1"

@ -3,7 +3,7 @@ use std::sync::{Arc, Weak};
use collab_database::rows::RowId;
use lib_infra::box_any::BoxAny;
use tokio::sync::oneshot;
use tracing::error;
use tracing::{error, trace};

use flowy_error::{FlowyError, FlowyResult};
use lib_dispatch::prelude::{af_spawn, data_result_ok, AFPluginData, AFPluginState, DataResult};
@ -33,8 +33,17 @@ pub(crate) async fn get_database_data_handler(
) -> DataResult<DatabasePB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let database_id = manager
.get_database_id_with_view_id(view_id.as_ref())
.await?;
let database_editor = manager.get_database_editor(&database_id).await?;
let data = database_editor.get_database_data(view_id.as_ref()).await?;
trace!(
"layout: {:?}, rows: {}, fields: {}",
data.layout_type,
data.rows.len(),
data.fields.len()
);
data_result_ok(data)
}

@ -72,7 +81,9 @@ pub(crate) async fn get_database_setting_handler(
) -> DataResult<DatabaseViewSettingPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;
let data = database_editor
.get_database_view_setting(view_id.as_ref())
.await?;
@ -86,7 +97,9 @@ pub(crate) async fn update_database_setting_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params = data.try_into_inner()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;

if let Some(payload) = params.insert_filter {
database_editor
@ -139,7 +152,9 @@ pub(crate) async fn get_all_filters_handler(
) -> DataResult<RepeatedFilterPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;
let filters = database_editor.get_all_filters(view_id.as_ref()).await;
data_result_ok(filters)
}
@ -151,7 +166,9 @@ pub(crate) async fn get_all_sorts_handler(
) -> DataResult<RepeatedSortPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;
let sorts = database_editor.get_all_sorts(view_id.as_ref()).await;
data_result_ok(sorts)
}
@ -163,7 +180,9 @@ pub(crate) async fn delete_all_sorts_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;
database_editor.delete_all_sorts(view_id.as_ref()).await;
Ok(())
}
@ -175,9 +194,12 @@ pub(crate) async fn get_fields_handler(
) -> DataResult<RepeatedFieldPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: GetFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let fields = database_editor
.get_fields(&params.view_id, params.field_ids)
.await
.into_iter()
.map(FieldPB::new)
.collect::<Vec<FieldPB>>()
@ -192,9 +214,10 @@ pub(crate) async fn get_primary_field_handler(
) -> DataResult<FieldPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id = data.into_inner().value;
let database_editor = manager.get_database_with_view_id(&view_id).await?;
let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;
let mut fields = database_editor
.get_fields(&view_id, None)
.await
.into_iter()
.filter(|field| field.is_primary)
.map(FieldPB::new)
@ -221,7 +244,9 @@ pub(crate) async fn update_field_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: FieldChangesetParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor.update_field(params).await?;
Ok(())
}
@ -233,8 +258,10 @@ pub(crate) async fn update_field_type_option_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: TypeOptionChangesetParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
if let Some(old_field) = database_editor.get_field(&params.field_id) {
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
if let Some(old_field) = database_editor.get_field(&params.field_id).await {
let field_type = FieldType::from(old_field.field_type);
let type_option_data = type_option_data_from_pb(params.type_option_data, &field_type)?;
database_editor
@ -251,7 +278,9 @@ pub(crate) async fn delete_field_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: FieldIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor.delete_field(&params.field_id).await?;
Ok(())
}
@ -263,7 +292,9 @@ pub(crate) async fn clear_field_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: FieldIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.clear_field(&params.view_id, &params.field_id)
.await?;
@ -277,14 +308,17 @@ pub(crate) async fn switch_to_field_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: EditFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let old_field = database_editor.get_field(&params.field_id);
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let old_field = database_editor.get_field(&params.field_id).await;
database_editor
.switch_to_field_type(&params.field_id, params.field_type)
.await?;

if let Some(new_type_option) = database_editor
.get_field(&params.field_id)
.await
.map(|field| field.get_any_type_option(field.field_type))
{
match (old_field, new_type_option) {
@ -308,7 +342,9 @@ pub(crate) async fn duplicate_field_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: DuplicateFieldPayloadPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.duplicate_field(&params.view_id, &params.field_id)
.await?;
@ -323,7 +359,9 @@ pub(crate) async fn create_field_handler(
) -> DataResult<FieldPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CreateFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let data = database_editor
.create_field_with_type_option(params)
.await?;
@ -338,7 +376,9 @@ pub(crate) async fn move_field_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: MoveFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor.move_field(params).await?;
Ok(())
}
@ -350,21 +390,42 @@ pub(crate) async fn get_row_handler(
) -> DataResult<OptionalRowPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let row = database_editor
.get_row(&params.view_id, &params.row_id)
.await
.map(RowPB::from);
data_result_ok(OptionalRowPB { row })
}

pub(crate) async fn init_row_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor.init_database_row(&params.row_id).await?;
Ok(())
}

pub(crate) async fn get_row_meta_handler(
data: AFPluginData<RowIdPB>,
manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RowMetaPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
match database_editor.get_row_meta(&params.view_id, &params.row_id) {
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
match database_editor
.get_row_meta(&params.view_id, &params.row_id)
.await
{
None => Err(FlowyError::record_not_found()),
Some(row) => data_result_ok(row),
}
@ -376,7 +437,9 @@ pub(crate) async fn update_row_meta_handler(
) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: UpdateRowMetaParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let row_id = RowId::from(params.id.clone());
database_editor
.update_row_meta(&row_id.clone(), params)
@ -391,7 +454,9 @@ pub(crate) async fn delete_rows_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RepeatedRowIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let row_ids = params
.row_ids
.into_iter()
@ -408,7 +473,9 @@ pub(crate) async fn duplicate_row_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.duplicate_row(&params.view_id, &params.row_id)
.await?;
@ -422,7 +489,9 @@ pub(crate) async fn move_row_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: MoveRowParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.move_row(&params.view_id, params.from_row_id, params.to_row_id)
.await?;
@ -436,7 +505,9 @@ pub(crate) async fn create_row_handler(
) -> DataResult<RowMetaPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params = data.try_into_inner()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;

match database_editor.create_row(params).await? {
Some(row) => data_result_ok(RowMetaPB::from(row)),
@ -451,7 +522,9 @@ pub(crate) async fn get_cell_handler(
) -> DataResult<CellPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CellIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let cell = database_editor
.get_cell_pb(&params.field_id, &params.row_id)
.await
@ -466,7 +539,9 @@ pub(crate) async fn update_cell_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CellChangesetPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.update_cell_with_changeset(
&params.view_id,
@ -485,7 +560,9 @@ pub(crate) async fn new_select_option_handler(
) -> DataResult<SelectOptionPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CreateSelectOptionParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let result = database_editor
.create_select_option(&params.field_id, params.option_name)
.await;
@ -505,7 +582,9 @@ pub(crate) async fn insert_or_update_select_option_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params = data.into_inner();
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.insert_select_options(
&params.view_id,
@ -524,7 +603,9 @@ pub(crate) async fn delete_select_option_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params = data.into_inner();
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.delete_select_options(
&params.view_id,
@ -544,7 +625,7 @@ pub(crate) async fn update_select_option_cell_handler(
let manager = upgrade_manager(manager)?;
let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?;
let database_editor = manager
.get_database_with_view_id(&params.cell_identifier.view_id)
.get_database_editor_with_view_id(&params.cell_identifier.view_id)
.await?;
let changeset = SelectOptionCellChangeset {
insert_option_ids: params.insert_option_ids,
@ -568,7 +649,9 @@ pub(crate) async fn update_checklist_cell_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: ChecklistCellDataChangesetParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let changeset = ChecklistCellChangeset {
insert_options: params
.insert_options
@ -609,7 +692,9 @@ pub(crate) async fn update_date_cell_handler(
reminder_id: data.reminder_id,
};

let database_editor = manager.get_database_with_view_id(&cell_id.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&cell_id.view_id)
.await?;
database_editor
.update_cell_with_changeset(
&cell_id.view_id,
@ -628,7 +713,9 @@ pub(crate) async fn get_groups_handler(
) -> DataResult<RepeatedGroupPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(params.as_ref()).await?;
let database_editor = manager
.get_database_editor_with_view_id(params.as_ref())
.await?;
let groups = database_editor.load_groups(params.as_ref()).await?;
data_result_ok(groups)
}
@ -640,7 +727,9 @@ pub(crate) async fn get_group_handler(
) -> DataResult<GroupPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: DatabaseGroupIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let group = database_editor
.get_group(&params.view_id, &params.group_id)
.await?;
@ -654,7 +743,9 @@ pub(crate) async fn set_group_by_field_handler(
) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: GroupByFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.set_group_by_field(&params.view_id, &params.field_id, params.setting_content)
.await?;
@ -669,17 +760,11 @@ pub(crate) async fn update_group_handler(
let manager = upgrade_manager(manager)?;
let params: UpdateGroupParams = data.into_inner().try_into()?;
let view_id = params.view_id.clone();
let database_editor = manager.get_database_with_view_id(&view_id).await?;
let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;
let group_changeset = GroupChangeset::from(params);
let (tx, rx) = oneshot::channel();
af_spawn(async move {
let result = database_editor
.update_group(&view_id, vec![group_changeset])
.await;
let _ = tx.send(result);
});

let _ = rx.await?;
database_editor
.update_group(&view_id, vec![group_changeset])
.await?;
Ok(())
}

@ -690,7 +775,9 @@ pub(crate) async fn move_group_handler(
) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: MoveGroupParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.move_group(&params.view_id, &params.from_group_id, &params.to_group_id)
.await?;
@ -704,7 +791,9 @@ pub(crate) async fn move_group_row_handler(
) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: MoveGroupRowParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.move_group_row(
&params.view_id,
@ -724,7 +813,9 @@ pub(crate) async fn create_group_handler(
) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: CreateGroupParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.create_group(&params.view_id, &params.name)
.await?;
@ -738,7 +829,9 @@ pub(crate) async fn delete_group_handler(
) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: DeleteGroupParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor.delete_group(params).await?;
Ok(())
}
@ -792,7 +885,7 @@ pub(crate) async fn set_layout_setting_handler(
let changeset = data.into_inner();
let view_id = changeset.view_id.clone();
let params: LayoutSettingChangeset = changeset.try_into()?;
let database_editor = manager.get_database_with_view_id(&view_id).await?;
let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;
database_editor.set_layout_setting(&view_id, params).await?;
Ok(())
}
@ -803,7 +896,9 @@ pub(crate) async fn get_layout_setting_handler(
) -> DataResult<DatabaseLayoutSettingPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: DatabaseLayoutMeta = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let layout_setting_pb = database_editor
.get_layout_setting(&params.view_id, params.layout)
.await
@ -819,7 +914,9 @@ pub(crate) async fn get_calendar_events_handler(
) -> DataResult<RepeatedCalendarEventPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CalendarEventRequestParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let events = database_editor
.get_all_calendar_events(&params.view_id)
.await;
@ -833,7 +930,9 @@ pub(crate) async fn get_no_date_calendar_events_handler(
) -> DataResult<RepeatedNoDateCalendarEventPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CalendarEventRequestParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let _events = database_editor
.get_all_no_date_calendar_events(&params.view_id)
.await;
@ -847,7 +946,9 @@ pub(crate) async fn get_calendar_event_handler(
) -> DataResult<CalendarEventPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
let event = database_editor
.get_calendar_event(&params.view_id, params.row_id)
.await;
@ -869,7 +970,9 @@ pub(crate) async fn move_calendar_event_handler(
date: Some(data.timestamp),
..Default::default()
};
let database_editor = manager.get_database_with_view_id(&cell_id.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&cell_id.view_id)
.await?;
database_editor
.update_cell_with_changeset(
&cell_id.view_id,
@ -897,7 +1000,7 @@ pub(crate) async fn export_csv_handler(
) -> DataResult<DatabaseExportDataPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id = data.into_inner().value;
let database = manager.get_database_with_view_id(&view_id).await?;
let database = manager.get_database_editor_with_view_id(&view_id).await?;
let data = database.export_csv(CSVFormat::Original).await?;
data_result_ok(DatabaseExportDataPB {
export_type: DatabaseExportDataType::CSV,
@ -923,7 +1026,7 @@ pub(crate) async fn get_field_settings_handler(
) -> DataResult<RepeatedFieldSettingsPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let (view_id, field_ids) = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&view_id).await?;
let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;

let field_settings = database_editor
.get_field_settings(&view_id, field_ids.clone())
@ -944,7 +1047,9 @@ pub(crate) async fn get_all_field_settings_handler(
) -> DataResult<RepeatedFieldSettingsPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;

let field_settings = database_editor
.get_all_field_settings(view_id.as_ref())
@ -965,7 +1070,9 @@ pub(crate) async fn update_field_settings_handler(
) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params = data.try_into_inner()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let database_editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;
database_editor
.update_field_settings_with_changeset(params)
.await?;
@ -979,7 +1086,9 @@ pub(crate) async fn get_all_calculations_handler(
) -> DataResult<RepeatedCalculationsPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let database_editor = manager
.get_database_editor_with_view_id(view_id.as_ref())
.await?;

let calculations = database_editor.get_all_calculations(view_id.as_ref()).await;

@ -993,7 +1102,9 @@ pub(crate) async fn update_calculation_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: UpdateCalculationChangesetPB = data.into_inner();
let editor = manager.get_database_with_view_id(&params.view_id).await?;
let editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;

editor.update_calculation(params).await?;

@ -1007,7 +1118,9 @@ pub(crate) async fn remove_calculation_handler(
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RemoveCalculationChangesetPB = data.into_inner();
let editor = manager.get_database_with_view_id(&params.view_id).await?;
let editor = manager
.get_database_editor_with_view_id(&params.view_id)
.await?;

editor.remove_calculation(params).await?;

@ -1041,7 +1154,7 @@ pub(crate) async fn update_relation_cell_handler(
removed_row_ids: params.removed_row_ids.into_iter().map(Into::into).collect(),
};

let database_editor = manager.get_database_with_view_id(&view_id).await?;
let database_editor = manager.get_database_editor_with_view_id(&view_id).await?;

// // get the related database
// let related_database_id = database_editor
@ -1072,7 +1185,7 @@ pub(crate) async fn get_related_row_datas_handler(
) -> DataResult<RepeatedRelatedRowDataPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: GetRelatedRowDataPB = data.into_inner();
let database_editor = manager.get_database(&params.database_id).await?;
let database_editor = manager.get_database_editor(&params.database_id).await?;
let row_datas = database_editor
|
||||
.get_related_rows(Some(¶ms.row_ids))
|
||||
.await?;
|
||||
@ -1086,7 +1199,7 @@ pub(crate) async fn get_related_database_rows_handler(
|
||||
) -> DataResult<RepeatedRelatedRowDataPB, FlowyError> {
|
||||
let manager = upgrade_manager(manager)?;
|
||||
let database_id = data.into_inner().value;
|
||||
let database_editor = manager.get_database(&database_id).await?;
|
||||
let database_editor = manager.get_database_editor(&database_id).await?;
|
||||
let row_datas = database_editor.get_related_rows(None).await?;
|
||||
|
||||
data_result_ok(RepeatedRelatedRowDataPB { rows: row_datas })
|
||||
|
@ -13,85 +13,86 @@ pub fn init(database_manager: Weak<DatabaseManager>) -> AFPlugin {
|
||||
.name(env!("CARGO_PKG_NAME"))
|
||||
.state(database_manager);
|
||||
plugin
|
||||
.event(DatabaseEvent::GetDatabase, get_database_data_handler)
|
||||
.event(DatabaseEvent::GetDatabaseData, get_database_data_handler)
|
||||
.event(DatabaseEvent::GetDatabaseId, get_database_id_handler)
|
||||
.event(DatabaseEvent::GetDatabaseSetting, get_database_setting_handler)
|
||||
.event(DatabaseEvent::UpdateDatabaseSetting, update_database_setting_handler)
|
||||
.event(DatabaseEvent::GetAllFilters, get_all_filters_handler)
|
||||
.event(DatabaseEvent::GetAllSorts, get_all_sorts_handler)
|
||||
.event(DatabaseEvent::DeleteAllSorts, delete_all_sorts_handler)
|
||||
// Field
|
||||
.event(DatabaseEvent::GetFields, get_fields_handler)
|
||||
.event(DatabaseEvent::GetPrimaryField, get_primary_field_handler)
|
||||
.event(DatabaseEvent::UpdateField, update_field_handler)
|
||||
.event(DatabaseEvent::UpdateFieldTypeOption, update_field_type_option_handler)
|
||||
.event(DatabaseEvent::DeleteField, delete_field_handler)
|
||||
.event(DatabaseEvent::ClearField, clear_field_handler)
|
||||
.event(DatabaseEvent::UpdateFieldType, switch_to_field_handler)
|
||||
.event(DatabaseEvent::DuplicateField, duplicate_field_handler)
|
||||
.event(DatabaseEvent::MoveField, move_field_handler)
|
||||
.event(DatabaseEvent::CreateField, create_field_handler)
|
||||
// Row
|
||||
.event(DatabaseEvent::CreateRow, create_row_handler)
|
||||
.event(DatabaseEvent::GetRow, get_row_handler)
|
||||
.event(DatabaseEvent::GetRowMeta, get_row_meta_handler)
|
||||
.event(DatabaseEvent::UpdateRowMeta, update_row_meta_handler)
|
||||
.event(DatabaseEvent::DeleteRows, delete_rows_handler)
|
||||
.event(DatabaseEvent::DuplicateRow, duplicate_row_handler)
|
||||
.event(DatabaseEvent::MoveRow, move_row_handler)
|
||||
// Cell
|
||||
.event(DatabaseEvent::GetCell, get_cell_handler)
|
||||
.event(DatabaseEvent::UpdateCell, update_cell_handler)
|
||||
// SelectOption
|
||||
.event(DatabaseEvent::CreateSelectOption, new_select_option_handler)
|
||||
.event(DatabaseEvent::InsertOrUpdateSelectOption, insert_or_update_select_option_handler)
|
||||
.event(DatabaseEvent::DeleteSelectOption, delete_select_option_handler)
|
||||
.event(DatabaseEvent::UpdateSelectOptionCell, update_select_option_cell_handler)
|
||||
// Checklist
|
||||
.event(DatabaseEvent::UpdateChecklistCell, update_checklist_cell_handler)
|
||||
// Date
|
||||
.event(DatabaseEvent::UpdateDateCell, update_date_cell_handler)
|
||||
// Group
|
||||
.event(DatabaseEvent::SetGroupByField, set_group_by_field_handler)
|
||||
.event(DatabaseEvent::MoveGroup, move_group_handler)
|
||||
.event(DatabaseEvent::MoveGroupRow, move_group_row_handler)
|
||||
.event(DatabaseEvent::GetGroups, get_groups_handler)
|
||||
.event(DatabaseEvent::GetGroup, get_group_handler)
|
||||
.event(DatabaseEvent::UpdateGroup, update_group_handler)
|
||||
.event(DatabaseEvent::CreateGroup, create_group_handler)
|
||||
.event(DatabaseEvent::DeleteGroup, delete_group_handler)
|
||||
// Database
|
||||
.event(DatabaseEvent::GetDatabaseMeta, get_database_meta_handler)
|
||||
.event(DatabaseEvent::GetDatabases, get_databases_handler)
|
||||
// Calendar
|
||||
.event(DatabaseEvent::GetAllCalendarEvents, get_calendar_events_handler)
|
||||
.event(DatabaseEvent::GetNoDateCalendarEvents, get_no_date_calendar_events_handler)
|
||||
.event(DatabaseEvent::GetCalendarEvent, get_calendar_event_handler)
|
||||
.event(DatabaseEvent::MoveCalendarEvent, move_calendar_event_handler)
|
||||
// Layout setting
|
||||
.event(DatabaseEvent::SetLayoutSetting, set_layout_setting_handler)
|
||||
.event(DatabaseEvent::GetLayoutSetting, get_layout_setting_handler)
|
||||
.event(DatabaseEvent::CreateDatabaseView, create_database_view)
|
||||
// Export
|
||||
.event(DatabaseEvent::ExportCSV, export_csv_handler)
|
||||
.event(DatabaseEvent::GetDatabaseSnapshots, get_snapshots_handler)
|
||||
// Field settings
|
||||
.event(DatabaseEvent::GetFieldSettings, get_field_settings_handler)
|
||||
.event(DatabaseEvent::GetAllFieldSettings, get_all_field_settings_handler)
|
||||
.event(DatabaseEvent::UpdateFieldSettings, update_field_settings_handler)
|
||||
// Calculations
|
||||
.event(DatabaseEvent::GetAllCalculations, get_all_calculations_handler)
|
||||
.event(DatabaseEvent::UpdateCalculation, update_calculation_handler)
|
||||
.event(DatabaseEvent::RemoveCalculation, remove_calculation_handler)
|
||||
// Relation
|
||||
.event(DatabaseEvent::GetRelatedDatabaseIds, get_related_database_ids_handler)
|
||||
.event(DatabaseEvent::UpdateRelationCell, update_relation_cell_handler)
|
||||
.event(DatabaseEvent::GetRelatedRowDatas, get_related_row_datas_handler)
|
||||
.event(DatabaseEvent::GetRelatedDatabaseRows, get_related_database_rows_handler)
|
||||
// AI
|
||||
.event(DatabaseEvent::SummarizeRow, summarize_row_handler)
|
||||
.event(DatabaseEvent::TranslateRow, translate_row_handler)
|
||||
.event(DatabaseEvent::GetDatabase, get_database_data_handler)
|
||||
.event(DatabaseEvent::GetDatabaseData, get_database_data_handler)
|
||||
.event(DatabaseEvent::GetDatabaseId, get_database_id_handler)
|
||||
.event(DatabaseEvent::GetDatabaseSetting, get_database_setting_handler)
|
||||
.event(DatabaseEvent::UpdateDatabaseSetting, update_database_setting_handler)
|
||||
.event(DatabaseEvent::GetAllFilters, get_all_filters_handler)
|
||||
.event(DatabaseEvent::GetAllSorts, get_all_sorts_handler)
|
||||
.event(DatabaseEvent::DeleteAllSorts, delete_all_sorts_handler)
|
||||
// Field
|
||||
.event(DatabaseEvent::GetFields, get_fields_handler)
|
||||
.event(DatabaseEvent::GetPrimaryField, get_primary_field_handler)
|
||||
.event(DatabaseEvent::UpdateField, update_field_handler)
|
||||
.event(DatabaseEvent::UpdateFieldTypeOption, update_field_type_option_handler)
|
||||
.event(DatabaseEvent::DeleteField, delete_field_handler)
|
||||
.event(DatabaseEvent::ClearField, clear_field_handler)
|
||||
.event(DatabaseEvent::UpdateFieldType, switch_to_field_handler)
|
||||
.event(DatabaseEvent::DuplicateField, duplicate_field_handler)
|
||||
.event(DatabaseEvent::MoveField, move_field_handler)
|
||||
.event(DatabaseEvent::CreateField, create_field_handler)
|
||||
// Row
|
||||
.event(DatabaseEvent::CreateRow, create_row_handler)
|
||||
.event(DatabaseEvent::GetRow, get_row_handler)
|
||||
.event(DatabaseEvent::InitRow, init_row_handler)
|
||||
.event(DatabaseEvent::GetRowMeta, get_row_meta_handler)
|
||||
.event(DatabaseEvent::UpdateRowMeta, update_row_meta_handler)
|
||||
.event(DatabaseEvent::DeleteRows, delete_rows_handler)
|
||||
.event(DatabaseEvent::DuplicateRow, duplicate_row_handler)
|
||||
.event(DatabaseEvent::MoveRow, move_row_handler)
|
||||
// Cell
|
||||
.event(DatabaseEvent::GetCell, get_cell_handler)
|
||||
.event(DatabaseEvent::UpdateCell, update_cell_handler)
|
||||
// SelectOption
|
||||
.event(DatabaseEvent::CreateSelectOption, new_select_option_handler)
|
||||
.event(DatabaseEvent::InsertOrUpdateSelectOption, insert_or_update_select_option_handler)
|
||||
.event(DatabaseEvent::DeleteSelectOption, delete_select_option_handler)
|
||||
.event(DatabaseEvent::UpdateSelectOptionCell, update_select_option_cell_handler)
|
||||
// Checklist
|
||||
.event(DatabaseEvent::UpdateChecklistCell, update_checklist_cell_handler)
|
||||
// Date
|
||||
.event(DatabaseEvent::UpdateDateCell, update_date_cell_handler)
|
||||
// Group
|
||||
.event(DatabaseEvent::SetGroupByField, set_group_by_field_handler)
|
||||
.event(DatabaseEvent::MoveGroup, move_group_handler)
|
||||
.event(DatabaseEvent::MoveGroupRow, move_group_row_handler)
|
||||
.event(DatabaseEvent::GetGroups, get_groups_handler)
|
||||
.event(DatabaseEvent::GetGroup, get_group_handler)
|
||||
.event(DatabaseEvent::UpdateGroup, update_group_handler)
|
||||
.event(DatabaseEvent::CreateGroup, create_group_handler)
|
||||
.event(DatabaseEvent::DeleteGroup, delete_group_handler)
|
||||
// Database
|
||||
.event(DatabaseEvent::GetDatabaseMeta, get_database_meta_handler)
|
||||
.event(DatabaseEvent::GetDatabases, get_databases_handler)
|
||||
// Calendar
|
||||
.event(DatabaseEvent::GetAllCalendarEvents, get_calendar_events_handler)
|
||||
.event(DatabaseEvent::GetNoDateCalendarEvents, get_no_date_calendar_events_handler)
|
||||
.event(DatabaseEvent::GetCalendarEvent, get_calendar_event_handler)
|
||||
.event(DatabaseEvent::MoveCalendarEvent, move_calendar_event_handler)
|
||||
// Layout setting
|
||||
.event(DatabaseEvent::SetLayoutSetting, set_layout_setting_handler)
|
||||
.event(DatabaseEvent::GetLayoutSetting, get_layout_setting_handler)
|
||||
.event(DatabaseEvent::CreateDatabaseView, create_database_view)
|
||||
// Export
|
||||
.event(DatabaseEvent::ExportCSV, export_csv_handler)
|
||||
.event(DatabaseEvent::GetDatabaseSnapshots, get_snapshots_handler)
|
||||
// Field settings
|
||||
.event(DatabaseEvent::GetFieldSettings, get_field_settings_handler)
|
||||
.event(DatabaseEvent::GetAllFieldSettings, get_all_field_settings_handler)
|
||||
.event(DatabaseEvent::UpdateFieldSettings, update_field_settings_handler)
|
||||
// Calculations
|
||||
.event(DatabaseEvent::GetAllCalculations, get_all_calculations_handler)
|
||||
.event(DatabaseEvent::UpdateCalculation, update_calculation_handler)
|
||||
.event(DatabaseEvent::RemoveCalculation, remove_calculation_handler)
|
||||
// Relation
|
||||
.event(DatabaseEvent::GetRelatedDatabaseIds, get_related_database_ids_handler)
|
||||
.event(DatabaseEvent::UpdateRelationCell, update_relation_cell_handler)
|
||||
.event(DatabaseEvent::GetRelatedRowDatas, get_related_row_datas_handler)
|
||||
.event(DatabaseEvent::GetRelatedDatabaseRows, get_related_database_rows_handler)
|
||||
// AI
|
||||
.event(DatabaseEvent::SummarizeRow, summarize_row_handler)
|
||||
.event(DatabaseEvent::TranslateRow, translate_row_handler)
|
||||
}
|
||||
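For orientation, the chain of `.event(...)` calls above is just building a lookup table from event identifiers to handler functions before the plugin is handed to the dispatcher. The following is a deliberately simplified, hypothetical sketch of that registration pattern in plain Rust; it is not the AFPlugin API, and the enum variants and handler bodies are illustrative only.

use std::collections::HashMap;

#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
enum DatabaseEvent {
    GetDatabaseData,
    CreateRow,
    ExportCSV,
}

type Handler = fn(&str) -> String;

#[derive(Default)]
struct Plugin {
    handlers: HashMap<DatabaseEvent, Handler>,
}

impl Plugin {
    // Mirrors the builder-style `.event(event, handler)` registration above.
    fn event(mut self, event: DatabaseEvent, handler: Handler) -> Self {
        self.handlers.insert(event, handler);
        self
    }

    // Looks up the handler registered for the event and runs it.
    fn dispatch(&self, event: DatabaseEvent, payload: &str) -> Option<String> {
        self.handlers.get(&event).map(|handler| handler(payload))
    }
}

fn main() {
    let plugin = Plugin::default()
        .event(DatabaseEvent::GetDatabaseData, |view_id| format!("database for {view_id}"))
        .event(DatabaseEvent::CreateRow, |view_id| format!("new row in {view_id}"))
        .event(DatabaseEvent::ExportCSV, |view_id| format!("csv export for {view_id}"));

    assert_eq!(plugin.dispatch(DatabaseEvent::CreateRow, "view-1").unwrap(), "new row in view-1");
}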
|
||||
/// [DatabaseEvent] defines events that are used to interact with the Grid. You could check [this](https://appflowy.gitbook.io/docs/essential-documentation/contribute-to-appflowy/architecture/backend/protobuf)
|
||||
@ -377,4 +378,7 @@ pub enum DatabaseEvent {
|
||||
|
||||
#[event(input = "TranslateRowPB")]
|
||||
TranslateRow = 175,
|
||||
|
||||
#[event(input = "RowIdPB")]
|
||||
InitRow = 176,
|
||||
}
|
||||
|
@ -1,22 +1,28 @@
|
||||
use anyhow::anyhow;
|
||||
use arc_swap::ArcSwapOption;
|
||||
use async_trait::async_trait;
|
||||
use std::borrow::BorrowMut;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{Arc, Weak};
|
||||
|
||||
use collab::core::collab::{DataSource, MutexCollab};
|
||||
use collab_database::database::{DatabaseData, MutexDatabase};
|
||||
use collab::core::collab::DataSource;
|
||||
use collab::preclude::Collab;
|
||||
use collab_database::database::{Database, DatabaseData};
|
||||
use collab_database::error::DatabaseError;
|
||||
use collab_database::rows::RowId;
|
||||
use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout};
|
||||
use collab_database::workspace_database::{
|
||||
CollabDocStateByOid, CollabFuture, DatabaseCollabService, DatabaseMeta, WorkspaceDatabase,
|
||||
DatabaseCollabService, DatabaseMeta, EncodeCollabByOid, WorkspaceDatabase,
|
||||
};
|
||||
use collab_entity::{CollabType, EncodedCollab};
|
||||
use collab_plugins::local_storage::kv::KVTransactionDB;
|
||||
use tokio::sync::{Mutex, RwLock};
|
||||
use tracing::{event, instrument, trace};
|
||||
|
||||
use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig};
|
||||
use collab_integrate::{CollabKVAction, CollabKVDB, CollabPersistenceConfig};
|
||||
use collab_integrate::collab_builder::{
|
||||
AppFlowyCollabBuilder, CollabBuilderConfig, KVDBCollabPersistenceImpl,
|
||||
};
|
||||
use collab_integrate::{CollabKVAction, CollabKVDB};
|
||||
use flowy_database_pub::cloud::{
|
||||
DatabaseAIService, DatabaseCloudService, SummaryRowContent, TranslateItem, TranslateRowContent,
|
||||
};
|
||||
@ -42,7 +48,7 @@ pub trait DatabaseUser: Send + Sync {
|
||||
|
||||
pub struct DatabaseManager {
|
||||
user: Arc<dyn DatabaseUser>,
|
||||
workspace_database: Arc<RwLock<Option<Arc<WorkspaceDatabase>>>>,
|
||||
workspace_database: ArcSwapOption<RwLock<WorkspaceDatabase>>,
|
||||
task_scheduler: Arc<RwLock<TaskDispatcher>>,
|
||||
editors: Mutex<HashMap<String, Arc<DatabaseEditor>>>,
|
||||
collab_builder: Arc<AppFlowyCollabBuilder>,
|
||||
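The field change above replaces `Arc<RwLock<Option<Arc<WorkspaceDatabase>>>>` with `ArcSwapOption<RwLock<WorkspaceDatabase>>`, so re-initialization swaps the whole handle in one atomic step instead of holding a write lock across it. A minimal sketch of that pattern, assuming only the `arc_swap` and `tokio` crates and a stand-in `Workspace` type:

use arc_swap::ArcSwapOption;
use std::sync::Arc;
use tokio::sync::RwLock;

struct Workspace {
    id: String,
}

struct Manager {
    // Readers grab a clone of the Arc via `load_full()` without blocking anyone;
    // re-initialization swaps the whole handle atomically.
    workspace: ArcSwapOption<RwLock<Workspace>>,
}

impl Manager {
    fn new() -> Self {
        Self {
            workspace: ArcSwapOption::new(None),
        }
    }

    fn handle(&self) -> Option<Arc<RwLock<Workspace>>> {
        self.workspace.load_full()
    }

    async fn reinitialize(&self, id: String) {
        // Take the old value out atomically, then shut it down outside the swap.
        if let Some(old) = self.workspace.swap(None) {
            let old_workspace = old.read().await;
            println!("closing workspace {}", old_workspace.id);
        }
        self
            .workspace
            .store(Some(Arc::new(RwLock::new(Workspace { id }))));
    }
}

#[tokio::main]
async fn main() {
    let manager = Manager::new();
    manager.reinitialize("workspace-1".to_string()).await;
    manager.reinitialize("workspace-2".to_string()).await;
    if let Some(workspace) = manager.handle() {
        println!("current workspace {}", workspace.read().await.id);
    }
}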
@ -89,10 +95,10 @@ impl DatabaseManager {
|
||||
}
|
||||
self.editors.lock().await.clear();
|
||||
// 3. Clear the workspace database
|
||||
if let Some(old_workspace_database) = self.workspace_database.write().await.take() {
|
||||
old_workspace_database.close();
|
||||
if let Some(old_workspace_database) = self.workspace_database.swap(None) {
|
||||
let wdb = old_workspace_database.read().await;
|
||||
wdb.close();
|
||||
}
|
||||
*self.workspace_database.write().await = None;
|
||||
|
||||
let collab_db = self.user.collab_db(uid)?;
|
||||
let collab_builder = UserDatabaseCollabServiceImpl {
|
||||
@ -100,30 +106,27 @@ impl DatabaseManager {
|
||||
collab_builder: self.collab_builder.clone(),
|
||||
cloud_service: self.cloud_service.clone(),
|
||||
};
|
||||
let config = CollabPersistenceConfig::new().snapshot_per_update(100);
|
||||
|
||||
let workspace_id = self.user.workspace_id()?;
|
||||
let workspace_database_object_id = self.user.workspace_database_object_id()?;
|
||||
let mut workspace_database_doc_state = DataSource::Disk;
|
||||
let mut workspace_database_doc_state =
|
||||
KVDBCollabPersistenceImpl::new(collab_db.clone(), uid).into_data_source();
|
||||
// If the workspace database does not exist on disk, try to fetch it from the remote.
|
||||
if !self.is_collab_exist(uid, &collab_db, &workspace_database_object_id) {
|
||||
trace!("workspace database not exist, try to fetch from remote");
|
||||
match self
|
||||
.cloud_service
|
||||
.get_database_object_doc_state(
|
||||
.get_database_encode_collab(
|
||||
&workspace_database_object_id,
|
||||
CollabType::WorkspaceDatabase,
|
||||
&workspace_id,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(doc_state) => match doc_state {
|
||||
Some(doc_state) => {
|
||||
workspace_database_doc_state = DataSource::DocStateV1(doc_state);
|
||||
},
|
||||
None => {
|
||||
workspace_database_doc_state = DataSource::Disk;
|
||||
},
|
||||
Ok(value) => {
|
||||
if let Some(encode_collab) = value {
|
||||
workspace_database_doc_state = DataSource::from(encode_collab);
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
return Err(FlowyError::record_not_found().with_context(format!(
|
||||
@ -140,20 +143,64 @@ impl DatabaseManager {
|
||||
"open aggregate database views object: {}",
|
||||
&workspace_database_object_id
|
||||
);
|
||||
let collab = collab_builder.build_collab_with_config(
|
||||
|
||||
let workspace_id = self
|
||||
.user
|
||||
.workspace_id()
|
||||
.map_err(|err| DatabaseError::Internal(err.into()))?;
|
||||
let collab_object = self.collab_builder.collab_object(
|
||||
&workspace_id,
|
||||
uid,
|
||||
&workspace_database_object_id,
|
||||
CollabType::WorkspaceDatabase,
|
||||
collab_db.clone(),
|
||||
workspace_database_doc_state,
|
||||
config.clone(),
|
||||
)?;
|
||||
let workspace_database =
|
||||
WorkspaceDatabase::open(uid, collab, collab_db, config, collab_builder);
|
||||
*self.workspace_database.write().await = Some(Arc::new(workspace_database));
|
||||
let workspace_database = self.collab_builder.create_workspace_database(
|
||||
collab_object,
|
||||
workspace_database_doc_state,
|
||||
collab_db,
|
||||
CollabBuilderConfig::default().sync_enable(true),
|
||||
collab_builder,
|
||||
)?;
|
||||
self.workspace_database.store(Some(workspace_database));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
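The initialization above prefers the locally persisted state and only falls back to the cloud when nothing exists on disk, treating an empty remote answer as "start from scratch" rather than an error. A self-contained sketch of that decision order, using hypothetical `LocalStore`/`Cloud` stand-ins rather than the real persistence or cloud services:

#[derive(Debug)]
enum DataSource {
    Disk(Vec<u8>),
    DocState(Vec<u8>),
    Empty,
}

struct LocalStore;

impl LocalStore {
    fn read(&self, _object_id: &str) -> Option<Vec<u8>> {
        // Pretend nothing has been persisted locally yet.
        None
    }
}

struct Cloud;

impl Cloud {
    fn fetch(&self, object_id: &str) -> Result<Option<Vec<u8>>, String> {
        Ok(Some(format!("doc state for {object_id}").into_bytes()))
    }
}

fn resolve_data_source(
    local: &LocalStore,
    cloud: &Cloud,
    object_id: &str,
) -> Result<DataSource, String> {
    // Disk wins when it has anything at all.
    if let Some(bytes) = local.read(object_id) {
        return Ok(DataSource::Disk(bytes));
    }
    // Otherwise ask the cloud; an empty answer just means "start from scratch".
    match cloud.fetch(object_id)? {
        Some(doc_state) => Ok(DataSource::DocState(doc_state)),
        None => Ok(DataSource::Empty),
    }
}

fn main() {
    let source = resolve_data_source(&LocalStore, &Cloud, "workspace-database").unwrap();
    println!("{source:?}");
}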
//FIXME: we need to initialize the sync plugin for newly created collabs
|
||||
#[allow(dead_code)]
|
||||
fn initialize_plugins<T>(
|
||||
&self,
|
||||
uid: i64,
|
||||
object_id: &str,
|
||||
collab_type: CollabType,
|
||||
collab: Arc<RwLock<T>>,
|
||||
) -> FlowyResult<Arc<RwLock<T>>>
|
||||
where
|
||||
T: BorrowMut<Collab> + Send + Sync + 'static,
|
||||
{
|
||||
//FIXME: unfortunately UserDatabaseCollabService::build_collab_with_config is broken by
// design, as it assumes that we can split the collab building process, which we cannot, because:
// 1. We should not be able to run plugins (i.e. SyncPlugin) over a not-fully-initialized collab,
// and that's what build_collab_with_config originally did.
// 2. We cannot fully initialize a collab from UserDatabaseCollabService, because
// WorkspaceDatabase itself requires UserDatabaseCollabService as a constructor parameter.
// Ideally we should never need to initialize plugins that require a collab instance as part of
// that collab's construction process itself - it means that we should redesign SyncPlugin to only
// be fired once a collab is fully initialized.
|
||||
let workspace_id = self
|
||||
.user
|
||||
.workspace_id()
|
||||
.map_err(|err| DatabaseError::Internal(err.into()))?;
|
||||
let object = self
|
||||
.collab_builder
|
||||
.collab_object(&workspace_id, uid, object_id, collab_type)?;
|
||||
let collab = self.collab_builder.finalize(
|
||||
object,
|
||||
CollabBuilderConfig::default().sync_enable(true),
|
||||
collab,
|
||||
)?;
|
||||
Ok(collab)
|
||||
}
|
||||
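The comment above argues for a strict two-phase order: fully construct and initialize the collab first, and only then attach plugins such as the sync plugin. A small illustrative sketch of that ordering with made-up `Doc`/`Plugin` types (not the collab crate's API):

trait Plugin {
    fn attach(&self, doc: &Doc);
}

struct Doc {
    content: String,
    plugins: Vec<Box<dyn Plugin>>,
}

struct LoggingSync;

impl Plugin for LoggingSync {
    fn attach(&self, doc: &Doc) {
        // A real sync plugin would start observing updates here; by this point
        // the document is guaranteed to be fully initialized.
        println!("sync attached, initial content is {} bytes", doc.content.len());
    }
}

// Phase 1: construct and initialize the document with no plugins attached.
fn build_doc(initial: &str) -> Doc {
    Doc {
        content: initial.to_string(),
        plugins: Vec::new(),
    }
}

// Phase 2: wire plugins in only after initialization, mirroring a `finalize` step.
fn finalize(mut doc: Doc, plugins: Vec<Box<dyn Plugin>>) -> Doc {
    for plugin in &plugins {
        plugin.attach(&doc);
    }
    doc.plugins = plugins;
    doc
}

fn main() {
    let doc = build_doc("hello");
    let plugins: Vec<Box<dyn Plugin>> = vec![Box::new(LoggingSync)];
    let doc = finalize(doc, plugins);
    assert_eq!(doc.plugins.len(), 1);
}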
|
||||
#[instrument(
|
||||
name = "database_initialize_with_new_user",
|
||||
level = "debug",
|
||||
@ -166,19 +213,24 @@ impl DatabaseManager {
|
||||
}
|
||||
|
||||
pub async fn get_database_inline_view_id(&self, database_id: &str) -> FlowyResult<String> {
|
||||
let wdb = self.get_database_indexer().await?;
|
||||
let database_collab = wdb.get_database(database_id).await.ok_or_else(|| {
|
||||
FlowyError::record_not_found().with_context(format!("The database:{} not found", database_id))
|
||||
})?;
|
||||
|
||||
let lock_guard = database_collab.lock();
|
||||
let lock = self.workspace_database()?;
|
||||
let wdb = lock.read().await;
|
||||
let database_collab = wdb
|
||||
.get_or_create_database(database_id)
|
||||
.await
|
||||
.ok_or_else(|| {
|
||||
FlowyError::record_not_found()
|
||||
.with_context(format!("The database:{} not found", database_id))
|
||||
})?;
|
||||
|
||||
let lock_guard = database_collab.read().await;
|
||||
Ok(lock_guard.get_inline_view_id())
|
||||
}
|
||||
|
||||
pub async fn get_all_databases_meta(&self) -> Vec<DatabaseMeta> {
|
||||
let mut items = vec![];
|
||||
if let Ok(wdb) = self.get_database_indexer().await {
|
||||
if let Some(lock) = self.workspace_database.load_full() {
|
||||
let wdb = lock.read().await;
|
||||
items = wdb.get_all_database_meta()
|
||||
}
|
||||
items
|
||||
@ -188,7 +240,8 @@ impl DatabaseManager {
|
||||
&self,
|
||||
view_ids_by_database_id: HashMap<String, Vec<String>>,
|
||||
) -> FlowyResult<()> {
|
||||
let wdb = self.get_database_indexer().await?;
|
||||
let lock = self.workspace_database()?;
|
||||
let mut wdb = lock.write().await;
|
||||
view_ids_by_database_id
|
||||
.into_iter()
|
||||
.for_each(|(database_id, view_ids)| {
|
||||
@ -197,13 +250,9 @@ impl DatabaseManager {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_database_with_view_id(&self, view_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
|
||||
let database_id = self.get_database_id_with_view_id(view_id).await?;
|
||||
self.get_database(&database_id).await
|
||||
}
|
||||
|
||||
pub async fn get_database_id_with_view_id(&self, view_id: &str) -> FlowyResult<String> {
|
||||
let wdb = self.get_database_indexer().await?;
|
||||
let lock = self.workspace_database()?;
|
||||
let wdb = lock.read().await;
|
||||
wdb.get_database_id_with_view_id(view_id).ok_or_else(|| {
|
||||
FlowyError::record_not_found()
|
||||
.with_context(format!("The database for view id: {} not found", view_id))
|
||||
@ -211,28 +260,44 @@ impl DatabaseManager {
|
||||
}
|
||||
|
||||
pub async fn get_database_row_ids_with_view_id(&self, view_id: &str) -> FlowyResult<Vec<RowId>> {
|
||||
let database = self.get_database_with_view_id(view_id).await?;
|
||||
Ok(database.get_row_ids())
|
||||
let database = self.get_database_editor_with_view_id(view_id).await?;
|
||||
Ok(database.get_row_ids().await)
|
||||
}
|
||||
|
||||
pub async fn get_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
|
||||
pub async fn get_database_editor_with_view_id(
|
||||
&self,
|
||||
view_id: &str,
|
||||
) -> FlowyResult<Arc<DatabaseEditor>> {
|
||||
let database_id = self.get_database_id_with_view_id(view_id).await?;
|
||||
self.get_database_editor(&database_id).await
|
||||
}
|
||||
|
||||
pub async fn get_database_editor(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
|
||||
if let Some(editor) = self.editors.lock().await.get(database_id).cloned() {
|
||||
return Ok(editor);
|
||||
}
|
||||
// TODO(nathan): refactor get_database so that database creation and database opening are split.
|
||||
self.open_database(database_id).await
|
||||
}
|
||||
|
||||
pub async fn open_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
|
||||
trace!("open database editor:{}", database_id);
|
||||
let database = self
|
||||
.get_database_indexer()
|
||||
.await?
|
||||
.get_database(database_id)
|
||||
let lock = self.workspace_database()?;
|
||||
let database = lock
|
||||
.read()
|
||||
.await
|
||||
.get_or_create_database(database_id)
|
||||
.await
|
||||
.ok_or_else(|| FlowyError::collab_not_sync().with_context("open database error"))?;
|
||||
|
||||
let editor = Arc::new(DatabaseEditor::new(database, self.task_scheduler.clone()).await?);
|
||||
let editor = Arc::new(
|
||||
DatabaseEditor::new(
|
||||
self.user.clone(),
|
||||
database,
|
||||
self.task_scheduler.clone(),
|
||||
self.collab_builder.clone(),
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
self
|
||||
.editors
|
||||
.lock()
|
||||
@ -241,17 +306,14 @@ impl DatabaseManager {
|
||||
Ok(editor)
|
||||
}
|
||||
|
||||
/// Open the database view
|
||||
pub async fn open_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
|
||||
let view_id = view_id.as_ref();
|
||||
let wdb = self.get_database_indexer().await?;
|
||||
if let Some(database_id) = wdb.get_database_id_with_view_id(view_id) {
|
||||
if let Some(database) = wdb.open_database(&database_id) {
|
||||
if let Some(lock_database) = database.try_lock() {
|
||||
if let Some(lock_collab) = lock_database.get_collab().try_lock() {
|
||||
trace!("{} database start init sync", view_id);
|
||||
lock_collab.start_init_sync();
|
||||
}
|
||||
}
|
||||
let lock = self.workspace_database()?;
|
||||
let workspace_database = lock.read().await;
|
||||
if let Some(database_id) = workspace_database.get_database_id_with_view_id(view_id) {
|
||||
if self.editors.lock().await.get(&database_id).is_none() {
|
||||
self.open_database(&database_id).await?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
@ -259,20 +321,23 @@ impl DatabaseManager {
|
||||
|
||||
pub async fn close_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
|
||||
let view_id = view_id.as_ref();
|
||||
let wdb = self.get_database_indexer().await?;
|
||||
let database_id = wdb.get_database_id_with_view_id(view_id);
|
||||
let lock = self.workspace_database()?;
|
||||
let workspace_database = lock.read().await;
|
||||
let database_id = workspace_database.get_database_id_with_view_id(view_id);
|
||||
if let Some(database_id) = database_id {
|
||||
let mut editors = self.editors.lock().await;
|
||||
let mut should_remove = false;
|
||||
|
||||
if let Some(editor) = editors.get(&database_id) {
|
||||
editor.close_view(view_id).await;
|
||||
should_remove = editor.num_views().await == 0;
|
||||
// when there are no open views, mark the database editor for removal.
|
||||
should_remove = editor.num_of_opening_views().await == 0;
|
||||
}
|
||||
|
||||
if should_remove {
|
||||
trace!("remove database editor:{}", database_id);
|
||||
editors.remove(&database_id);
|
||||
wdb.close_database(&database_id);
|
||||
workspace_database.close_database(&database_id);
|
||||
}
|
||||
}
|
||||
|
||||
@ -280,13 +345,14 @@ impl DatabaseManager {
|
||||
}
|
||||
|
||||
pub async fn delete_database_view(&self, view_id: &str) -> FlowyResult<()> {
|
||||
let database = self.get_database_with_view_id(view_id).await?;
|
||||
let database = self.get_database_editor_with_view_id(view_id).await?;
|
||||
let _ = database.delete_database_view(view_id).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn duplicate_database(&self, view_id: &str) -> FlowyResult<Vec<u8>> {
|
||||
let wdb = self.get_database_indexer().await?;
|
||||
let lock = self.workspace_database()?;
|
||||
let wdb = lock.read().await;
|
||||
let data = wdb.get_database_data(view_id).await?;
|
||||
let json_bytes = data.to_json_bytes()?;
|
||||
Ok(json_bytes)
|
||||
@ -313,12 +379,12 @@ impl DatabaseManager {
|
||||
create_view_params.view_id = view_id.to_string();
|
||||
}
|
||||
|
||||
let wdb = self.get_database_indexer().await?;
|
||||
let lock = self.workspace_database()?;
|
||||
let mut wdb = lock.write().await;
|
||||
let database = wdb.create_database(create_database_params)?;
|
||||
let encoded_collab = database
|
||||
.lock()
|
||||
.get_collab()
|
||||
.lock()
|
||||
.read()
|
||||
.await
|
||||
.encode_collab_v1(|collab| CollabType::Database.validate_require_data(collab))?;
|
||||
Ok(encoded_collab)
|
||||
}
|
||||
@ -326,9 +392,11 @@ impl DatabaseManager {
|
||||
pub async fn create_database_with_params(
|
||||
&self,
|
||||
params: CreateDatabaseParams,
|
||||
) -> FlowyResult<Arc<MutexDatabase>> {
|
||||
let wdb = self.get_database_indexer().await?;
|
||||
) -> FlowyResult<Arc<RwLock<Database>>> {
|
||||
let lock = self.workspace_database()?;
|
||||
let mut wdb = lock.write().await;
|
||||
let database = wdb.create_database(params)?;
|
||||
|
||||
Ok(database)
|
||||
}
|
||||
|
||||
@ -342,12 +410,14 @@ impl DatabaseManager {
|
||||
database_view_id: String,
|
||||
database_parent_view_id: String,
|
||||
) -> FlowyResult<()> {
|
||||
let wdb = self.get_database_indexer().await?;
|
||||
let lock = self.workspace_database()?;
|
||||
let mut wdb = lock.write().await;
|
||||
let mut params = CreateViewParams::new(database_id.clone(), database_view_id, name, layout);
|
||||
if let Some(database) = wdb.get_database(&database_id).await {
|
||||
if let Some(database) = wdb.get_or_create_database(&database_id).await {
|
||||
let (field, layout_setting, field_settings_map) =
|
||||
DatabaseLayoutDepsResolver::new(database, layout)
|
||||
.resolve_deps_when_create_database_linked_view(&database_parent_view_id);
|
||||
.resolve_deps_when_create_database_linked_view(&database_parent_view_id)
|
||||
.await;
|
||||
if let Some(field) = field {
|
||||
params = params.with_deps_fields(vec![field], vec![default_field_settings_by_layout_map()]);
|
||||
}
|
||||
@ -374,18 +444,12 @@ impl DatabaseManager {
|
||||
.await
|
||||
.map_err(internal_error)??;
|
||||
|
||||
// Currently, we only support importing up to 500 rows. We can support more rows in the future.
|
||||
if !cfg!(debug_assertions) && params.rows.len() > 500 {
|
||||
return Err(FlowyError::internal().with_context("The number of rows exceeds the limit"));
|
||||
}
|
||||
|
||||
let view_id = params.inline_view_id.clone();
|
||||
let database_id = params.database_id.clone();
|
||||
let database = self.create_database_with_params(params).await?;
|
||||
let encoded_collab = database
|
||||
.lock()
|
||||
.get_collab()
|
||||
.lock()
|
||||
.read()
|
||||
.await
|
||||
.encode_collab_v1(|collab| CollabType::Database.validate_require_data(collab))?;
|
||||
let result = ImportResult {
|
||||
database_id,
|
||||
@ -405,7 +469,7 @@ impl DatabaseManager {
|
||||
}
|
||||
|
||||
pub async fn export_csv(&self, view_id: &str, style: CSVFormat) -> FlowyResult<String> {
|
||||
let database = self.get_database_with_view_id(view_id).await?;
|
||||
let database = self.get_database_editor_with_view_id(view_id).await?;
|
||||
database.export_csv(style).await
|
||||
}
|
||||
|
||||
@ -414,7 +478,7 @@ impl DatabaseManager {
|
||||
view_id: &str,
|
||||
layout: DatabaseLayoutPB,
|
||||
) -> FlowyResult<()> {
|
||||
let database = self.get_database_with_view_id(view_id).await?;
|
||||
let database = self.get_database_editor_with_view_id(view_id).await?;
|
||||
database.update_view_layout(view_id, layout.into()).await
|
||||
}
|
||||
|
||||
@ -440,14 +504,11 @@ impl DatabaseManager {
|
||||
Ok(snapshots)
|
||||
}
|
||||
|
||||
/// Return the database indexer.
|
||||
/// Each workspace has its own database indexer that manages all the databases and database views.
|
||||
async fn get_database_indexer(&self) -> FlowyResult<Arc<WorkspaceDatabase>> {
|
||||
let database = self.workspace_database.read().await;
|
||||
match &*database {
|
||||
None => Err(FlowyError::internal().with_context("Workspace database not initialized")),
|
||||
Some(user_database) => Ok(user_database.clone()),
|
||||
}
|
||||
fn workspace_database(&self) -> FlowyResult<Arc<RwLock<WorkspaceDatabase>>> {
|
||||
self
|
||||
.workspace_database
|
||||
.load_full()
|
||||
.ok_or_else(|| FlowyError::internal().with_context("Workspace database not initialized"))
|
||||
}
|
||||
|
||||
#[instrument(level = "debug", skip_all)]
|
||||
@ -457,10 +518,10 @@ impl DatabaseManager {
|
||||
row_id: RowId,
|
||||
field_id: String,
|
||||
) -> FlowyResult<()> {
|
||||
let database = self.get_database_with_view_id(&view_id).await?;
|
||||
let database = self.get_database_editor_with_view_id(&view_id).await?;
|
||||
let mut summary_row_content = SummaryRowContent::new();
|
||||
if let Some(row) = database.get_row(&view_id, &row_id) {
|
||||
let fields = database.get_fields(&view_id, None);
|
||||
if let Some(row) = database.get_row(&view_id, &row_id).await {
|
||||
let fields = database.get_fields(&view_id, None).await;
|
||||
for field in fields {
|
||||
// When summarizing a row, skip the content in the "AI summary" cell; it does not need to
|
||||
// be summarized.
|
||||
@ -501,12 +562,12 @@ impl DatabaseManager {
|
||||
row_id: RowId,
|
||||
field_id: String,
|
||||
) -> FlowyResult<()> {
|
||||
let database = self.get_database_with_view_id(&view_id).await?;
|
||||
let database = self.get_database_editor_with_view_id(&view_id).await?;
|
||||
let mut translate_row_content = TranslateRowContent::new();
|
||||
let mut language = "english".to_string();
|
||||
|
||||
if let Some(row) = database.get_row(&view_id, &row_id) {
|
||||
let fields = database.get_fields(&view_id, None);
|
||||
if let Some(row) = database.get_row(&view_id, &row_id).await {
|
||||
let fields = database.get_fields(&view_id, None).await;
|
||||
for field in fields {
|
||||
// When translating a row, skip the content in the "AI Translate" cell; it does not need to
|
||||
// be translated.
|
||||
@ -582,79 +643,73 @@ struct UserDatabaseCollabServiceImpl {
|
||||
cloud_service: Arc<dyn DatabaseCloudService>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
|
||||
fn get_collab_doc_state(
|
||||
async fn get_encode_collab(
|
||||
&self,
|
||||
object_id: &str,
|
||||
object_ty: CollabType,
|
||||
) -> CollabFuture<Result<DataSource, DatabaseError>> {
|
||||
) -> Result<Option<EncodedCollab>, DatabaseError> {
|
||||
let workspace_id = self.user.workspace_id().unwrap();
|
||||
let object_id = object_id.to_string();
|
||||
let weak_cloud_service = Arc::downgrade(&self.cloud_service);
|
||||
Box::pin(async move {
|
||||
match weak_cloud_service.upgrade() {
|
||||
None => Err(DatabaseError::Internal(anyhow!("Cloud service is dropped"))),
|
||||
Some(cloud_service) => {
|
||||
let doc_state = cloud_service
|
||||
.get_database_object_doc_state(&object_id, object_ty, &workspace_id)
|
||||
.await?;
|
||||
match doc_state {
|
||||
None => Ok(DataSource::Disk),
|
||||
Some(doc_state) => Ok(DataSource::DocStateV1(doc_state)),
|
||||
}
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
match weak_cloud_service.upgrade() {
|
||||
None => Err(DatabaseError::Internal(anyhow!("Cloud service is dropped"))),
|
||||
Some(cloud_service) => {
|
||||
let encode_collab = cloud_service
|
||||
.get_database_encode_collab(&object_id, object_ty, &workspace_id)
|
||||
.await?;
|
||||
Ok(encode_collab)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn batch_get_collab_update(
|
||||
async fn batch_get_encode_collab(
|
||||
&self,
|
||||
object_ids: Vec<String>,
|
||||
object_ty: CollabType,
|
||||
) -> CollabFuture<Result<CollabDocStateByOid, DatabaseError>> {
|
||||
) -> Result<EncodeCollabByOid, DatabaseError> {
|
||||
let cloned_user = self.user.clone();
|
||||
let weak_cloud_service = Arc::downgrade(&self.cloud_service);
|
||||
Box::pin(async move {
|
||||
let workspace_id = cloned_user
|
||||
.workspace_id()
|
||||
.map_err(|err| DatabaseError::Internal(err.into()))?;
|
||||
match weak_cloud_service.upgrade() {
|
||||
None => {
|
||||
tracing::warn!("Cloud service is dropped");
|
||||
Ok(CollabDocStateByOid::default())
|
||||
},
|
||||
Some(cloud_service) => {
|
||||
let updates = cloud_service
|
||||
.batch_get_database_object_doc_state(object_ids, object_ty, &workspace_id)
|
||||
.await?;
|
||||
Ok(updates)
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
let workspace_id = cloned_user
|
||||
.workspace_id()
|
||||
.map_err(|err| DatabaseError::Internal(err.into()))?;
|
||||
match weak_cloud_service.upgrade() {
|
||||
None => {
|
||||
tracing::warn!("Cloud service is dropped");
|
||||
Ok(EncodeCollabByOid::default())
|
||||
},
|
||||
Some(cloud_service) => {
|
||||
let updates = cloud_service
|
||||
.batch_get_database_encode_collab(object_ids, object_ty, &workspace_id)
|
||||
.await?;
|
||||
Ok(updates)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn build_collab_with_config(
|
||||
/// NOTE: this method doesn't initialize plugins; however, it is passed into WorkspaceDatabase,
/// therefore all Database/DatabaseRow creation methods must initialize plugins themselves.
|
||||
fn build_collab(
|
||||
&self,
|
||||
uid: i64,
|
||||
object_id: &str,
|
||||
object_type: CollabType,
|
||||
collab_db: Weak<CollabKVDB>,
|
||||
collab_raw_data: DataSource,
|
||||
_persistence_config: CollabPersistenceConfig,
|
||||
) -> Result<Arc<MutexCollab>, DatabaseError> {
|
||||
data_source: DataSource,
|
||||
) -> Result<Collab, DatabaseError> {
|
||||
let workspace_id = self
|
||||
.user
|
||||
.workspace_id()
|
||||
.map_err(|err| DatabaseError::Internal(err.into()))?;
|
||||
let collab = self.collab_builder.build_with_config(
|
||||
&workspace_id,
|
||||
uid,
|
||||
object_id,
|
||||
object_type.clone(),
|
||||
collab_db.clone(),
|
||||
collab_raw_data,
|
||||
CollabBuilderConfig::default().sync_enable(true),
|
||||
)?;
|
||||
let object = self
|
||||
.collab_builder
|
||||
.collab_object(&workspace_id, uid, object_id, object_type)?;
|
||||
let collab = self
|
||||
.collab_builder
|
||||
.build_collab(&object, &collab_db, data_source)?;
|
||||
Ok(collab)
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,5 @@
|
||||
use parking_lot::RwLock;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::utils::cache::AnyTypeCache;
|
||||
|
||||
pub type CalculationsByFieldIdCache = Arc<RwLock<AnyTypeCache<String>>>;
|
||||
pub type CalculationsByFieldIdCache = Arc<AnyTypeCache<String>>;
|
||||
|
@ -1,3 +1,4 @@
|
||||
use async_trait::async_trait;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
@ -7,7 +8,6 @@ use flowy_error::FlowyResult;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use lib_infra::future::Fut;
|
||||
use lib_infra::priority_task::{QualityOfService, Task, TaskContent, TaskDispatcher};
|
||||
|
||||
use crate::entities::{
|
||||
@ -19,13 +19,14 @@ use crate::utils::cache::AnyTypeCache;
|
||||
|
||||
use super::{Calculation, CalculationChangeset, CalculationsService};
|
||||
|
||||
#[async_trait]
|
||||
pub trait CalculationsDelegate: Send + Sync + 'static {
|
||||
fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut<Vec<Arc<RowCell>>>;
|
||||
fn get_field(&self, field_id: &str) -> Option<Field>;
|
||||
fn get_calculation(&self, view_id: &str, field_id: &str) -> Fut<Option<Arc<Calculation>>>;
|
||||
fn get_all_calculations(&self, view_id: &str) -> Fut<Arc<Vec<Arc<Calculation>>>>;
|
||||
fn update_calculation(&self, view_id: &str, calculation: Calculation);
|
||||
fn remove_calculation(&self, view_id: &str, calculation_id: &str);
|
||||
async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec<Arc<RowCell>>;
|
||||
async fn get_field(&self, field_id: &str) -> Option<Field>;
|
||||
async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option<Arc<Calculation>>;
|
||||
async fn get_all_calculations(&self, view_id: &str) -> Arc<Vec<Arc<Calculation>>>;
|
||||
async fn update_calculation(&self, view_id: &str, calculation: Calculation);
|
||||
async fn remove_calculation(&self, view_id: &str, calculation_id: &str);
|
||||
}
|
||||
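The trait above now uses `#[async_trait]`, so the delegate's methods can be written as plain `async fn`s instead of returning hand-rolled `Fut<...>` futures. A trimmed-down sketch of the same shape (only two of the methods, and a simplified `Calculation`), assuming the `async-trait` and `tokio` crates:

use async_trait::async_trait;
use std::sync::Arc;

#[derive(Clone, Debug)]
struct Calculation {
    field_id: String,
    value: String,
}

#[async_trait]
trait CalculationsDelegate: Send + Sync {
    async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option<Arc<Calculation>>;
    async fn update_calculation(&self, view_id: &str, calculation: Calculation);
}

struct InMemoryDelegate;

#[async_trait]
impl CalculationsDelegate for InMemoryDelegate {
    async fn get_calculation(&self, _view_id: &str, field_id: &str) -> Option<Arc<Calculation>> {
        Some(Arc::new(Calculation {
            field_id: field_id.to_string(),
            value: "42".to_string(),
        }))
    }

    async fn update_calculation(&self, view_id: &str, calculation: Calculation) {
        println!("update {} = {} in view {}", calculation.field_id, calculation.value, view_id);
    }
}

#[tokio::main]
async fn main() {
    let delegate = InMemoryDelegate;
    if let Some(calculation) = delegate.get_calculation("view-1", "field-1").await {
        delegate.update_calculation("view-1", (*calculation).clone()).await;
    }
}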
|
||||
pub struct CalculationsController {
|
||||
@ -45,7 +46,7 @@ impl Drop for CalculationsController {
|
||||
}
|
||||
|
||||
impl CalculationsController {
|
||||
pub async fn new<T>(
|
||||
pub fn new<T>(
|
||||
view_id: &str,
|
||||
handler_id: &str,
|
||||
delegate: T,
|
||||
@ -65,7 +66,7 @@ impl CalculationsController {
|
||||
calculations_service: CalculationsService::new(),
|
||||
notifier,
|
||||
};
|
||||
this.update_cache(calculations).await;
|
||||
this.update_cache(calculations);
|
||||
this
|
||||
}
|
||||
|
||||
@ -130,7 +131,8 @@ impl CalculationsController {
|
||||
if let Some(calculation) = calculation {
|
||||
self
|
||||
.delegate
|
||||
.remove_calculation(&self.view_id, &calculation.id);
|
||||
.remove_calculation(&self.view_id, &calculation.id)
|
||||
.await;
|
||||
|
||||
let notification = CalculationChangesetNotificationPB::from_delete(
|
||||
&self.view_id,
|
||||
@ -165,7 +167,8 @@ impl CalculationsController {
|
||||
if !calc_type.is_allowed(new_field_type) {
|
||||
self
|
||||
.delegate
|
||||
.remove_calculation(&self.view_id, &calculation.id);
|
||||
.remove_calculation(&self.view_id, &calculation.id)
|
||||
.await;
|
||||
|
||||
let notification = CalculationChangesetNotificationPB::from_delete(
|
||||
&self.view_id,
|
||||
@ -201,7 +204,8 @@ impl CalculationsController {
|
||||
if let Some(update) = update {
|
||||
self
|
||||
.delegate
|
||||
.update_calculation(&self.view_id, update.clone());
|
||||
.update_calculation(&self.view_id, update.clone())
|
||||
.await;
|
||||
|
||||
let notification = CalculationChangesetNotificationPB::from_update(
|
||||
&self.view_id,
|
||||
@ -238,7 +242,10 @@ impl CalculationsController {
|
||||
let update = self.get_updated_calculation(calculation.clone()).await;
|
||||
if let Some(update) = update {
|
||||
updates.push(CalculationPB::from(&update));
|
||||
self.delegate.update_calculation(&self.view_id, update);
|
||||
self
|
||||
.delegate
|
||||
.update_calculation(&self.view_id, update)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -252,7 +259,10 @@ impl CalculationsController {
|
||||
|
||||
if let Some(update) = update {
|
||||
updates.push(CalculationPB::from(&update));
|
||||
self.delegate.update_calculation(&self.view_id, update);
|
||||
self
|
||||
.delegate
|
||||
.update_calculation(&self.view_id, update)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -273,7 +283,7 @@ impl CalculationsController {
|
||||
.delegate
|
||||
.get_cells_for_field(&self.view_id, &calculation.field_id)
|
||||
.await;
|
||||
let field = self.delegate.get_field(&calculation.field_id)?;
|
||||
let field = self.delegate.get_field(&calculation.field_id).await?;
|
||||
|
||||
let value =
|
||||
self
|
||||
@ -299,7 +309,7 @@ impl CalculationsController {
|
||||
.get_cells_for_field(&self.view_id, &insert.field_id)
|
||||
.await;
|
||||
|
||||
let field = self.delegate.get_field(&insert.field_id)?;
|
||||
let field = self.delegate.get_field(&insert.field_id).await?;
|
||||
|
||||
let value = self
|
||||
.calculations_service
|
||||
@ -331,12 +341,11 @@ impl CalculationsController {
|
||||
notification
|
||||
}
|
||||
|
||||
async fn update_cache(&self, calculations: Vec<Arc<Calculation>>) {
|
||||
fn update_cache(&self, calculations: Vec<Arc<Calculation>>) {
|
||||
for calculation in calculations {
|
||||
let field_id = &calculation.field_id;
|
||||
self
|
||||
.calculations_by_field_cache
|
||||
.write()
|
||||
.insert(field_id, calculation.clone());
|
||||
}
|
||||
}
|
||||
|
@ -1,14 +1,17 @@
|
||||
use anyhow::bail;
|
||||
use collab::core::any_map::AnyMapExtension;
|
||||
use collab::preclude::encoding::serde::from_any;
|
||||
use collab::preclude::Any;
|
||||
use collab_database::views::{CalculationMap, CalculationMapBuilder};
|
||||
use serde::Deserialize;
|
||||
|
||||
use crate::entities::CalculationPB;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
pub struct Calculation {
|
||||
pub id: String,
|
||||
pub field_id: String,
|
||||
#[serde(default, rename = "ty")]
|
||||
pub calculation_type: i64,
|
||||
#[serde(default, rename = "calculation_value")]
|
||||
pub value: String,
|
||||
}
|
||||
|
||||
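The `#[serde(default, rename = ...)]` attributes above let the struct deserialize directly from the stored map, where the calculation type is kept under the short key "ty" and the value may be absent. The real code goes through `collab`'s `from_any`; the sketch below uses `serde_json` purely to illustrate how those attributes behave:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Calculation {
    id: String,
    field_id: String,
    #[serde(default, rename = "ty")]
    calculation_type: i64,
    #[serde(default, rename = "calculation_value")]
    value: String,
}

fn main() {
    // "calculation_value" is missing, so `value` falls back to String::default().
    let raw = r#"{ "id": "c1", "field_id": "f1", "ty": 2 }"#;
    let calculation: Calculation = serde_json::from_str(raw).unwrap();
    assert_eq!(calculation.calculation_type, 2);
    assert!(calculation.value.is_empty());
    println!("{calculation:?}");
}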
@ -19,12 +22,12 @@ const CALCULATION_VALUE: &str = "calculation_value";
|
||||
|
||||
impl From<Calculation> for CalculationMap {
|
||||
fn from(data: Calculation) -> Self {
|
||||
CalculationMapBuilder::new()
|
||||
.insert_str_value(CALCULATION_ID, data.id)
|
||||
.insert_str_value(FIELD_ID, data.field_id)
|
||||
.insert_i64_value(CALCULATION_TYPE, data.calculation_type)
|
||||
.insert_str_value(CALCULATION_VALUE, data.value)
|
||||
.build()
|
||||
CalculationMapBuilder::from([
|
||||
(CALCULATION_ID.into(), data.id.into()),
|
||||
(FIELD_ID.into(), data.field_id.into()),
|
||||
(CALCULATION_TYPE.into(), data.calculation_type.into()),
|
||||
(CALCULATION_VALUE.into(), data.value.into()),
|
||||
])
|
||||
}
|
||||
}
|
||||
|
||||
@ -45,29 +48,7 @@ impl TryFrom<CalculationMap> for Calculation {
|
||||
type Error = anyhow::Error;
|
||||
|
||||
fn try_from(calculation: CalculationMap) -> Result<Self, Self::Error> {
|
||||
match (
|
||||
calculation.get_str_value(CALCULATION_ID),
|
||||
calculation.get_str_value(FIELD_ID),
|
||||
) {
|
||||
(Some(id), Some(field_id)) => {
|
||||
let value = calculation
|
||||
.get_str_value(CALCULATION_VALUE)
|
||||
.unwrap_or_default();
|
||||
let calculation_type = calculation
|
||||
.get_i64_value(CALCULATION_TYPE)
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(Calculation {
|
||||
id,
|
||||
field_id,
|
||||
calculation_type,
|
||||
value,
|
||||
})
|
||||
},
|
||||
_ => {
|
||||
bail!("Invalid calculation data")
|
||||
},
|
||||
}
|
||||
from_any(&Any::from(calculation)).map_err(|e| e.into())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,6 +1,5 @@
|
||||
use parking_lot::RwLock;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::utils::cache::AnyTypeCache;
|
||||
|
||||
pub type CellCache = Arc<RwLock<AnyTypeCache<u64>>>;
|
||||
pub type CellCache = Arc<AnyTypeCache<u64>>;
|
||||
|
File diff suppressed because it is too large
@ -2,7 +2,7 @@ use crate::entities::{DatabaseSyncStatePB, DidFetchRowPB, RowsChangePB};
|
||||
use crate::notification::{send_notification, DatabaseNotification, DATABASE_OBSERVABLE_SOURCE};
|
||||
use crate::services::database::UpdatedRow;
|
||||
use collab_database::blocks::BlockEvent;
|
||||
use collab_database::database::MutexDatabase;
|
||||
use collab_database::database::Database;
|
||||
use collab_database::fields::FieldChange;
|
||||
use collab_database::rows::{RowChange, RowId};
|
||||
use collab_database::views::DatabaseViewChange;
|
||||
@ -10,11 +10,12 @@ use flowy_notification::{DebounceNotificationSender, NotificationBuilder};
|
||||
use futures::StreamExt;
|
||||
use lib_dispatch::prelude::af_spawn;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use tracing::{trace, warn};
|
||||
|
||||
pub(crate) async fn observe_sync_state(database_id: &str, database: &Arc<MutexDatabase>) {
|
||||
pub(crate) async fn observe_sync_state(database_id: &str, database: &Arc<RwLock<Database>>) {
|
||||
let weak_database = Arc::downgrade(database);
|
||||
let mut sync_state = database.lock().subscribe_sync_state();
|
||||
let mut sync_state = database.read().await.subscribe_sync_state();
|
||||
let database_id = database_id.to_string();
|
||||
af_spawn(async move {
|
||||
while let Some(sync_state) = sync_state.next().await {
|
||||
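The observers above hold only a `Weak` reference inside the spawned task, so a background loop never keeps the database alive on its own and simply stops once the database is dropped. A minimal sketch of that pattern with plain `tokio` primitives and a stand-in `Database` type:

use std::sync::{Arc, Weak};
use std::time::Duration;
use tokio::sync::RwLock;

struct Database {
    id: String,
}

fn observe(database: &Arc<RwLock<Database>>) {
    // Keep only a Weak handle in the task so observing never extends the
    // database's lifetime.
    let weak_database: Weak<RwLock<Database>> = Arc::downgrade(database);
    tokio::spawn(async move {
        loop {
            match weak_database.upgrade() {
                // The database was dropped elsewhere: stop observing.
                None => break,
                Some(database) => {
                    println!("observing database {}", database.read().await.id);
                },
            }
            tokio::time::sleep(Duration::from_millis(50)).await;
        }
    });
}

#[tokio::main]
async fn main() {
    let database = Arc::new(RwLock::new(Database { id: "db-1".to_string() }));
    observe(&database);
    tokio::time::sleep(Duration::from_millis(120)).await;
    drop(database); // the observer exits on its next tick
    tokio::time::sleep(Duration::from_millis(120)).await;
}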
@ -35,13 +36,13 @@ pub(crate) async fn observe_sync_state(database_id: &str, database: &Arc<MutexDa
|
||||
#[allow(dead_code)]
|
||||
pub(crate) async fn observe_rows_change(
|
||||
database_id: &str,
|
||||
database: &Arc<MutexDatabase>,
|
||||
database: &Arc<RwLock<Database>>,
|
||||
notification_sender: &Arc<DebounceNotificationSender>,
|
||||
) {
|
||||
let notification_sender = notification_sender.clone();
|
||||
let database_id = database_id.to_string();
|
||||
let weak_database = Arc::downgrade(database);
|
||||
let mut row_change = database.lock().subscribe_row_change();
|
||||
let mut row_change = database.read().await.subscribe_row_change();
|
||||
af_spawn(async move {
|
||||
while let Ok(row_change) = row_change.recv().await {
|
||||
if let Some(database) = weak_database.upgrade() {
|
||||
@ -59,7 +60,7 @@ pub(crate) async fn observe_rows_change(
|
||||
let cell_id = format!("{}:{}", row_id, field_id);
|
||||
notify_cell(¬ification_sender, &cell_id);
|
||||
|
||||
let views = database.lock().get_all_database_views_meta();
|
||||
let views = database.read().await.get_all_database_views_meta();
|
||||
for view in views {
|
||||
notify_row(¬ification_sender, &view.id, &field_id, &row_id);
|
||||
}
|
||||
@ -75,10 +76,10 @@ pub(crate) async fn observe_rows_change(
|
||||
});
|
||||
}
|
||||
#[allow(dead_code)]
|
||||
pub(crate) async fn observe_field_change(database_id: &str, database: &Arc<MutexDatabase>) {
|
||||
pub(crate) async fn observe_field_change(database_id: &str, database: &Arc<RwLock<Database>>) {
|
||||
let database_id = database_id.to_string();
|
||||
let weak_database = Arc::downgrade(database);
|
||||
let mut field_change = database.lock().subscribe_field_change();
|
||||
let mut field_change = database.read().await.subscribe_field_change();
|
||||
af_spawn(async move {
|
||||
while let Ok(field_change) = field_change.recv().await {
|
||||
if weak_database.upgrade().is_none() {
|
||||
@ -100,10 +101,10 @@ pub(crate) async fn observe_field_change(database_id: &str, database: &Arc<Mutex
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) async fn observe_view_change(database_id: &str, database: &Arc<MutexDatabase>) {
|
||||
pub(crate) async fn observe_view_change(database_id: &str, database: &Arc<RwLock<Database>>) {
|
||||
let database_id = database_id.to_string();
|
||||
let weak_database = Arc::downgrade(database);
|
||||
let mut view_change = database.lock().subscribe_view_change();
|
||||
let mut view_change = database.read().await.subscribe_view_change();
|
||||
af_spawn(async move {
|
||||
while let Ok(view_change) = view_change.recv().await {
|
||||
if weak_database.upgrade().is_none() {
|
||||
@ -136,10 +137,10 @@ pub(crate) async fn observe_view_change(database_id: &str, database: &Arc<MutexD
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) async fn observe_block_event(database_id: &str, database: &Arc<MutexDatabase>) {
|
||||
pub(crate) async fn observe_block_event(database_id: &str, database: &Arc<RwLock<Database>>) {
|
||||
let database_id = database_id.to_string();
|
||||
let weak_database = Arc::downgrade(database);
|
||||
let mut block_event_rx = database.lock().subscribe_block_event();
|
||||
let mut block_event_rx = database.read().await.subscribe_block_event();
|
||||
af_spawn(async move {
|
||||
while let Ok(event) = block_event_rx.recv().await {
|
||||
if weak_database.upgrade().is_none() {
|
||||
|
@ -1,9 +1,10 @@
|
||||
use collab_database::database::{gen_field_id, MutexDatabase};
|
||||
use collab_database::database::{gen_field_id, Database};
|
||||
use collab_database::fields::Field;
|
||||
use collab_database::views::{
|
||||
DatabaseLayout, FieldSettingsByFieldIdMap, LayoutSetting, OrderObjectPosition,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::entities::FieldType;
|
||||
use crate::services::field::{DateTypeOption, SingleSelectTypeOption};
|
||||
@ -15,20 +16,20 @@ use crate::services::setting::{BoardLayoutSetting, CalendarLayoutSetting};
|
||||
/// view depends on a field that can be used to group rows, while a calendar view
|
||||
/// depends on a date field.
|
||||
pub struct DatabaseLayoutDepsResolver {
|
||||
pub database: Arc<MutexDatabase>,
|
||||
pub database: Arc<RwLock<Database>>,
|
||||
/// The new database layout.
|
||||
pub database_layout: DatabaseLayout,
|
||||
}
|
||||
|
||||
impl DatabaseLayoutDepsResolver {
|
||||
pub fn new(database: Arc<MutexDatabase>, database_layout: DatabaseLayout) -> Self {
|
||||
pub fn new(database: Arc<RwLock<Database>>, database_layout: DatabaseLayout) -> Self {
|
||||
Self {
|
||||
database,
|
||||
database_layout,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_deps_when_create_database_linked_view(
|
||||
pub async fn resolve_deps_when_create_database_linked_view(
|
||||
&self,
|
||||
view_id: &str,
|
||||
) -> (
|
||||
@ -41,9 +42,8 @@ impl DatabaseLayoutDepsResolver {
|
||||
DatabaseLayout::Board => {
|
||||
let layout_settings = BoardLayoutSetting::new().into();
|
||||
|
||||
let field = if !self
|
||||
.database
|
||||
.lock()
|
||||
let database = self.database.read().await;
|
||||
let field = if !database
|
||||
.get_fields(None)
|
||||
.into_iter()
|
||||
.any(|field| FieldType::from(field.field_type).can_be_group())
|
||||
@ -53,7 +53,7 @@ impl DatabaseLayoutDepsResolver {
|
||||
None
|
||||
};
|
||||
|
||||
let field_settings_map = self.database.lock().get_field_settings(view_id, None);
|
||||
let field_settings_map = database.get_field_settings(view_id, None);
|
||||
tracing::info!(
|
||||
"resolve_deps_when_create_database_linked_view {:?}",
|
||||
field_settings_map
|
||||
@ -68,7 +68,8 @@ impl DatabaseLayoutDepsResolver {
|
||||
DatabaseLayout::Calendar => {
|
||||
match self
|
||||
.database
|
||||
.lock()
|
||||
.read()
|
||||
.await
|
||||
.get_fields(None)
|
||||
.into_iter()
|
||||
.find(|field| FieldType::from(field.field_type) == FieldType::DateTime)
|
||||
@ -89,13 +90,20 @@ impl DatabaseLayoutDepsResolver {
|
||||
|
||||
/// If the new layout type is a calendar and there is no date field in the database, it will add
|
||||
/// a new date field to the database and create the corresponding layout setting.
|
||||
pub fn resolve_deps_when_update_layout_type(&self, view_id: &str) {
|
||||
let fields = self.database.lock().get_fields(None);
|
||||
pub async fn resolve_deps_when_update_layout_type(&self, view_id: &str) {
|
||||
let mut database = self.database.write().await;
|
||||
let fields = database.get_fields(None);
|
||||
// Insert the layout setting if it doesn't exist
|
||||
match &self.database_layout {
|
||||
DatabaseLayout::Grid => {},
|
||||
DatabaseLayout::Board => {
|
||||
self.create_board_layout_setting_if_need(view_id);
|
||||
if database
|
||||
.get_layout_setting::<BoardLayoutSetting>(view_id, &self.database_layout)
|
||||
.is_none()
|
||||
{
|
||||
let layout_setting = BoardLayoutSetting::new();
|
||||
database.insert_layout_setting(view_id, &self.database_layout, layout_setting);
|
||||
}
|
||||
},
|
||||
DatabaseLayout::Calendar => {
|
||||
let date_field_id = match fields
|
||||
@ -106,7 +114,7 @@ impl DatabaseLayoutDepsResolver {
|
||||
tracing::trace!("Create a new date field after layout type change");
|
||||
let field = self.create_date_field();
|
||||
let field_id = field.id.clone();
|
||||
self.database.lock().create_field(
|
||||
database.create_field(
|
||||
None,
|
||||
field,
|
||||
&OrderObjectPosition::End,
|
||||
@ -116,41 +124,17 @@ impl DatabaseLayoutDepsResolver {
|
||||
},
|
||||
Some(date_field) => date_field.id,
|
||||
};
|
||||
self.create_calendar_layout_setting_if_need(view_id, &date_field_id);
|
||||
if database
|
||||
.get_layout_setting::<CalendarLayoutSetting>(view_id, &self.database_layout)
|
||||
.is_none()
|
||||
{
|
||||
let layout_setting = CalendarLayoutSetting::new(date_field_id);
|
||||
database.insert_layout_setting(view_id, &self.database_layout, layout_setting);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn create_board_layout_setting_if_need(&self, view_id: &str) {
|
||||
if self
|
||||
.database
|
||||
.lock()
|
||||
.get_layout_setting::<BoardLayoutSetting>(view_id, &self.database_layout)
|
||||
.is_none()
|
||||
{
|
||||
let layout_setting = BoardLayoutSetting::new();
|
||||
self
|
||||
.database
|
||||
.lock()
|
||||
.insert_layout_setting(view_id, &self.database_layout, layout_setting);
|
||||
}
|
||||
}
|
||||
|
||||
fn create_calendar_layout_setting_if_need(&self, view_id: &str, field_id: &str) {
|
||||
if self
|
||||
.database
|
||||
.lock()
|
||||
.get_layout_setting::<CalendarLayoutSetting>(view_id, &self.database_layout)
|
||||
.is_none()
|
||||
{
|
||||
let layout_setting = CalendarLayoutSetting::new(field_id.to_string());
|
||||
self
|
||||
.database
|
||||
.lock()
|
||||
.insert_layout_setting(view_id, &self.database_layout, layout_setting);
|
||||
}
|
||||
}
|
||||
|
||||
fn create_date_field(&self) -> Field {
|
||||
let field_type = FieldType::DateTime;
|
||||
let default_date_type_option = DateTypeOption::default();
|
||||
|
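The resolver hunks above are part of the commit's move from the old MutexDatabase wrapper to a tokio::sync::RwLock<Database>: instead of calling .lock() repeatedly, the new code takes one read or write guard and reuses it for the whole operation. A minimal sketch of that pattern, with a hypothetical Database stand-in for the real collab type:

use std::sync::Arc;
use tokio::sync::RwLock;

// Hypothetical stand-in for collab_database::database::Database.
#[derive(Default)]
struct Database {
  fields: Vec<String>,
}

struct LayoutResolver {
  database: Arc<RwLock<Database>>,
}

impl LayoutResolver {
  // Read path: one read guard for the whole query instead of per-call locking.
  async fn field_count(&self) -> usize {
    let database = self.database.read().await;
    database.fields.len()
  }

  // Write path: a single write guard covers both the check and the mutation.
  async fn add_field_if_empty(&self, name: &str) {
    let mut database = self.database.write().await;
    if database.fields.is_empty() {
      database.fields.push(name.to_string());
    }
  }
}

#[tokio::main]
async fn main() {
  let resolver = LayoutResolver {
    database: Arc::new(RwLock::new(Database::default())),
  };
  resolver.add_field_if_empty("date").await;
  assert_eq!(resolver.field_count().await, 1);
}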
@@ -1,8 +1,8 @@
use async_trait::async_trait;
use collab_database::fields::Field;
use std::sync::Arc;

use collab_database::rows::RowCell;
use lib_infra::future::{to_fut, Fut};

use crate::services::calculations::{
Calculation, CalculationsController, CalculationsDelegate, CalculationsTaskHandler,
@@ -17,7 +17,7 @@ pub async fn make_calculations_controller(
delegate: Arc<dyn DatabaseViewOperation>,
notifier: DatabaseViewChangedNotifier,
) -> Arc<CalculationsController> {
let calculations = delegate.get_all_calculations(view_id);
let calculations = delegate.get_all_calculations(view_id).await;
let task_scheduler = delegate.get_task_scheduler();
let calculations_delegate = DatabaseViewCalculationsDelegateImpl(delegate.clone());
let handler_id = gen_handler_id();
@@ -29,8 +29,7 @@ pub async fn make_calculations_controller(
calculations,
task_scheduler.clone(),
notifier,
)
.await;
);

let calculations_controller = Arc::new(calculations_controller);
task_scheduler
@@ -45,30 +44,33 @@ pub async fn make_calculations_controller(

struct DatabaseViewCalculationsDelegateImpl(Arc<dyn DatabaseViewOperation>);

#[async_trait]
impl CalculationsDelegate for DatabaseViewCalculationsDelegateImpl {
fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut<Vec<Arc<RowCell>>> {
self.0.get_cells_for_field(view_id, field_id)
async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec<Arc<RowCell>> {
self.0.get_cells_for_field(view_id, field_id).await
}

fn get_field(&self, field_id: &str) -> Option<Field> {
self.0.get_field(field_id)
async fn get_field(&self, field_id: &str) -> Option<Field> {
self.0.get_field(field_id).await
}

fn get_calculation(&self, view_id: &str, field_id: &str) -> Fut<Option<Arc<Calculation>>> {
let calculation = self.0.get_calculation(view_id, field_id).map(Arc::new);
to_fut(async move { calculation })
async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option<Arc<Calculation>> {
self
.0
.get_calculation(view_id, field_id)
.await
.map(Arc::new)
}

fn update_calculation(&self, view_id: &str, calculation: Calculation) {
self.0.update_calculation(view_id, calculation)
async fn update_calculation(&self, view_id: &str, calculation: Calculation) {
self.0.update_calculation(view_id, calculation).await
}

fn remove_calculation(&self, view_id: &str, calculation_id: &str) {
self.0.remove_calculation(view_id, calculation_id)
async fn remove_calculation(&self, view_id: &str, calculation_id: &str) {
self.0.remove_calculation(view_id, calculation_id).await
}

fn get_all_calculations(&self, view_id: &str) -> Fut<Arc<Vec<Arc<Calculation>>>> {
let calculations = Arc::new(self.0.get_all_calculations(view_id));
to_fut(async move { calculations })
async fn get_all_calculations(&self, view_id: &str) -> Arc<Vec<Arc<Calculation>>> {
self.0.get_all_calculations(view_id).await.into()
}
}
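The delegate rewrite above follows the commit's general recipe for dropping lib_infra::future::Fut: mark the trait and the impl with #[async_trait], turn each fn ... -> Fut<T> into async fn ... -> T, and replace the to_fut(async move { ... }) wrappers with plain .await calls. A small before/after sketch of that conversion, using a hypothetical Calc delegate rather than the real AppFlowy traits:

use async_trait::async_trait;
use std::sync::Arc;

#[derive(Clone, Debug)]
struct Calc(String);

// New shape: the trait itself is async, so async methods remain usable
// behind Arc<dyn ...> without hand-rolled boxed futures.
#[async_trait]
trait CalcDelegate: Send + Sync {
  async fn get_calculation(&self, view_id: &str) -> Option<Arc<Calc>>;
}

struct InMemoryDelegate;

#[async_trait]
impl CalcDelegate for InMemoryDelegate {
  // Old shape (for comparison):
  //   fn get_calculation(&self, view_id: &str) -> Fut<Option<Arc<Calc>>> {
  //     let calc = Some(Arc::new(Calc(view_id.to_string())));
  //     to_fut(async move { calc })
  //   }
  async fn get_calculation(&self, view_id: &str) -> Option<Arc<Calc>> {
    Some(Arc::new(Calc(view_id.to_string())))
  }
}

#[tokio::main]
async fn main() {
  let delegate: Arc<dyn CalcDelegate> = Arc::new(InMemoryDelegate);
  let calc = delegate.get_calculation("view-1").await;
  println!("{:?}", calc);
}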
@@ -156,6 +156,7 @@ impl DatabaseViewEditor {
let field = self
.delegate
.get_field(controller.get_grouping_field_id())
.await
.ok_or_else(|| FlowyError::internal().with_context("Failed to get grouping field"))?;
controller.will_create_row(&mut cells, &field, &group_id);
}
@@ -249,7 +250,10 @@ impl DatabaseViewEditor {
field_id: Option<String>,
) {
if let Some(controller) = self.group_controller.write().await.as_mut() {
let field = self.delegate.get_field(controller.get_grouping_field_id());
let field = self
.delegate
.get_field(controller.get_grouping_field_id())
.await;

if let Some(field) = field {
let mut row_details = vec![Arc::new(row_detail.clone())];
@@ -413,8 +417,11 @@ impl DatabaseViewEditor {
pub async fn v_create_group(&self, name: &str) -> FlowyResult<()> {
let mut old_field: Option<Field> = None;
let result = if let Some(controller) = self.group_controller.write().await.as_mut() {
let create_group_results = controller.create_group(name.to_string())?;
old_field = self.delegate.get_field(controller.get_grouping_field_id());
let create_group_results = controller.create_group(name.to_string()).await?;
old_field = self
.delegate
.get_field(controller.get_grouping_field_id())
.await;
create_group_results
} else {
(None, None)
@@ -447,20 +454,22 @@ impl DatabaseViewEditor {
None => return Ok(RowsChangePB::default()),
};

let old_field = self.delegate.get_field(controller.get_grouping_field_id());
let (row_ids, type_option_data) = controller.delete_group(group_id)?;
let old_field = self
.delegate
.get_field(controller.get_grouping_field_id())
.await;
let (row_ids, type_option_data) = controller.delete_group(group_id).await?;

drop(group_controller);

let mut changes = RowsChangePB::default();

if let Some(field) = old_field {
let deleted_rows = row_ids
.iter()
.filter_map(|row_id| self.delegate.remove_row(row_id))
.map(|row| row.id.into_inner());

changes.deleted_rows.extend(deleted_rows);
for row_id in row_ids {
if let Some(row) = self.delegate.remove_row(&row_id).await {
changes.deleted_rows.push(row.id.into_inner());
}
}

if let Some(type_option) = type_option_data {
self.delegate.update_field(type_option, field).await?;
@@ -478,19 +487,23 @@ impl DatabaseViewEditor {

pub async fn v_update_group(&self, changeset: Vec<GroupChangeset>) -> FlowyResult<()> {
let mut type_option_data = None;
let (old_field, updated_groups) =
if let Some(controller) = self.group_controller.write().await.as_mut() {
let old_field = self.delegate.get_field(controller.get_grouping_field_id());
let (updated_groups, new_type_option) = controller.apply_group_changeset(&changeset)?;
let (old_field, updated_groups) = if let Some(controller) =
self.group_controller.write().await.as_mut()
{
let old_field = self
.delegate
.get_field(controller.get_grouping_field_id())
.await;
let (updated_groups, new_type_option) = controller.apply_group_changeset(&changeset).await?;

if new_type_option.is_some() {
type_option_data = new_type_option;
}
if new_type_option.is_some() {
type_option_data = new_type_option;
}

(old_field, updated_groups)
} else {
(None, vec![])
};
(old_field, updated_groups)
} else {
(None, vec![])
};

if let Some(old_field) = old_field {
if let Some(type_option_data) = type_option_data {
@@ -511,7 +524,7 @@ impl DatabaseViewEditor {
}

pub async fn v_get_all_sorts(&self) -> Vec<Sort> {
self.delegate.get_all_sorts(&self.view_id)
self.delegate.get_all_sorts(&self.view_id).await
}

#[tracing::instrument(level = "trace", skip(self), err)]
@@ -528,7 +541,7 @@ impl DatabaseViewEditor {
condition: params.condition.into(),
};

self.delegate.insert_sort(&self.view_id, sort.clone());
self.delegate.insert_sort(&self.view_id, sort.clone()).await;

let mut sort_controller = self.sort_controller.write().await;

@@ -549,7 +562,8 @@ impl DatabaseViewEditor {
pub async fn v_reorder_sort(&self, params: ReorderSortPayloadPB) -> FlowyResult<()> {
self
.delegate
.move_sort(&self.view_id, &params.from_sort_id, &params.to_sort_id);
.move_sort(&self.view_id, &params.from_sort_id, &params.to_sort_id)
.await;

let notification = self
.sort_controller
@@ -573,7 +587,10 @@ impl DatabaseViewEditor {
.apply_changeset(SortChangeset::from_delete(params.sort_id.clone()))
.await;

self.delegate.remove_sort(&self.view_id, &params.sort_id);
self
.delegate
.remove_sort(&self.view_id, &params.sort_id)
.await;
notify_did_update_sort(notification).await;

Ok(())
@@ -583,7 +600,7 @@ impl DatabaseViewEditor {
let all_sorts = self.v_get_all_sorts().await;
self.sort_controller.write().await.delete_all_sorts().await;

self.delegate.remove_all_sorts(&self.view_id);
self.delegate.remove_all_sorts(&self.view_id).await;
let mut notification = SortChangesetNotificationPB::new(self.view_id.clone());
notification.delete_sorts = all_sorts.into_iter().map(SortPB::from).collect();
notify_did_update_sort(notification).await;
@@ -591,7 +608,7 @@ impl DatabaseViewEditor {
}

pub async fn v_get_all_calculations(&self) -> Vec<Arc<Calculation>> {
self.delegate.get_all_calculations(&self.view_id)
self.delegate.get_all_calculations(&self.view_id).await
}

pub async fn v_update_calculations(
@@ -620,7 +637,8 @@ impl DatabaseViewEditor {
let calculation: Calculation = Calculation::from(&insert);
self
.delegate
.update_calculation(&params.view_id, calculation);
.update_calculation(&params.view_id, calculation)
.await;
}
}

@@ -636,7 +654,8 @@ impl DatabaseViewEditor {
) -> FlowyResult<()> {
self
.delegate
.remove_calculation(&params.view_id, &params.calculation_id);
.remove_calculation(&params.view_id, &params.calculation_id)
.await;

let calculation = Calculation::none(params.calculation_id, params.field_id, None);

@@ -653,11 +672,11 @@ impl DatabaseViewEditor {
}

pub async fn v_get_all_filters(&self) -> Vec<Filter> {
self.delegate.get_all_filters(&self.view_id)
self.delegate.get_all_filters(&self.view_id).await
}

pub async fn v_get_filter(&self, filter_id: &str) -> Option<Filter> {
self.delegate.get_filter(&self.view_id, filter_id)
self.delegate.get_filter(&self.view_id, filter_id).await
}

#[tracing::instrument(level = "trace", skip(self), err)]
@@ -686,15 +705,23 @@ impl DatabaseViewEditor {
match layout_ty {
DatabaseLayout::Grid => {},
DatabaseLayout::Board => {
if let Some(value) = self.delegate.get_layout_setting(&self.view_id, layout_ty) {
if let Some(value) = self
.delegate
.get_layout_setting(&self.view_id, layout_ty)
.await
{
layout_setting.board = Some(value.into());
}
},
DatabaseLayout::Calendar => {
if let Some(value) = self.delegate.get_layout_setting(&self.view_id, layout_ty) {
if let Some(value) = self
.delegate
.get_layout_setting(&self.view_id, layout_ty)
.await
{
let calendar_setting = CalendarLayoutSetting::from(value);
// Check the field exist or not
if let Some(field) = self.delegate.get_field(&calendar_setting.field_id) {
if let Some(field) = self.delegate.get_field(&calendar_setting.field_id).await {
let field_type = FieldType::from(field.field_type);

// Check the type of field is Datetime or not
@@ -723,27 +750,33 @@ impl DatabaseViewEditor {
DatabaseLayout::Board => {
let layout_setting = params.board.unwrap();

self.delegate.insert_layout_setting(
&self.view_id,
&params.layout_type,
layout_setting.clone().into(),
);
self
.delegate
.insert_layout_setting(
&self.view_id,
&params.layout_type,
layout_setting.clone().into(),
)
.await;

Some(DatabaseLayoutSettingPB::from_board(layout_setting))
},
DatabaseLayout::Calendar => {
let layout_setting = params.calendar.unwrap();

if let Some(field) = self.delegate.get_field(&layout_setting.field_id) {
if let Some(field) = self.delegate.get_field(&layout_setting.field_id).await {
if FieldType::from(field.field_type) != FieldType::DateTime {
return Err(FlowyError::unexpect_calendar_field_type());
}

self.delegate.insert_layout_setting(
&self.view_id,
&params.layout_type,
layout_setting.clone().into(),
);
self
.delegate
.insert_layout_setting(
&self.view_id,
&params.layout_type,
layout_setting.clone().into(),
)
.await;

Some(DatabaseLayoutSettingPB::from_calendar(layout_setting))
} else {
@@ -769,10 +802,10 @@ impl DatabaseViewEditor {
let notification = self.filter_controller.apply_changeset(changeset).await;
notify_did_update_filter(notification).await;

let sorts = self.delegate.get_all_sorts(&self.view_id);
let sorts = self.delegate.get_all_sorts(&self.view_id).await;

if let Some(sort) = sorts.iter().find(|sort| sort.field_id == deleted_field_id) {
self.delegate.remove_sort(&self.view_id, &sort.id);
self.delegate.remove_sort(&self.view_id, &sort.id).await;
let notification = self
.sort_controller
.write()
@@ -810,7 +843,7 @@ impl DatabaseViewEditor {
pub async fn v_did_update_field_type_option(&self, old_field: &Field) -> FlowyResult<()> {
let field_id = &old_field.id;

if let Some(field) = self.delegate.get_field(field_id) {
if let Some(field) = self.delegate.get_field(field_id).await {
self
.sort_controller
.read()
@@ -839,7 +872,7 @@ impl DatabaseViewEditor {
/// Called when a grouping field is updated.
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn v_group_by_field(&self, field_id: &str) -> FlowyResult<()> {
if let Some(field) = self.delegate.get_field(field_id) {
if let Some(field) = self.delegate.get_field(field_id).await {
tracing::trace!("create new group controller");

let new_group_controller = new_group_controller(
@@ -890,7 +923,7 @@ impl DatabaseViewEditor {
let text_cell = get_cell_for_row(self.delegate.clone(), &primary_field.id, &row_id).await?;

// Date
let date_field = self.delegate.get_field(&calendar_setting.field_id)?;
let date_field = self.delegate.get_field(&calendar_setting.field_id).await?;

let date_cell = get_cell_for_row(self.delegate.clone(), &date_field.id, &row_id).await?;
let title = text_cell
@@ -981,20 +1014,23 @@ impl DatabaseViewEditor {
}

pub async fn v_get_layout_type(&self) -> DatabaseLayout {
self.delegate.get_layout_for_view(&self.view_id)
self.delegate.get_layout_for_view(&self.view_id).await
}

#[tracing::instrument(level = "trace", skip_all)]
pub async fn v_update_layout_type(&self, new_layout_type: DatabaseLayout) -> FlowyResult<()> {
self
.delegate
.update_layout_type(&self.view_id, &new_layout_type);
.update_layout_type(&self.view_id, &new_layout_type)
.await;

// using the {} brackets to denote the lifetime of the resolver. Because the DatabaseLayoutDepsResolver
// is not sync and send, so we can't pass it to the async block.
{
let resolver = DatabaseLayoutDepsResolver::new(self.delegate.get_database(), new_layout_type);
resolver.resolve_deps_when_update_layout_type(&self.view_id);
resolver
.resolve_deps_when_update_layout_type(&self.view_id)
.await;
}

// initialize the group controller if the current layout support grouping
@@ -1034,12 +1070,14 @@ impl DatabaseViewEditor {
}

pub async fn v_get_field_settings(&self, field_ids: &[String]) -> HashMap<String, FieldSettings> {
self.delegate.get_field_settings(&self.view_id, field_ids)
self
.delegate
.get_field_settings(&self.view_id, field_ids)
.await
}

pub async fn v_update_field_settings(&self, params: FieldSettingsChangesetPB) -> FlowyResult<()> {
self.delegate.update_field_settings(params);

self.delegate.update_field_settings(params).await;
Ok(())
}

@@ -1053,7 +1091,7 @@ impl DatabaseViewEditor {
.await
.as_ref()
.map(|controller| controller.get_grouping_field_id().to_owned())?;
let field = self.delegate.get_field(&group_field_id)?;
let field = self.delegate.get_field(&group_field_id).await?;
let mut write_guard = self.group_controller.write().await;
if let Some(group_controller) = &mut *write_guard {
f(group_controller, field).ok()
@@ -1,10 +1,9 @@
use async_trait::async_trait;
use std::sync::Arc;

use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId};

use lib_infra::future::Fut;

use crate::services::cell::CellCache;
use crate::services::database_view::{
gen_handler_id, DatabaseViewChangedNotifier, DatabaseViewOperation,
@@ -43,28 +42,29 @@ pub async fn make_filter_controller(

struct DatabaseViewFilterDelegateImpl(Arc<dyn DatabaseViewOperation>);

#[async_trait]
impl FilterDelegate for DatabaseViewFilterDelegateImpl {
fn get_field(&self, field_id: &str) -> Option<Field> {
self.0.get_field(field_id)
async fn get_field(&self, field_id: &str) -> Option<Field> {
self.0.get_field(field_id).await
}

fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Fut<Vec<Field>> {
self.0.get_fields(view_id, field_ids)
async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field> {
self.0.get_fields(view_id, field_ids).await
}

fn get_rows(&self, view_id: &str) -> Fut<Vec<Arc<RowDetail>>> {
self.0.get_rows(view_id)
async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
self.0.get_rows(view_id).await
}

fn get_row(&self, view_id: &str, rows_id: &RowId) -> Fut<Option<(usize, Arc<RowDetail>)>> {
self.0.get_row(view_id, rows_id)
async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc<RowDetail>)> {
self.0.get_row(view_id, rows_id).await
}

fn get_all_filters(&self, view_id: &str) -> Vec<Filter> {
self.0.get_all_filters(view_id)
async fn get_all_filters(&self, view_id: &str) -> Vec<Filter> {
self.0.get_all_filters(view_id).await
}

fn save_filters(&self, view_id: &str, filters: &[Filter]) {
self.0.save_filters(view_id, filters)
async fn save_filters(&self, view_id: &str, filters: &[Filter]) {
self.0.save_filters(view_id, filters).await
}
}
@@ -1,10 +1,10 @@
use async_trait::async_trait;
use std::sync::Arc;

use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId};

use flowy_error::FlowyResult;
use lib_infra::future::{to_fut, Fut};

use crate::entities::FieldType;
use crate::services::database_view::DatabaseViewOperation;
@@ -21,7 +21,7 @@ pub async fn new_group_controller(
filter_controller: Arc<FilterController>,
grouping_field: Option<Field>,
) -> FlowyResult<Option<Box<dyn GroupController>>> {
if !delegate.get_layout_for_view(&view_id).is_board() {
if !delegate.get_layout_for_view(&view_id).await.is_board() {
return Ok(None);
}

@@ -61,45 +61,45 @@ pub(crate) struct GroupControllerDelegateImpl {
filter_controller: Arc<FilterController>,
}

#[async_trait]
impl GroupContextDelegate for GroupControllerDelegateImpl {
fn get_group_setting(&self, view_id: &str) -> Fut<Option<Arc<GroupSetting>>> {
let mut settings = self.delegate.get_group_setting(view_id);
to_fut(async move {
if settings.is_empty() {
None
} else {
Some(Arc::new(settings.remove(0)))
}
})
async fn get_group_setting(&self, view_id: &str) -> Option<Arc<GroupSetting>> {
let mut settings = self.delegate.get_group_setting(view_id).await;
if settings.is_empty() {
None
} else {
Some(Arc::new(settings.remove(0)))
}
}

fn get_configuration_cells(&self, view_id: &str, field_id: &str) -> Fut<Vec<RowSingleCellData>> {
let field_id = field_id.to_owned();
let view_id = view_id.to_owned();
async fn get_configuration_cells(&self, view_id: &str, field_id: &str) -> Vec<RowSingleCellData> {
let delegate = self.delegate.clone();
to_fut(async move { get_cells_for_field(delegate, &view_id, &field_id).await })
get_cells_for_field(delegate, view_id, field_id).await
}

fn save_configuration(&self, view_id: &str, group_setting: GroupSetting) -> Fut<FlowyResult<()>> {
self.delegate.insert_group_setting(view_id, group_setting);
to_fut(async move { Ok(()) })
async fn save_configuration(
&self,
view_id: &str,
group_setting: GroupSetting,
) -> FlowyResult<()> {
self
.delegate
.insert_group_setting(view_id, group_setting)
.await;
Ok(())
}
}

#[async_trait]
impl GroupControllerDelegate for GroupControllerDelegateImpl {
fn get_field(&self, field_id: &str) -> Option<Field> {
self.delegate.get_field(field_id)
async fn get_field(&self, field_id: &str) -> Option<Field> {
self.delegate.get_field(field_id).await
}

fn get_all_rows(&self, view_id: &str) -> Fut<Vec<Arc<RowDetail>>> {
let view_id = view_id.to_string();
let delegate = self.delegate.clone();
let filter_controller = self.filter_controller.clone();
to_fut(async move {
let mut row_details = delegate.get_rows(&view_id).await;
filter_controller.filter_rows(&mut row_details).await;
row_details
})
async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
let mut row_details = self.delegate.get_rows(view_id).await;
self.filter_controller.filter_rows(&mut row_details).await;
row_details
}
}

@@ -108,7 +108,7 @@ pub(crate) async fn get_cell_for_row(
field_id: &str,
row_id: &RowId,
) -> Option<RowSingleCellData> {
let field = delegate.get_field(field_id)?;
let field = delegate.get_field(field_id).await?;
let row_cell = delegate.get_cell_in_row(field_id, row_id).await;
let field_type = FieldType::from(field.field_type);
let handler = delegate.get_type_option_cell_handler(&field)?;
@@ -131,7 +131,7 @@ pub(crate) async fn get_cells_for_field(
view_id: &str,
field_id: &str,
) -> Vec<RowSingleCellData> {
if let Some(field) = delegate.get_field(field_id) {
if let Some(field) = delegate.get_field(field_id).await {
let field_type = FieldType::from(field.field_type);
if let Some(handler) = delegate.get_type_option_cell_handler(&field) {
let cells = delegate.get_cells_for_field(view_id, field_id).await;
@@ -1,14 +1,14 @@
use async_trait::async_trait;
use collab_database::database::Database;
use std::collections::HashMap;
use std::sync::Arc;

use collab_database::database::MutexDatabase;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Row, RowCell, RowDetail, RowId};
use collab_database::views::{DatabaseLayout, DatabaseView, LayoutSetting};
use tokio::sync::RwLock;

use flowy_error::FlowyError;
use lib_infra::future::{Fut, FutureResult};
use lib_infra::priority_task::TaskDispatcher;

use crate::entities::{FieldSettingsChangesetPB, FieldType};
@@ -20,97 +20,102 @@ use crate::services::group::GroupSetting;
use crate::services::sort::Sort;

/// Defines the operation that can be performed on a database view
#[async_trait]
pub trait DatabaseViewOperation: Send + Sync + 'static {
/// Get the database that the view belongs to
fn get_database(&self) -> Arc<MutexDatabase>;
fn get_database(&self) -> Arc<RwLock<Database>>;

/// Get the view of the database with the view_id
fn get_view(&self, view_id: &str) -> Fut<Option<DatabaseView>>;
async fn get_view(&self, view_id: &str) -> Option<DatabaseView>;
/// If the field_ids is None, then it will return all the field revisions
fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Fut<Vec<Field>>;
async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field>;

/// Returns the field with the field_id
fn get_field(&self, field_id: &str) -> Option<Field>;
async fn get_field(&self, field_id: &str) -> Option<Field>;

fn create_field(
async fn create_field(
&self,
view_id: &str,
name: &str,
field_type: FieldType,
type_option_data: TypeOptionData,
) -> Fut<Field>;
) -> Field;

fn update_field(
async fn update_field(
&self,
type_option_data: TypeOptionData,
old_field: Field,
) -> FutureResult<(), FlowyError>;
) -> Result<(), FlowyError>;

fn get_primary_field(&self) -> Fut<Option<Arc<Field>>>;
async fn get_primary_field(&self) -> Option<Arc<Field>>;

/// Returns the index of the row with row_id
fn index_of_row(&self, view_id: &str, row_id: &RowId) -> Fut<Option<usize>>;
async fn index_of_row(&self, view_id: &str, row_id: &RowId) -> Option<usize>;

/// Returns the `index` and `RowRevision` with row_id
fn get_row(&self, view_id: &str, row_id: &RowId) -> Fut<Option<(usize, Arc<RowDetail>)>>;
async fn get_row(&self, view_id: &str, row_id: &RowId) -> Option<(usize, Arc<RowDetail>)>;

/// Returns all the rows in the view
fn get_rows(&self, view_id: &str) -> Fut<Vec<Arc<RowDetail>>>;
async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>>;

fn remove_row(&self, row_id: &RowId) -> Option<Row>;
async fn remove_row(&self, row_id: &RowId) -> Option<Row>;

fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Fut<Vec<Arc<RowCell>>>;
async fn get_cells_for_field(&self, view_id: &str, field_id: &str) -> Vec<Arc<RowCell>>;

fn get_cell_in_row(&self, field_id: &str, row_id: &RowId) -> Fut<Arc<RowCell>>;
async fn get_cell_in_row(&self, field_id: &str, row_id: &RowId) -> Arc<RowCell>;

/// Return the database layout type for the view with given view_id
/// The default layout type is [DatabaseLayout::Grid]
fn get_layout_for_view(&self, view_id: &str) -> DatabaseLayout;
async fn get_layout_for_view(&self, view_id: &str) -> DatabaseLayout;

fn get_group_setting(&self, view_id: &str) -> Vec<GroupSetting>;
async fn get_group_setting(&self, view_id: &str) -> Vec<GroupSetting>;

fn insert_group_setting(&self, view_id: &str, setting: GroupSetting);
async fn insert_group_setting(&self, view_id: &str, setting: GroupSetting);

fn get_sort(&self, view_id: &str, sort_id: &str) -> Option<Sort>;
async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option<Sort>;

fn insert_sort(&self, view_id: &str, sort: Sort);
async fn insert_sort(&self, view_id: &str, sort: Sort);

fn move_sort(&self, view_id: &str, from_sort_id: &str, to_sort_id: &str);
async fn move_sort(&self, view_id: &str, from_sort_id: &str, to_sort_id: &str);

fn remove_sort(&self, view_id: &str, sort_id: &str);
async fn remove_sort(&self, view_id: &str, sort_id: &str);

fn get_all_sorts(&self, view_id: &str) -> Vec<Sort>;
async fn get_all_sorts(&self, view_id: &str) -> Vec<Sort>;

fn remove_all_sorts(&self, view_id: &str);
async fn remove_all_sorts(&self, view_id: &str);

fn get_all_calculations(&self, view_id: &str) -> Vec<Arc<Calculation>>;
async fn get_all_calculations(&self, view_id: &str) -> Vec<Arc<Calculation>>;

fn get_calculation(&self, view_id: &str, field_id: &str) -> Option<Calculation>;
async fn get_calculation(&self, view_id: &str, field_id: &str) -> Option<Calculation>;

fn update_calculation(&self, view_id: &str, calculation: Calculation);
async fn update_calculation(&self, view_id: &str, calculation: Calculation);

fn remove_calculation(&self, view_id: &str, calculation_id: &str);
async fn remove_calculation(&self, view_id: &str, calculation_id: &str);

fn get_all_filters(&self, view_id: &str) -> Vec<Filter>;
async fn get_all_filters(&self, view_id: &str) -> Vec<Filter>;

fn get_filter(&self, view_id: &str, filter_id: &str) -> Option<Filter>;
async fn get_filter(&self, view_id: &str, filter_id: &str) -> Option<Filter>;

fn delete_filter(&self, view_id: &str, filter_id: &str);
async fn delete_filter(&self, view_id: &str, filter_id: &str);

fn insert_filter(&self, view_id: &str, filter: Filter);
async fn insert_filter(&self, view_id: &str, filter: Filter);

fn save_filters(&self, view_id: &str, filters: &[Filter]);
async fn save_filters(&self, view_id: &str, filters: &[Filter]);

fn get_layout_setting(&self, view_id: &str, layout_ty: &DatabaseLayout) -> Option<LayoutSetting>;
async fn get_layout_setting(
&self,
view_id: &str,
layout_ty: &DatabaseLayout,
) -> Option<LayoutSetting>;

fn insert_layout_setting(
async fn insert_layout_setting(
&self,
view_id: &str,
layout_ty: &DatabaseLayout,
layout_setting: LayoutSetting,
);

fn update_layout_type(&self, view_id: &str, layout_type: &DatabaseLayout);
async fn update_layout_type(&self, view_id: &str, layout_type: &DatabaseLayout);

/// Returns a `TaskDispatcher` used to poll a `Task`
fn get_task_scheduler(&self) -> Arc<RwLock<TaskDispatcher>>;
@@ -120,11 +125,11 @@ pub trait DatabaseViewOperation: Send + Sync + 'static {
field: &Field,
) -> Option<Box<dyn TypeOptionCellDataHandler>>;

fn get_field_settings(
async fn get_field_settings(
&self,
view_id: &str,
field_ids: &[String],
) -> HashMap<String, FieldSettings>;

fn update_field_settings(&self, params: FieldSettingsChangesetPB);
async fn update_field_settings(&self, params: FieldSettingsChangesetPB);
}
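With DatabaseViewOperation now an #[async_trait], every implementor (and every test double) writes async fn bodies and awaits the underlying store directly. A reduced sketch, assuming a hypothetical two-method slice of the trait rather than the full interface above:

use async_trait::async_trait;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;

// Hypothetical, reduced slice of the real trait for illustration only.
#[async_trait]
trait ViewOperation: Send + Sync + 'static {
  async fn get_layout_for_view(&self, view_id: &str) -> String;
  async fn update_layout_type(&self, view_id: &str, layout: &str);
}

// A simple in-memory implementor, e.g. for unit tests.
struct InMemoryViews {
  layouts: Arc<RwLock<HashMap<String, String>>>,
}

#[async_trait]
impl ViewOperation for InMemoryViews {
  async fn get_layout_for_view(&self, view_id: &str) -> String {
    self
      .layouts
      .read()
      .await
      .get(view_id)
      .cloned()
      .unwrap_or_else(|| "Grid".to_string())
  }

  async fn update_layout_type(&self, view_id: &str, layout: &str) {
    self
      .layouts
      .write()
      .await
      .insert(view_id.to_string(), layout.to_string());
  }
}

#[tokio::main]
async fn main() {
  let views = InMemoryViews {
    layouts: Arc::new(RwLock::new(HashMap::new())),
  };
  views.update_layout_type("view-1", "Board").await;
  assert_eq!(views.get_layout_for_view("view-1").await, "Board");
}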
@@ -1,11 +1,10 @@
use async_trait::async_trait;
use std::sync::Arc;

use collab_database::fields::Field;
use collab_database::rows::RowDetail;
use tokio::sync::RwLock;

use lib_infra::future::{to_fut, Fut};

use crate::services::cell::CellCache;
use crate::services::database_view::{
gen_handler_id, DatabaseViewChangedNotifier, DatabaseViewOperation,
@@ -23,6 +22,7 @@ pub(crate) async fn make_sort_controller(
let handler_id = gen_handler_id();
let sorts = delegate
.get_all_sorts(view_id)
.await
.into_iter()
.map(Arc::new)
.collect();
@@ -53,38 +53,31 @@ struct DatabaseViewSortDelegateImpl {
filter_controller: Arc<FilterController>,
}

#[async_trait]
impl SortDelegate for DatabaseViewSortDelegateImpl {
fn get_sort(&self, view_id: &str, sort_id: &str) -> Fut<Option<Arc<Sort>>> {
let sort = self.delegate.get_sort(view_id, sort_id).map(Arc::new);
to_fut(async move { sort })
async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option<Arc<Sort>> {
self.delegate.get_sort(view_id, sort_id).await.map(Arc::new)
}

fn get_rows(&self, view_id: &str) -> Fut<Vec<Arc<RowDetail>>> {
async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
let view_id = view_id.to_string();
let delegate = self.delegate.clone();
let filter_controller = self.filter_controller.clone();
to_fut(async move {
let mut row_details = delegate.get_rows(&view_id).await;
filter_controller.filter_rows(&mut row_details).await;
row_details
})
let mut row_details = self.delegate.get_rows(&view_id).await;
self.filter_controller.filter_rows(&mut row_details).await;
row_details
}

fn filter_row(&self, row_detail: &RowDetail) -> Fut<bool> {
let filter_controller = self.filter_controller.clone();
async fn filter_row(&self, row_detail: &RowDetail) -> bool {
let row_detail = row_detail.clone();
to_fut(async move {
let mut row_details = vec![Arc::new(row_detail)];
filter_controller.filter_rows(&mut row_details).await;
!row_details.is_empty()
})
let mut row_details = vec![Arc::new(row_detail)];
self.filter_controller.filter_rows(&mut row_details).await;
!row_details.is_empty()
}

fn get_field(&self, field_id: &str) -> Option<Field> {
self.delegate.get_field(field_id)
async fn get_field(&self, field_id: &str) -> Option<Field> {
self.delegate.get_field(field_id).await
}

fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Fut<Vec<Field>> {
self.delegate.get_fields(view_id, field_ids)
async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field> {
self.delegate.get_fields(view_id, field_ids).await
}
}
@@ -1,11 +1,11 @@
use collab_database::database::Database;
use std::collections::HashMap;
use std::sync::Arc;

use collab_database::database::MutexDatabase;
use nanoid::nanoid;
use tokio::sync::{broadcast, RwLock};

use flowy_error::{FlowyError, FlowyResult};
use flowy_error::FlowyResult;

use crate::services::cell::CellCache;
use crate::services::database::DatabaseRowEvent;
@@ -17,7 +17,7 @@ pub type EditorByViewId = HashMap<String, Arc<DatabaseViewEditor>>;

pub struct DatabaseViews {
#[allow(dead_code)]
database: Arc<MutexDatabase>,
database: Arc<RwLock<Database>>,
cell_cache: CellCache,
view_operation: Arc<dyn DatabaseViewOperation>,
view_editors: Arc<RwLock<EditorByViewId>>,
@@ -25,7 +25,7 @@ pub struct DatabaseViews {

impl DatabaseViews {
pub async fn new(
database: Arc<MutexDatabase>,
database: Arc<RwLock<Database>>,
cell_cache: CellCache,
view_operation: Arc<dyn DatabaseViewOperation>,
view_editors: Arc<RwLock<EditorByViewId>>,
@@ -59,13 +59,10 @@ impl DatabaseViews {
return Ok(editor.clone());
}

let mut editor_map = self.view_editors.try_write().map_err(|err| {
FlowyError::internal().with_context(format!(
"fail to acquire the lock of editor_by_view_id: {}",
err
))
})?;
let database_id = self.database.lock().get_database_id();
//FIXME: not thread-safe
let mut editor_map = self.view_editors.write().await;
let database_id = self.database.read().await.get_database_id();
//FIXME: that method below is not Send+Sync
let editor = Arc::new(
DatabaseViewEditor::new(
database_id,
@@ -1,6 +1,6 @@
use std::sync::Arc;

use flowy_error::FlowyResult;
use flowy_error::{FlowyError, FlowyResult};

use crate::entities::FieldType;
use crate::services::database::DatabaseEditor;
@@ -11,14 +11,15 @@ pub async fn edit_field_type_option<T: TypeOption>(
editor: Arc<DatabaseEditor>,
action: impl FnOnce(&mut T),
) -> FlowyResult<()> {
let get_type_option = async {
let field = editor.get_field(field_id)?;
let field_type = FieldType::from(field.field_type);
field.get_type_option::<T>(field_type)
};
let field = editor
.get_field(field_id)
.await
.ok_or_else(FlowyError::field_record_not_found)?;
let field_type = FieldType::from(field.field_type);
let get_type_option = field.get_type_option::<T>(field_type);

if let Some(mut type_option) = get_type_option.await {
if let Some(old_field) = editor.get_field(field_id) {
if let Some(mut type_option) = get_type_option {
if let Some(old_field) = editor.get_field(field_id).await {
action(&mut type_option);
let type_option_data = type_option.into();
editor
@@ -35,7 +35,7 @@ impl From<TypeOptionData> for CheckboxTypeOption {

impl From<CheckboxTypeOption> for TypeOptionData {
fn from(_data: CheckboxTypeOption) -> Self {
TypeOptionDataBuilder::new().build()
TypeOptionDataBuilder::new()
}
}
@@ -1,7 +1,7 @@
use std::str::FromStr;

use bytes::Bytes;
use collab::core::any_map::AnyMapExtension;
use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell};

use flowy_error::{FlowyError, FlowyResult};
@@ -21,16 +21,16 @@ impl TypeOptionCellData for CheckboxCellDataPB {

impl From<&Cell> for CheckboxCellDataPB {
fn from(cell: &Cell) -> Self {
let value = cell.get_str_value(CELL_DATA).unwrap_or_default();
let value: String = cell.get_as(CELL_DATA).unwrap_or_default();
CheckboxCellDataPB::from_str(&value).unwrap_or_default()
}
}

impl From<CheckboxCellDataPB> for Cell {
fn from(data: CheckboxCellDataPB) -> Self {
new_cell_builder(FieldType::Checkbox)
.insert_str_value(CELL_DATA, data.to_string())
.build()
let mut cell = new_cell_builder(FieldType::Checkbox);
cell.insert(CELL_DATA.into(), data.to_string().into());
cell
}
}
@@ -31,7 +31,7 @@ impl From<TypeOptionData> for ChecklistTypeOption {

impl From<ChecklistTypeOption> for TypeOptionData {
fn from(_data: ChecklistTypeOption) -> Self {
TypeOptionDataBuilder::new().build()
TypeOptionDataBuilder::new()
}
}
@@ -1,6 +1,6 @@
use crate::entities::FieldType;
use crate::services::field::{SelectOption, TypeOptionCellData, CELL_DATA};
use collab::core::any_map::AnyMapExtension;
use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell};
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
@@ -64,7 +64,7 @@ impl ChecklistCellData {
impl From<&Cell> for ChecklistCellData {
fn from(cell: &Cell) -> Self {
cell
.get_str_value(CELL_DATA)
.get_as::<String>(CELL_DATA)
.map(|data| serde_json::from_str::<ChecklistCellData>(&data).unwrap_or_default())
.unwrap_or_default()
}
@@ -73,9 +73,9 @@ impl From<&Cell> for ChecklistCellData {
impl From<ChecklistCellData> for Cell {
fn from(cell_data: ChecklistCellData) -> Self {
let data = serde_json::to_string(&cell_data).unwrap_or_default();
new_cell_builder(FieldType::Checklist)
.insert_str_value(CELL_DATA, data)
.build()
let mut cell = new_cell_builder(FieldType::Checklist);
cell.insert(CELL_DATA.into(), data.into());
cell
}
}
@@ -3,7 +3,7 @@ use std::str::FromStr;

use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, NaiveTime, Offset, TimeZone};
use chrono_tz::Tz;
use collab::core::any_map::AnyMapExtension;
use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell;
use serde::{Deserialize, Serialize};
@@ -36,14 +36,14 @@ impl TypeOption for DateTypeOption {
impl From<TypeOptionData> for DateTypeOption {
fn from(data: TypeOptionData) -> Self {
let date_format = data
.get_i64_value("date_format")
.get_as::<i64>("date_format")
.map(DateFormat::from)
.unwrap_or_default();
let time_format = data
.get_i64_value("time_format")
.get_as::<i64>("time_format")
.map(TimeFormat::from)
.unwrap_or_default();
let timezone_id = data.get_str_value("timezone_id").unwrap_or_default();
let timezone_id: String = data.get_as("timezone_id").unwrap_or_default();
Self {
date_format,
time_format,
@@ -54,11 +54,11 @@ impl From<TypeOptionData> for DateTypeOption {

impl From<DateTypeOption> for TypeOptionData {
fn from(data: DateTypeOption) -> Self {
TypeOptionDataBuilder::new()
.insert_i64_value("date_format", data.date_format.value())
.insert_i64_value("time_format", data.time_format.value())
.insert_str_value("timezone_id", data.timezone_id)
.build()
TypeOptionDataBuilder::from([
("date_format".into(), data.date_format.value().into()),
("time_format".into(), data.time_format.value().into()),
("timezone_id".into(), data.timezone_id.into()),
])
}
}
@@ -1,7 +1,7 @@
#![allow(clippy::upper_case_acronyms)]

use bytes::Bytes;
use collab::core::any_map::AnyMapExtension;
use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell};
use serde::de::Visitor;
use serde::{Deserialize, Serialize};
@@ -58,14 +58,14 @@ impl TypeOptionCellData for DateCellData {
impl From<&Cell> for DateCellData {
fn from(cell: &Cell) -> Self {
let timestamp = cell
.get_str_value(CELL_DATA)
.get_as::<String>(CELL_DATA)
.and_then(|data| data.parse::<i64>().ok());
let end_timestamp = cell
.get_str_value("end_timestamp")
.get_as::<String>("end_timestamp")
.and_then(|data| data.parse::<i64>().ok());
let include_time = cell.get_bool_value("include_time").unwrap_or_default();
let is_range = cell.get_bool_value("is_range").unwrap_or_default();
let reminder_id = cell.get_str_value("reminder_id").unwrap_or_default();
let include_time: bool = cell.get_as("include_time").unwrap_or_default();
let is_range: bool = cell.get_as("is_range").unwrap_or_default();
let reminder_id: String = cell.get_as("reminder_id").unwrap_or_default();

Self {
timestamp,
@@ -101,13 +101,16 @@ impl From<&DateCellData> for Cell {
};
// Most of the case, don't use these keys in other places. Otherwise, we should define
// constants for them.
new_cell_builder(FieldType::DateTime)
.insert_str_value(CELL_DATA, timestamp_string)
.insert_str_value("end_timestamp", end_timestamp_string)
.insert_bool_value("include_time", cell_data.include_time)
.insert_bool_value("is_range", cell_data.is_range)
.insert_str_value("reminder_id", cell_data.reminder_id.to_owned())
.build()
let mut cell = new_cell_builder(FieldType::DateTime);
cell.insert(CELL_DATA.into(), timestamp_string.into());
cell.insert("end_timestamp".into(), end_timestamp_string.into());
cell.insert("include_time".into(), cell_data.include_time.into());
cell.insert("is_range".into(), cell_data.is_range.into());
cell.insert(
"reminder_id".into(),
cell_data.reminder_id.to_owned().into(),
);
cell
}
}
@@ -1,14 +1,16 @@
use collab::preclude::encoding::serde::from_any;
use collab::preclude::Any;
use collab::util::AnyMapExt;
use std::cmp::Ordering;
use std::default::Default;
use std::str::FromStr;

use collab::core::any_map::AnyMapExtension;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::{new_cell_builder, Cell};
use fancy_regex::Regex;
use lazy_static::lazy_static;
use rust_decimal::Decimal;
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize};

use flowy_error::FlowyResult;

@@ -25,12 +27,24 @@ use crate::services::sort::SortCondition;
// Number
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NumberTypeOption {
#[serde(default, deserialize_with = "number_format_from_i64")]
pub format: NumberFormat,
#[serde(default)]
pub scale: u32,
#[serde(default)]
pub symbol: String,
#[serde(default)]
pub name: String,
}

fn number_format_from_i64<'de, D>(deserializer: D) -> Result<NumberFormat, D::Error>
where
D: Deserializer<'de>,
{
let value = i64::deserialize(deserializer)?;
Ok(NumberFormat::from(value))
}

#[derive(Clone, Debug, Default)]
pub struct NumberCellData(pub String);

@@ -42,15 +56,15 @@ impl TypeOptionCellData for NumberCellData {

impl From<&Cell> for NumberCellData {
fn from(cell: &Cell) -> Self {
Self(cell.get_str_value(CELL_DATA).unwrap_or_default())
Self(cell.get_as(CELL_DATA).unwrap_or_default())
}
}

impl From<NumberCellData> for Cell {
fn from(data: NumberCellData) -> Self {
new_cell_builder(FieldType::Number)
.insert_str_value(CELL_DATA, data.0)
.build()
let mut cell = new_cell_builder(FieldType::Number);
cell.insert(CELL_DATA.into(), data.0.into());
cell
}
}

@@ -75,30 +89,18 @@ impl TypeOption for NumberTypeOption {

impl From<TypeOptionData> for NumberTypeOption {
fn from(data: TypeOptionData) -> Self {
let format = data
.get_i64_value("format")
.map(NumberFormat::from)
.unwrap_or_default();
let scale = data.get_i64_value("scale").unwrap_or_default() as u32;
let symbol = data.get_str_value("symbol").unwrap_or_default();
let name = data.get_str_value("name").unwrap_or_default();
Self {
format,
scale,
symbol,
name,
}
from_any(&Any::from(data)).unwrap()
}
}

impl From<NumberTypeOption> for TypeOptionData {
fn from(data: NumberTypeOption) -> Self {
TypeOptionDataBuilder::new()
.insert_i64_value("format", data.format.value())
.insert_i64_value("scale", data.scale as i64)
.insert_str_value("name", data.name)
.insert_str_value("symbol", data.symbol)
.build()
TypeOptionDataBuilder::from([
("format".into(), data.format.value().into()),
("scale".into(), data.scale.into()),
("name".into(), data.name.into()),
("symbol".into(), data.symbol.into()),
])
}
}
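NumberTypeOption above now derives Serialize/Deserialize and funnels the legacy i64 format code through a deserialize_with helper instead of reading each key by hand. A sketch of that custom-deserializer pattern, using serde_json here purely for illustration (the real code deserializes from collab's Any via from_any):

use serde::{Deserialize, Deserializer, Serialize};

#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
enum NumberFormat {
  #[default]
  Num,
  Usd,
}

impl From<i64> for NumberFormat {
  fn from(value: i64) -> Self {
    match value {
      1 => NumberFormat::Usd,
      _ => NumberFormat::Num,
    }
  }
}

#[derive(Clone, Debug, Serialize, Deserialize)]
struct NumberTypeOption {
  // Stored as an integer code; decoded into the enum while deserializing.
  #[serde(default, deserialize_with = "number_format_from_i64")]
  format: NumberFormat,
  #[serde(default)]
  scale: u32,
}

fn number_format_from_i64<'de, D>(deserializer: D) -> Result<NumberFormat, D::Error>
where
  D: Deserializer<'de>,
{
  let value = i64::deserialize(deserializer)?;
  Ok(NumberFormat::from(value))
}

fn main() {
  let option: NumberTypeOption = serde_json::from_str(r#"{"format": 1, "scale": 2}"#).unwrap();
  assert_eq!(option.format, NumberFormat::Usd);
  assert_eq!(option.scale, 2);
}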
@@ -1,6 +1,6 @@
use collab::util::AnyMapExt;
use std::cmp::Ordering;

use collab::core::any_map::AnyMapExtension;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell;
use flowy_error::FlowyResult;
@@ -23,16 +23,14 @@ pub struct RelationTypeOption {

impl From<TypeOptionData> for RelationTypeOption {
fn from(value: TypeOptionData) -> Self {
let database_id = value.get_str_value("database_id").unwrap_or_default();
let database_id: String = value.get_as("database_id").unwrap_or_default();
Self { database_id }
}
}

impl From<RelationTypeOption> for TypeOptionData {
fn from(value: RelationTypeOption) -> Self {
TypeOptionDataBuilder::new()
.insert_str_value("database_id", value.database_id)
.build()
TypeOptionDataBuilder::from([("database_id".into(), value.database_id.into())])
}
}

@@ -57,7 +55,7 @@ impl CellDataChangeset for RelationTypeOption {
return Ok(((&cell_data).into(), cell_data));
}

let cell_data: RelationCellData = cell.unwrap().as_ref().into();
let cell_data: RelationCellData = cell.as_ref().unwrap().into();
let mut row_ids = cell_data.row_ids.clone();
for inserted in changeset.inserted_row_ids.iter() {
if !row_ids.iter().any(|row_id| row_id == inserted) {
@@ -40,9 +40,9 @@ impl From<&RelationCellData> for Cell {
.map(|id| Any::String(Arc::from(id.to_string())))
.collect::<Vec<_>>(),
));
new_cell_builder(FieldType::Relation)
.insert_any(CELL_DATA, data)
.build()
let mut cell = new_cell_builder(FieldType::Relation);
cell.insert(CELL_DATA.into(), data);
cell
}
}
@@ -1,6 +1,6 @@
use collab::util::AnyMapExt;
use std::cmp::Ordering;

use collab::core::any_map::AnyMapExtension;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell;
use serde::{Deserialize, Serialize};
@@ -33,8 +33,8 @@ impl TypeOption for MultiSelectTypeOption {
impl From<TypeOptionData> for MultiSelectTypeOption {
fn from(data: TypeOptionData) -> Self {
data
.get_str_value("content")
.map(|s| serde_json::from_str::<MultiSelectTypeOption>(&s).unwrap_or_default())
.get_as::<String>("content")
.map(|json| serde_json::from_str::<MultiSelectTypeOption>(&json).unwrap_or_default())
.unwrap_or_default()
}
}
@@ -42,9 +42,7 @@ impl From<TypeOptionData> for MultiSelectTypeOption {
impl From<MultiSelectTypeOption> for TypeOptionData {
fn from(data: MultiSelectTypeOption) -> Self {
let content = serde_json::to_string(&data).unwrap_or_default();
TypeOptionDataBuilder::new()
.insert_str_value("content", content)
.build()
TypeOptionDataBuilder::from([("content".into(), content.into())])
}
}
@@ -1,6 +1,6 @@
use collab::util::AnyMapExt;
use std::str::FromStr;

use collab::core::any_map::AnyMapExtension;
use collab_database::rows::{new_cell_builder, Cell};

use flowy_error::FlowyError;
@@ -26,9 +26,9 @@ impl SelectOptionIds {
self.0
}
pub fn to_cell_data(&self, field_type: FieldType) -> Cell {
new_cell_builder(field_type)
.insert_str_value(CELL_DATA, self.to_string())
.build()
let mut cell = new_cell_builder(field_type);
cell.insert(CELL_DATA.into(), self.to_string().into());
cell
}
}

@@ -40,7 +40,7 @@ impl TypeOptionCellData for SelectOptionIds {

impl From<&Cell> for SelectOptionIds {
fn from(cell: &Cell) -> Self {
let value = cell.get_str_value(CELL_DATA).unwrap_or_default();
let value: String = cell.get_as(CELL_DATA).unwrap_or_default();
Self::from_str(&value).unwrap_or_default()
}
}
@@ -8,7 +8,7 @@ use crate::services::field::{
SelectOptionCellChangeset, SelectOptionIds, SelectTypeOptionSharedAction,
};
use crate::services::sort::SortCondition;
use collab::core::any_map::AnyMapExtension;
use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell;
use flowy_error::FlowyResult;
@@ -32,7 +32,7 @@ impl TypeOption for SingleSelectTypeOption {
impl From<TypeOptionData> for SingleSelectTypeOption {
fn from(data: TypeOptionData) -> Self {
data
.get_str_value("content")
.get_as::<String>("content")
.map(|s| serde_json::from_str::<SingleSelectTypeOption>(&s).unwrap_or_default())
.unwrap_or_default()
}
@@ -41,9 +41,7 @@ impl From<TypeOptionData> for SingleSelectTypeOption {
impl From<SingleSelectTypeOption> for TypeOptionData {
fn from(data: SingleSelectTypeOption) -> Self {
let content = serde_json::to_string(&data).unwrap_or_default();
TypeOptionDataBuilder::new()
.insert_str_value("content", content)
.build()
TypeOptionDataBuilder::from([("content".into(), content.into())])
}
}
@@ -7,7 +7,7 @@ use crate::services::field::{
TypeOptionCellDataSerde, TypeOptionTransform,
};
use crate::services::sort::SortCondition;
use collab::core::any_map::AnyMapExtension;
use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell;
use flowy_error::FlowyResult;
@@ -20,16 +20,14 @@ pub struct SummarizationTypeOption {

impl From<TypeOptionData> for SummarizationTypeOption {
fn from(value: TypeOptionData) -> Self {
let auto_fill = value.get_bool_value("auto_fill").unwrap_or_default();
let auto_fill: bool = value.get_as("auto_fill").unwrap_or_default();
Self { auto_fill }
}
}

impl From<SummarizationTypeOption> for TypeOptionData {
fn from(value: SummarizationTypeOption) -> Self {
TypeOptionDataBuilder::new()
.insert_bool_value("auto_fill", value.auto_fill)
.build()
TypeOptionDataBuilder::from([("auto_fill".into(), value.auto_fill.into())])
}
}
@@ -1,6 +1,6 @@
use crate::entities::FieldType;
use crate::services::field::{TypeOptionCellData, CELL_DATA};
use collab::core::any_map::AnyMapExtension;
use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell};

#[derive(Default, Debug, Clone)]
@@ -21,15 +21,15 @@ impl TypeOptionCellData for SummaryCellData {

impl From<&Cell> for SummaryCellData {
fn from(cell: &Cell) -> Self {
Self(cell.get_str_value(CELL_DATA).unwrap_or_default())
Self(cell.get_as::<String>(CELL_DATA).unwrap_or_default())
}
}

impl From<SummaryCellData> for Cell {
fn from(data: SummaryCellData) -> Self {
new_cell_builder(FieldType::Summary)
.insert_str_value(CELL_DATA, data.0)
.build()
let mut cell = new_cell_builder(FieldType::Summary);
cell.insert(CELL_DATA.into(), data.0.into());
cell
}
}
@@ -1,6 +1,6 @@
use collab::util::AnyMapExt;
use std::cmp::Ordering;

use collab::core::any_map::AnyMapExtension;
use collab_database::fields::{Field, TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::{new_cell_builder, Cell};
use serde::{Deserialize, Serialize};
@@ -33,16 +33,15 @@ impl TypeOption for RichTextTypeOption {

impl From<TypeOptionData> for RichTextTypeOption {
fn from(data: TypeOptionData) -> Self {
let s = data.get_str_value(CELL_DATA).unwrap_or_default();
Self { inner: s }
Self {
inner: data.get_as(CELL_DATA).unwrap_or_default(),
}
}
}

impl From<RichTextTypeOption> for TypeOptionData {
fn from(data: RichTextTypeOption) -> Self {
TypeOptionDataBuilder::new()
.insert_str_value(CELL_DATA, data.inner)
.build()
TypeOptionDataBuilder::from([(CELL_DATA.into(), data.inner.into())])
}
}

@@ -164,15 +163,15 @@ impl TypeOptionCellData for StringCellData {

impl From<&Cell> for StringCellData {
fn from(cell: &Cell) -> Self {
Self(cell.get_str_value(CELL_DATA).unwrap_or_default())
Self(cell.get_as(CELL_DATA).unwrap_or_default())
}
}

impl From<StringCellData> for Cell {
fn from(data: StringCellData) -> Self {
new_cell_builder(FieldType::RichText)
.insert_str_value(CELL_DATA, data.0)
.build()
let mut cell = new_cell_builder(FieldType::RichText);
cell.insert(CELL_DATA.into(), data.0.into());
cell
}
}
@@ -29,7 +29,7 @@ impl From<TypeOptionData> for TimeTypeOption {

impl From<TimeTypeOption> for TypeOptionData {
fn from(_data: TimeTypeOption) -> Self {
TypeOptionDataBuilder::new().build()
TypeOptionDataBuilder::new()
}
}
@@ -1,6 +1,6 @@
use crate::entities::FieldType;
use crate::services::field::{TypeOptionCellData, CELL_DATA};
use collab::core::any_map::AnyMapExtension;
use collab::util::AnyMapExt;
use collab_database::rows::{new_cell_builder, Cell};

#[derive(Clone, Debug, Default)]
@@ -16,7 +16,7 @@ impl From<&Cell> for TimeCellData {
fn from(cell: &Cell) -> Self {
Self(
cell
.get_str_value(CELL_DATA)
.get_as::<String>(CELL_DATA)
.and_then(|data| data.parse::<i64>().ok()),
)
}
@@ -40,8 +40,8 @@ impl ToString for TimeCellData {

impl From<&TimeCellData> for Cell {
fn from(data: &TimeCellData) -> Self {
new_cell_builder(FieldType::Time)
.insert_str_value(CELL_DATA, data.to_string())
.build()
let mut cell = new_cell_builder(FieldType::Time);
cell.insert(CELL_DATA.into(), data.to_string().into());
cell
}
}
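A recurring change in the cell conversions above is the switch from the old new_cell_builder(..).insert_*_value(..).build() chain to treating the cell as a plain map and inserting Any-convertible values directly. A rough sketch of that shape, with a HashMap and a hypothetical Value enum standing in for the real Cell and Any types from collab_database:

use std::collections::HashMap;

// Hypothetical stand-ins for collab's Cell and Any value types.
type Cell = HashMap<String, Value>;

#[derive(Debug, Clone)]
enum Value {
  Text(String),
  Bool(bool),
}

impl From<String> for Value {
  fn from(s: String) -> Self {
    Value::Text(s)
  }
}

impl From<bool> for Value {
  fn from(b: bool) -> Self {
    Value::Bool(b)
  }
}

const CELL_DATA: &str = "data";

// Roughly what new_cell_builder does: seed the map with the field type tag.
fn new_cell_builder(field_type: &str) -> Cell {
  let mut cell = Cell::new();
  cell.insert("field_type".to_string(), field_type.to_string().into());
  cell
}

fn main() {
  // New style: build the map, then insert key/value pairs via Into.
  let mut cell = new_cell_builder("DateTime");
  cell.insert(CELL_DATA.into(), "1700000000".to_string().into());
  cell.insert("include_time".into(), true.into());
  println!("{:?}", cell);
}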
Some files were not shown because too many files have changed in this diff.