feat: migrate user data to cloud (#3078)

* refactor: weaken passed-in params in handler

* refactor: rename struct

* chore: update tables

* chore: update schema

* chore: add permission

* chore: update tables

* chore: support transaction mode

* chore: workspace database id

* chore: add user workspace

* feat: return list of workspaces

* chore: add user to workspace

* feat: separate database row table

* refactor: update schema

* chore: partition table

* chore: use transaction

* refactor: dir

* refactor: collab db ref

* fix: collab db lock

* chore: rename files

* chore: add tables descriptions

* chore: update readme

* docs: update documentation

* chore: rename crate

* chore: update ref

* chore: update tests

* chore: update tests

* refactor: crate deps

* chore: update crate ref

* chore: remove unused deps

* chore: remove unused deps

* chore: update collab crate refs

* chore: replace client with transaction in pooler

* refactor: return error type

* refactor: use anyhow error in deps

* feat: supabase postgrest user signin (wip)

* fix: Cargo.toml source git deps, changed Error to anyhow::Error

* fix: uuid serialization

* chore: fix conflict

* chore: extend the response

* feat: add implementation placeholders

* feat: impl get_user_workspaces

* feat: impl get_user_profile

* test: create workspace

* fix: postgrest: field names and alias

* chore: implement folder restful api

* chore: implement collab storage with restful api

* feat: added placeholders for impl: update_user_profile, check_user

* feat: impl: update_user_profile

* feat: impl: check_user

* fix: use UidResponse, add more debug info for serde serialization error

* fix: get_user_profile: use Optional<UserProfileResponse>

* chore: implement init sync

* chore: support soft delete

* feat: postgresql: add migration test

* feat: postgresql migration test: added UID display and colored output

* feat: postgresql migration test: workspace role

* feat: postgresql migration test: create shared common utils

* feat: postgresql migration test: fixed shebang

* chore: add flush_collab_update pg function

* chore: implement database and document restful api

* chore: migrate to use restful api

* chore: update table schema

* chore: fix tests

* chore: remove unused code

* chore: format code

* chore: remove unused env

* fix: tauri build

* fix: tauri build

---------

Co-authored-by: Fu Zi Xiang <speed2exe@live.com.sg>
Author: Nathan.fooo
Date: 2023-07-29 09:46:24 +08:00
Committed by: GitHub
Parent: a885170869
Commit: 2cd88594e8

179 changed files with 4999 additions and 5314 deletions


@ -1,9 +1,9 @@
import 'dart:async'; import 'dart:async';
import 'package:appflowy_backend/protobuf/flowy-user/user_setting.pb.dart';
import 'package:connectivity_plus/connectivity_plus.dart'; import 'package:connectivity_plus/connectivity_plus.dart';
import 'package:appflowy_backend/log.dart'; import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart'; import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/protobuf/flowy-net/network_state.pb.dart';
import 'package:flutter/services.dart'; import 'package:flutter/services.dart';
class NetworkListener { class NetworkListener {
@ -46,12 +46,12 @@ class NetworkListener {
return NetworkTypePB.VPN; return NetworkTypePB.VPN;
case ConnectivityResult.none: case ConnectivityResult.none:
case ConnectivityResult.other: case ConnectivityResult.other:
return NetworkTypePB.Unknown; return NetworkTypePB.NetworkUnknown;
} }
}(); }();
Log.info("Network type: $networkType"); Log.info("Network type: $networkType");
final state = NetworkStatePB.create()..ty = networkType; final state = NetworkStatePB.create()..ty = networkType;
NetworkEventUpdateNetworkType(state).send().then((result) { UserEventUpdateNetworkState(state).send().then((result) {
result.fold( result.fold(
(l) {}, (l) {},
(e) => Log.error(e), (e) => Log.error(e),
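
For reference, a minimal sketch (not part of the commit) of how the network state is reported after the flowy-net event is folded into the user module; it only uses identifiers visible in the hunk above and assumes the same imports.

// Hedged sketch: the flowy-net event is gone, so the same NetworkStatePB
// now goes through the flowy-user event map instead.
final state = NetworkStatePB.create()..ty = NetworkTypePB.NetworkUnknown;
UserEventUpdateNetworkState(state).send().then(
  (result) => result.fold(
    (l) {},               // success: nothing to do
    (e) => Log.error(e),  // failure: log the error
  ),
);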


@ -28,53 +28,13 @@ abstract class Env {
defaultValue: '', defaultValue: '',
) )
static final String supabaseAnonKey = _Env.supabaseAnonKey; static final String supabaseAnonKey = _Env.supabaseAnonKey;
@EnviedField(
obfuscate: true,
varName: 'SUPABASE_KEY',
defaultValue: '',
)
static final String supabaseKey = _Env.supabaseKey;
@EnviedField( @EnviedField(
obfuscate: true, obfuscate: true,
varName: 'SUPABASE_JWT_SECRET', varName: 'SUPABASE_JWT_SECRET',
defaultValue: '', defaultValue: '',
) )
static final String supabaseJwtSecret = _Env.supabaseJwtSecret; static final String supabaseJwtSecret = _Env.supabaseJwtSecret;
@EnviedField(
obfuscate: true,
varName: 'SUPABASE_DB',
defaultValue: '',
)
static final String supabaseDb = _Env.supabaseDb;
@EnviedField(
obfuscate: true,
varName: 'SUPABASE_DB_USER',
defaultValue: '',
)
static final String supabaseDbUser = _Env.supabaseDbUser;
@EnviedField(
obfuscate: true,
varName: 'SUPABASE_DB_PASSWORD',
defaultValue: '',
)
static final String supabaseDbPassword = _Env.supabaseDbPassword;
@EnviedField(
obfuscate: true,
varName: 'SUPABASE_DB_PORT',
defaultValue: '5432',
)
static final String supabaseDbPort = _Env.supabaseDbPort;
@EnviedField(
obfuscate: true,
varName: 'ENABLE_SUPABASE_SYNC',
defaultValue: true,
)
static final bool enableSupabaseSync = _Env.enableSupabaseSync;
} }
bool get isSupabaseEnable => false; bool get isSupabaseEnable => false;


@ -30,19 +30,11 @@ class InitRustSDKTask extends LaunchTask {
} }
AppFlowyEnv getAppFlowyEnv() { AppFlowyEnv getAppFlowyEnv() {
final postgresConfig = PostgresConfiguration(
url: Env.supabaseDb,
password: Env.supabaseDbPassword,
port: int.parse(Env.supabaseDbPort),
user_name: Env.supabaseDbUser,
);
final supabaseConfig = SupabaseConfiguration( final supabaseConfig = SupabaseConfiguration(
enable_sync: Env.enableSupabaseSync, enable_sync: true,
url: Env.supabaseUrl, url: Env.supabaseUrl,
key: Env.supabaseKey, anon_key: Env.supabaseAnonKey,
jwt_secret: Env.supabaseJwtSecret, jwt_secret: Env.supabaseJwtSecret,
postgres_config: postgresConfig,
); );
return AppFlowyEnv( return AppFlowyEnv(
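
As a reading aid, a minimal sketch of the trimmed-down configuration after this hunk: the Postgres fields are removed and only the anon key and JWT secret remain. Field names are taken from the diff; everything else is illustrative.

// Hedged sketch: build the Supabase config from the remaining Env fields only.
final supabaseConfig = SupabaseConfiguration(
  enable_sync: true,
  url: Env.supabaseUrl,
  anon_key: Env.supabaseAnonKey,
  jwt_secret: Env.supabaseJwtSecret,
);
// The resulting config is then passed into AppFlowyEnv as before.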


@ -145,10 +145,9 @@ class SupabaseAuthService implements AuthService {
Future<void> signOut({ Future<void> signOut({
AuthTypePB authType = AuthTypePB.Supabase, AuthTypePB authType = AuthTypePB.Supabase,
}) async { }) async {
if (!isSupabaseEnable) { if (isSupabaseEnable) {
return _appFlowyAuthService.signOut(); await _auth.signOut();
} }
await _auth.signOut();
await _appFlowyAuthService.signOut( await _appFlowyAuthService.signOut(
authType: authType, authType: authType,
); );


@ -125,7 +125,7 @@ class SignInBloc extends Bloc<SignInEvent, SignInState> {
} }
SignInState stateFromCode(FlowyError error) { SignInState stateFromCode(FlowyError error) {
switch (ErrorCode.valueOf(error.code)!) { switch (ErrorCode.valueOf(error.code)) {
case ErrorCode.EmailFormatInvalid: case ErrorCode.EmailFormatInvalid:
return state.copyWith( return state.copyWith(
isSubmitting: false, isSubmitting: false,


@ -1,4 +1,5 @@
import 'package:appflowy/plugins/document/presentation/more/cubit/document_appearance_cubit.dart'; import 'package:appflowy/plugins/document/presentation/more/cubit/document_appearance_cubit.dart';
import 'package:appflowy/startup/entry_point.dart';
import 'package:appflowy/startup/startup.dart'; import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/util/color_generator/color_generator.dart'; import 'package:appflowy/util/color_generator/color_generator.dart';
import 'package:appflowy/workspace/application/menu/menu_user_bloc.dart'; import 'package:appflowy/workspace/application/menu/menu_user_bloc.dart';
@ -87,7 +88,8 @@ class MenuUser extends StatelessWidget {
} }
Widget _renderUserName(BuildContext context) { Widget _renderUserName(BuildContext context) {
final String name = userName(context.read<MenuUserBloc>().state.userProfile); final String name =
userName(context.read<MenuUserBloc>().state.userProfile);
return FlowyText.medium( return FlowyText.medium(
name, name,
overflow: TextOverflow.ellipsis, overflow: TextOverflow.ellipsis,
@ -106,7 +108,17 @@ class MenuUser extends StatelessWidget {
builder: (context) { builder: (context) {
return BlocProvider<DocumentAppearanceCubit>.value( return BlocProvider<DocumentAppearanceCubit>.value(
value: BlocProvider.of<DocumentAppearanceCubit>(context), value: BlocProvider.of<DocumentAppearanceCubit>(context),
child: SettingsDialog(userProfile), child: SettingsDialog(
userProfile,
didLogout: () async {
Navigator.of(context).pop();
await FlowyRunner.run(
FlowyApp(),
integrationEnv(),
);
},
dismissDialog: () => Navigator.of(context).pop(),
),
); );
}, },
); );


@ -18,8 +18,15 @@ const _dialogHorizontalPadding = EdgeInsets.symmetric(horizontal: 12);
const _contentInsetPadding = EdgeInsets.fromLTRB(0.0, 12.0, 0.0, 16.0); const _contentInsetPadding = EdgeInsets.fromLTRB(0.0, 12.0, 0.0, 16.0);
class SettingsDialog extends StatelessWidget { class SettingsDialog extends StatelessWidget {
final VoidCallback dismissDialog;
final VoidCallback didLogout;
final UserProfilePB user; final UserProfilePB user;
SettingsDialog(this.user, {Key? key}) : super(key: ValueKey(user.id)); SettingsDialog(
this.user, {
required this.dismissDialog,
required this.didLogout,
Key? key,
}) : super(key: ValueKey(user.id));
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
@ -86,7 +93,11 @@ class SettingsDialog extends StatelessWidget {
case SettingsPage.files: case SettingsPage.files:
return const SettingsFileSystemView(); return const SettingsFileSystemView();
case SettingsPage.user: case SettingsPage.user:
return SettingsUserView(user); return SettingsUserView(
user,
didLogin: () => dismissDialog(),
didLogout: didLogout,
);
case SettingsPage.supabaseSetting: case SettingsPage.supabaseSetting:
return const SupabaseSettingView(); return const SupabaseSettingView();
case SettingsPage.shortcuts: case SettingsPage.shortcuts:
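
A minimal sketch of how a caller now wires the two new callbacks, mirroring the MenuUser change above; userProfile, the surrounding BuildContext, and the showDialog wrapper are assumed to be in scope and are illustrative only.

// Hedged sketch: open the settings dialog with the new required callbacks.
showDialog(
  context: context,
  builder: (dialogContext) => SettingsDialog(
    userProfile,
    // On logout: close the dialog and relaunch the app so the
    // signed-out state is picked up, as MenuUser does above.
    didLogout: () async {
      Navigator.of(dialogContext).pop();
      await FlowyRunner.run(FlowyApp(), integrationEnv());
    },
    dismissDialog: () => Navigator.of(dialogContext).pop(),
  ),
);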


@ -1,3 +1,5 @@
import 'package:appflowy/startup/entry_point.dart';
import 'package:appflowy/startup/launch_configuration.dart';
import 'package:appflowy/startup/startup.dart'; import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/user/application/sign_in_bloc.dart'; import 'package:appflowy/user/application/sign_in_bloc.dart';
import 'package:appflowy/user/presentation/sign_in_screen.dart'; import 'package:appflowy/user/presentation/sign_in_screen.dart';
@ -9,7 +11,8 @@ import 'package:flutter/widgets.dart';
import 'package:flutter_bloc/flutter_bloc.dart'; import 'package:flutter_bloc/flutter_bloc.dart';
class SettingThirdPartyLogin extends StatelessWidget { class SettingThirdPartyLogin extends StatelessWidget {
const SettingThirdPartyLogin({super.key}); final VoidCallback didLogin;
const SettingThirdPartyLogin({required this.didLogin, super.key});
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
@ -27,13 +30,20 @@ class SettingThirdPartyLogin extends StatelessWidget {
); );
} }
void _handleSuccessOrFail( Future<void> _handleSuccessOrFail(
Either<UserProfilePB, FlowyError> result, Either<UserProfilePB, FlowyError> result,
BuildContext context, BuildContext context,
) { ) async {
result.fold( result.fold(
(user) { (user) async {
// TODO(Lucas): push to home screen didLogin();
await FlowyRunner.run(
FlowyApp(),
integrationEnv(),
config: const LaunchConfiguration(
autoRegistrationSupported: true,
),
);
}, },
(error) => showSnapBar(context, error.msg), (error) => showSnapBar(context, error.msg),
); );


@ -3,11 +3,11 @@ import 'dart:async';
import 'package:appflowy/env/env.dart'; import 'package:appflowy/env/env.dart';
import 'package:appflowy/generated/locale_keys.g.dart'; import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/startup/entry_point.dart';
import 'package:appflowy/startup/startup.dart'; import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/user/application/auth/auth_service.dart'; import 'package:appflowy/user/application/auth/auth_service.dart';
import 'package:appflowy/util/debounce.dart'; import 'package:appflowy/util/debounce.dart';
import 'package:appflowy/workspace/application/user/settings_user_bloc.dart'; import 'package:appflowy/workspace/application/user/settings_user_bloc.dart';
import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart'; import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:easy_localization/easy_localization.dart'; import 'package:easy_localization/easy_localization.dart';
import 'package:flowy_infra/image.dart'; import 'package:flowy_infra/image.dart';
@ -22,8 +22,15 @@ const defaultUserAvatar = '1F600';
const _iconSize = Size(60, 60); const _iconSize = Size(60, 60);
class SettingsUserView extends StatelessWidget { class SettingsUserView extends StatelessWidget {
final VoidCallback didLogin;
final VoidCallback didLogout;
final UserProfilePB user; final UserProfilePB user;
SettingsUserView(this.user, {Key? key}) : super(key: ValueKey(user.id)); SettingsUserView(
this.user, {
required this.didLogin,
required this.didLogout,
Key? key,
}) : super(key: ValueKey(user.id));
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
@ -58,7 +65,9 @@ class SettingsUserView extends StatelessWidget {
} }
if (state.userProfile.authType == AuthTypePB.Local) { if (state.userProfile.authType == AuthTypePB.Local) {
return const SettingThirdPartyLogin(); return SettingThirdPartyLogin(
didLogin: didLogin,
);
} else { } else {
return _renderLogoutButton(context); return _renderLogoutButton(context);
} }
@ -88,15 +97,17 @@ class SettingsUserView extends StatelessWidget {
Widget _renderLogoutButton(BuildContext context) { Widget _renderLogoutButton(BuildContext context) {
return FlowyButton( return FlowyButton(
useIntrinsicWidth: true, useIntrinsicWidth: true,
text: const FlowyText( text: FlowyText(
'Logout', LocaleKeys.settings_menu_logout.tr(),
), ),
onTap: () async { onTap: () async {
await getIt<AuthService>().signOut(); NavigatorAlertDialog(
await FlowyRunner.run( title: LocaleKeys.settings_menu_logoutPrompt.tr(),
FlowyApp(), confirm: () async {
integrationEnv(), await getIt<AuthService>().signOut();
); didLogout();
},
).show(context);
}, },
); );
} }


@ -4,7 +4,6 @@ import 'package:appflowy_backend/log.dart';
// ignore: unnecessary_import // ignore: unnecessary_import
import 'package:appflowy_backend/protobuf/dart-ffi/ffi_response.pb.dart'; import 'package:appflowy_backend/protobuf/dart-ffi/ffi_response.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart'; import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-net/network_state.pb.dart';
import 'package:isolates/isolates.dart'; import 'package:isolates/isolates.dart';
import 'package:isolates/ports.dart'; import 'package:isolates/ports.dart';
import 'package:ffi/ffi.dart'; import 'package:ffi/ffi.dart';
@ -23,11 +22,9 @@ import 'package:protobuf/protobuf.dart';
import 'dart:convert' show utf8; import 'dart:convert' show utf8;
import '../protobuf/flowy-config/entities.pb.dart'; import '../protobuf/flowy-config/entities.pb.dart';
import '../protobuf/flowy-config/event_map.pb.dart'; import '../protobuf/flowy-config/event_map.pb.dart';
import '../protobuf/flowy-net/event_map.pb.dart';
import 'error.dart'; import 'error.dart';
part 'dart_event/flowy-folder2/dart_event.dart'; part 'dart_event/flowy-folder2/dart_event.dart';
part 'dart_event/flowy-net/dart_event.dart';
part 'dart_event/flowy-user/dart_event.dart'; part 'dart_event/flowy-user/dart_event.dart';
part 'dart_event/flowy-database2/dart_event.dart'; part 'dart_event/flowy-database2/dart_event.dart';
part 'dart_event/flowy-document2/dart_event.dart'; part 'dart_event/flowy-document2/dart_event.dart';


@ -26,16 +26,14 @@ class SupabaseConfiguration {
/// Indicates whether the sync feature is enabled. /// Indicates whether the sync feature is enabled.
final bool enable_sync; final bool enable_sync;
final String url; final String url;
final String key; final String anon_key;
final String jwt_secret; final String jwt_secret;
final PostgresConfiguration postgres_config;
SupabaseConfiguration({ SupabaseConfiguration({
this.enable_sync = true, this.enable_sync = true,
required this.url, required this.url,
required this.key, required this.anon_key,
required this.jwt_secret, required this.jwt_secret,
required this.postgres_config,
}); });
factory SupabaseConfiguration.fromJson(Map<String, dynamic> json) => factory SupabaseConfiguration.fromJson(Map<String, dynamic> json) =>
@ -43,23 +41,3 @@ class SupabaseConfiguration {
Map<String, dynamic> toJson() => _$SupabaseConfigurationToJson(this); Map<String, dynamic> toJson() => _$SupabaseConfigurationToJson(this);
} }
@JsonSerializable()
class PostgresConfiguration {
final String url;
final String user_name;
final String password;
final int port;
PostgresConfiguration({
required this.url,
required this.user_name,
required this.password,
required this.port,
});
factory PostgresConfiguration.fromJson(Map<String, dynamic> json) =>
_$PostgresConfigurationFromJson(json);
Map<String, dynamic> toJson() => _$PostgresConfigurationToJson(this);
}
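
For clarity, the JSON shape SupabaseConfiguration.fromJson accepts after this change; the keys match the generated code in the next hunk, and the values are placeholders.

// Hedged sketch: no nested postgres_config block anymore.
final config = SupabaseConfiguration.fromJson({
  'enable_sync': true,
  'url': 'https://<project>.supabase.co',
  'anon_key': '<anon key>',
  'jwt_secret': '<jwt secret>',
});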


@ -21,10 +21,8 @@ SupabaseConfiguration _$SupabaseConfigurationFromJson(
SupabaseConfiguration( SupabaseConfiguration(
enable_sync: json['enable_sync'] as bool? ?? true, enable_sync: json['enable_sync'] as bool? ?? true,
url: json['url'] as String, url: json['url'] as String,
key: json['key'] as String, anon_key: json['anon_key'] as String,
jwt_secret: json['jwt_secret'] as String, jwt_secret: json['jwt_secret'] as String,
postgres_config: PostgresConfiguration.fromJson(
json['postgres_config'] as Map<String, dynamic>),
); );
Map<String, dynamic> _$SupabaseConfigurationToJson( Map<String, dynamic> _$SupabaseConfigurationToJson(
@ -32,25 +30,6 @@ Map<String, dynamic> _$SupabaseConfigurationToJson(
<String, dynamic>{ <String, dynamic>{
'enable_sync': instance.enable_sync, 'enable_sync': instance.enable_sync,
'url': instance.url, 'url': instance.url,
'key': instance.key, 'anon_key': instance.anon_key,
'jwt_secret': instance.jwt_secret, 'jwt_secret': instance.jwt_secret,
'postgres_config': instance.postgres_config,
};
PostgresConfiguration _$PostgresConfigurationFromJson(
Map<String, dynamic> json) =>
PostgresConfiguration(
url: json['url'] as String,
user_name: json['user_name'] as String,
password: json['password'] as String,
port: json['port'] as int,
);
Map<String, dynamic> _$PostgresConfigurationToJson(
PostgresConfiguration instance) =>
<String, dynamic>{
'url': instance.url,
'user_name': instance.user_name,
'password': instance.password,
'port': instance.port,
}; };


@ -105,7 +105,7 @@ checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]] [[package]]
name = "appflowy-integrate" name = "appflowy-integrate"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"collab", "collab",
@ -642,15 +642,6 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" checksum = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a"
[[package]]
name = "block-buffer"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
dependencies = [
"generic-array",
]
[[package]] [[package]]
name = "block-buffer" name = "block-buffer"
version = "0.10.4" version = "0.10.4"
@ -1030,7 +1021,7 @@ dependencies = [
[[package]] [[package]]
name = "collab" name = "collab"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -1048,7 +1039,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-client-ws" name = "collab-client-ws"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"bytes", "bytes",
"collab-sync", "collab-sync",
@ -1059,14 +1050,14 @@ dependencies = [
"tokio", "tokio",
"tokio-retry", "tokio-retry",
"tokio-stream", "tokio-stream",
"tokio-tungstenite 0.18.0", "tokio-tungstenite",
"tracing", "tracing",
] ]
[[package]] [[package]]
name = "collab-database" name = "collab-database"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1093,7 +1084,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-derive" name = "collab-derive"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -1105,7 +1096,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-document" name = "collab-document"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"collab", "collab",
@ -1124,7 +1115,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-folder" name = "collab-folder"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"chrono", "chrono",
@ -1144,7 +1135,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-persistence" name = "collab-persistence"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"bincode", "bincode",
"chrono", "chrono",
@ -1164,30 +1155,26 @@ dependencies = [
[[package]] [[package]]
name = "collab-plugins" name = "collab-plugins"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
"aws-config", "aws-config",
"aws-credential-types", "aws-credential-types",
"aws-sdk-dynamodb", "aws-sdk-dynamodb",
"base64 0.21.2",
"collab", "collab",
"collab-client-ws", "collab-client-ws",
"collab-persistence", "collab-persistence",
"collab-sync", "collab-sync",
"futures-util", "futures-util",
"parking_lot 0.12.1", "parking_lot 0.12.1",
"postgrest",
"rand 0.8.5", "rand 0.8.5",
"refinery",
"rusoto_credential", "rusoto_credential",
"serde", "serde",
"serde_json", "serde_json",
"similar 2.2.1", "similar 2.2.1",
"thiserror", "thiserror",
"tokio", "tokio",
"tokio-postgres",
"tokio-retry", "tokio-retry",
"tokio-stream", "tokio-stream",
"tracing", "tracing",
@ -1198,7 +1185,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-sync" name = "collab-sync"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f420738#f4207385738961a9aa4ea871731de204dfee8455" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=12811d#12811d26a96330f6c1acaa8815f1d8d61ca3aa61"
dependencies = [ dependencies = [
"bytes", "bytes",
"collab", "collab",
@ -1483,40 +1470,6 @@ dependencies = [
"parking_lot_core 0.9.8", "parking_lot_core 0.9.8",
] ]
[[package]]
name = "deadpool"
version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "421fe0f90f2ab22016f32a9881be5134fdd71c65298917084b0c7477cbc3856e"
dependencies = [
"async-trait",
"deadpool-runtime",
"num_cpus",
"retain_mut",
"tokio",
]
[[package]]
name = "deadpool-postgres"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "836a24a9d49deefe610b8b60c767a7412e9a931d79a89415cd2d2d71630ca8d7"
dependencies = [
"deadpool",
"log",
"tokio",
"tokio-postgres",
]
[[package]]
name = "deadpool-runtime"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1"
dependencies = [
"tokio",
]
[[package]] [[package]]
name = "derivative" name = "derivative"
version = "2.2.0" version = "2.2.0"
@ -1579,22 +1532,13 @@ dependencies = [
"migrations_macros", "migrations_macros",
] ]
[[package]]
name = "digest"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
dependencies = [
"generic-array",
]
[[package]] [[package]]
name = "digest" name = "digest"
version = "0.10.7" version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [ dependencies = [
"block-buffer 0.10.4", "block-buffer",
"crypto-common", "crypto-common",
"subtle", "subtle",
] ]
@ -1853,12 +1797,10 @@ dependencies = [
name = "flowy-config" name = "flowy-config"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"appflowy-integrate",
"bytes", "bytes",
"flowy-codegen", "flowy-codegen",
"flowy-derive", "flowy-derive",
"flowy-error", "flowy-error",
"flowy-server",
"flowy-sqlite", "flowy-sqlite",
"lib-dispatch", "lib-dispatch",
"protobuf", "protobuf",
@ -1873,9 +1815,12 @@ dependencies = [
"bytes", "bytes",
"diesel", "diesel",
"flowy-config", "flowy-config",
"flowy-database-deps",
"flowy-database2", "flowy-database2",
"flowy-document-deps",
"flowy-document2", "flowy-document2",
"flowy-error", "flowy-error",
"flowy-folder-deps",
"flowy-folder2", "flowy-folder2",
"flowy-net", "flowy-net",
"flowy-server", "flowy-server",
@ -1883,11 +1828,11 @@ dependencies = [
"flowy-sqlite", "flowy-sqlite",
"flowy-task", "flowy-task",
"flowy-user", "flowy-user",
"flowy-user-deps",
"futures-core", "futures-core",
"lib-dispatch", "lib-dispatch",
"lib-infra", "lib-infra",
"lib-log", "lib-log",
"lib-ws",
"parking_lot 0.12.1", "parking_lot 0.12.1",
"serde", "serde",
"serde_json", "serde_json",
@ -1897,6 +1842,16 @@ dependencies = [
"uuid", "uuid",
] ]
[[package]]
name = "flowy-database-deps"
version = "0.1.0"
dependencies = [
"anyhow",
"collab-plugins",
"flowy-error",
"lib-infra",
]
[[package]] [[package]]
name = "flowy-database2" name = "flowy-database2"
version = "0.1.0" version = "0.1.0"
@ -1914,6 +1869,7 @@ dependencies = [
"dashmap", "dashmap",
"fancy-regex 0.10.0", "fancy-regex 0.10.0",
"flowy-codegen", "flowy-codegen",
"flowy-database-deps",
"flowy-derive", "flowy-derive",
"flowy-error", "flowy-error",
"flowy-notification", "flowy-notification",
@ -1954,6 +1910,16 @@ dependencies = [
"walkdir", "walkdir",
] ]
[[package]]
name = "flowy-document-deps"
version = "0.1.0"
dependencies = [
"anyhow",
"collab-document",
"flowy-error",
"lib-infra",
]
[[package]] [[package]]
name = "flowy-document2" name = "flowy-document2"
version = "0.1.0" version = "0.1.0"
@ -1965,6 +1931,7 @@ dependencies = [
"collab-document", "collab-document",
"flowy-codegen", "flowy-codegen",
"flowy-derive", "flowy-derive",
"flowy-document-deps",
"flowy-error", "flowy-error",
"flowy-notification", "flowy-notification",
"futures", "futures",
@ -1976,7 +1943,6 @@ dependencies = [
"protobuf", "protobuf",
"serde", "serde",
"serde_json", "serde_json",
"strum",
"strum_macros", "strum_macros",
"tokio", "tokio",
"tokio-stream", "tokio-stream",
@ -1995,7 +1961,6 @@ dependencies = [
"flowy-codegen", "flowy-codegen",
"flowy-derive", "flowy-derive",
"flowy-sqlite", "flowy-sqlite",
"http-error-code",
"lib-dispatch", "lib-dispatch",
"protobuf", "protobuf",
"r2d2", "r2d2",
@ -2004,6 +1969,18 @@ dependencies = [
"serde_json", "serde_json",
"serde_repr", "serde_repr",
"thiserror", "thiserror",
"tokio-postgres",
]
[[package]]
name = "flowy-folder-deps"
version = "0.1.0"
dependencies = [
"anyhow",
"collab-folder",
"flowy-error",
"lib-infra",
"uuid",
] ]
[[package]] [[package]]
@ -2018,6 +1995,7 @@ dependencies = [
"flowy-codegen", "flowy-codegen",
"flowy-derive", "flowy-derive",
"flowy-error", "flowy-error",
"flowy-folder-deps",
"flowy-notification", "flowy-notification",
"lazy_static", "lazy_static",
"lib-dispatch", "lib-dispatch",
@ -2025,7 +2003,6 @@ dependencies = [
"nanoid", "nanoid",
"parking_lot 0.12.1", "parking_lot 0.12.1",
"protobuf", "protobuf",
"strum",
"strum_macros", "strum_macros",
"tokio", "tokio",
"tokio-stream", "tokio-stream",
@ -2038,15 +2015,10 @@ dependencies = [
name = "flowy-net" name = "flowy-net"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"bytes", "bytes",
"flowy-codegen", "flowy-codegen",
"flowy-derive",
"flowy-error",
"lib-dispatch", "lib-dispatch",
"protobuf", "protobuf",
"strum_macros",
"thiserror",
"tracing", "tracing",
] ]
@ -2069,36 +2041,32 @@ name = "flowy-server"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"appflowy-integrate",
"async-stream",
"bytes", "bytes",
"chrono", "chrono",
"collab",
"collab-document", "collab-document",
"collab-folder", "collab-plugins",
"config", "config",
"deadpool-postgres", "flowy-database-deps",
"flowy-database2", "flowy-document-deps",
"flowy-document2",
"flowy-error", "flowy-error",
"flowy-folder2", "flowy-folder-deps",
"flowy-server-config", "flowy-server-config",
"flowy-user", "flowy-user-deps",
"futures", "futures",
"futures-util", "futures-util",
"hex",
"hyper", "hyper",
"lazy_static", "lazy_static",
"lib-infra", "lib-infra",
"nanoid",
"parking_lot 0.12.1", "parking_lot 0.12.1",
"postgrest", "postgrest",
"refinery",
"reqwest", "reqwest",
"serde", "serde",
"serde-aux", "serde-aux",
"serde_json", "serde_json",
"thiserror", "thiserror",
"tokio", "tokio",
"tokio-postgres",
"tokio-retry", "tokio-retry",
"tracing", "tracing",
"uuid", "uuid",
@ -2147,7 +2115,9 @@ name = "flowy-user"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"appflowy-integrate", "appflowy-integrate",
"base64 0.21.2",
"bytes", "bytes",
"chrono",
"collab", "collab",
"collab-folder", "collab-folder",
"diesel", "diesel",
@ -2159,6 +2129,7 @@ dependencies = [
"flowy-notification", "flowy-notification",
"flowy-server-config", "flowy-server-config",
"flowy-sqlite", "flowy-sqlite",
"flowy-user-deps",
"lazy_static", "lazy_static",
"lib-dispatch", "lib-dispatch",
"lib-infra", "lib-infra",
@ -2169,7 +2140,6 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"serde_repr", "serde_repr",
"strum",
"strum_macros", "strum_macros",
"tokio", "tokio",
"tracing", "tracing",
@ -2178,6 +2148,20 @@ dependencies = [
"validator", "validator",
] ]
[[package]]
name = "flowy-user-deps"
version = "0.1.0"
dependencies = [
"anyhow",
"chrono",
"flowy-error",
"lib-infra",
"serde",
"serde_json",
"serde_repr",
"uuid",
]
[[package]] [[package]]
name = "fnv" name = "fnv"
version = "1.0.7" version = "1.0.7"
@ -2737,7 +2721,7 @@ version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
dependencies = [ dependencies = [
"digest 0.10.7", "digest",
] ]
[[package]] [[package]]
@ -2776,16 +2760,6 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
] ]
[[package]]
name = "http-error-code"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Server?branch=refactor/appflowy_server#1ccd296de8530760d92652dbd9f38f27178059b6"
dependencies = [
"serde",
"serde_repr",
"thiserror",
]
[[package]] [[package]]
name = "http-range" name = "http-range"
version = "0.1.5" version = "0.1.5"
@ -3200,31 +3174,6 @@ dependencies = [
"tracing-subscriber 0.2.25", "tracing-subscriber 0.2.25",
] ]
[[package]]
name = "lib-ws"
version = "0.1.0"
dependencies = [
"bytes",
"dashmap",
"futures",
"futures-channel",
"futures-core",
"futures-util",
"lib-infra",
"log",
"parking_lot 0.12.1",
"pin-project",
"protobuf",
"serde",
"serde_json",
"serde_repr",
"strum_macros",
"tokio",
"tokio-tungstenite 0.15.0",
"tracing",
"url",
]
[[package]] [[package]]
name = "lib0" name = "lib0"
version = "0.16.8" version = "0.16.8"
@ -3415,7 +3364,7 @@ version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca"
dependencies = [ dependencies = [
"digest 0.10.7", "digest",
] ]
[[package]] [[package]]
@ -3704,12 +3653,6 @@ version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "opaque-debug"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]] [[package]]
name = "open" name = "open"
version = "3.2.0" version = "3.2.0"
@ -4167,10 +4110,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f028f05971fe20f512bcc679e2c10227e57809a3af86a7606304435bc8896cd6" checksum = "f028f05971fe20f512bcc679e2c10227e57809a3af86a7606304435bc8896cd6"
dependencies = [ dependencies = [
"bytes", "bytes",
"chrono",
"fallible-iterator", "fallible-iterator",
"postgres-protocol", "postgres-protocol",
"uuid",
] ]
[[package]] [[package]]
@ -4548,51 +4489,6 @@ dependencies = [
"thiserror", "thiserror",
] ]
[[package]]
name = "refinery"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb0436d0dd7bd8d4fce1e828751fa79742b08e35f27cfea7546f8a322b5ef24"
dependencies = [
"refinery-core",
"refinery-macros",
]
[[package]]
name = "refinery-core"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19206547cd047e8f4dfa6b20c30d3ecaf24be05841b6aa0aa926a47a3d0662bb"
dependencies = [
"async-trait",
"cfg-if",
"lazy_static",
"log",
"regex",
"serde",
"siphasher",
"thiserror",
"time 0.3.22",
"tokio",
"tokio-postgres",
"toml 0.7.5",
"url",
"walkdir",
]
[[package]]
name = "refinery-macros"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d94d4b9241859ba19eaa5c04c86e782eb3aa0aae2c5868e0cfa90c856e58a174"
dependencies = [
"proc-macro2",
"quote",
"refinery-core",
"regex",
"syn 2.0.22",
]
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.8.4" version = "1.8.4"
@ -4676,12 +4572,6 @@ dependencies = [
"winreg 0.10.1", "winreg 0.10.1",
] ]
[[package]]
name = "retain_mut"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0"
[[package]] [[package]]
name = "ring" name = "ring"
version = "0.16.20" version = "0.16.20"
@ -5140,19 +5030,6 @@ dependencies = [
"stable_deref_trait", "stable_deref_trait",
] ]
[[package]]
name = "sha-1"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6"
dependencies = [
"block-buffer 0.9.0",
"cfg-if",
"cpufeatures",
"digest 0.9.0",
"opaque-debug",
]
[[package]] [[package]]
name = "sha1" name = "sha1"
version = "0.10.5" version = "0.10.5"
@ -5161,7 +5038,7 @@ checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"cpufeatures", "cpufeatures",
"digest 0.10.7", "digest",
] ]
[[package]] [[package]]
@ -5178,7 +5055,7 @@ checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"cpufeatures", "cpufeatures",
"digest 0.10.7", "digest",
] ]
[[package]] [[package]]
@ -6003,19 +5880,6 @@ dependencies = [
"tokio-util", "tokio-util",
] ]
[[package]]
name = "tokio-tungstenite"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "511de3f85caf1c98983545490c3d09685fa8eb634e57eec22bb4db271f46cbd8"
dependencies = [
"futures-util",
"log",
"pin-project",
"tokio",
"tungstenite 0.14.0",
]
[[package]] [[package]]
name = "tokio-tungstenite" name = "tokio-tungstenite"
version = "0.18.0" version = "0.18.0"
@ -6025,7 +5889,7 @@ dependencies = [
"futures-util", "futures-util",
"log", "log",
"tokio", "tokio",
"tungstenite 0.18.0", "tungstenite",
] ]
[[package]] [[package]]
@ -6251,25 +6115,6 @@ version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
[[package]]
name = "tungstenite"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0b2d8558abd2e276b0a8df5c05a2ec762609344191e5fd23e292c910e9165b5"
dependencies = [
"base64 0.13.1",
"byteorder",
"bytes",
"http",
"httparse",
"log",
"rand 0.8.5",
"sha-1",
"thiserror",
"url",
"utf-8",
]
[[package]] [[package]]
name = "tungstenite" name = "tungstenite"
version = "0.18.0" version = "0.18.0"


@ -34,18 +34,20 @@ default = ["custom-protocol"]
custom-protocol = ["tauri/custom-protocol"] custom-protocol = ["tauri/custom-protocol"]
[patch.crates-io] [patch.crates-io]
collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
collab-persistence = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } collab-persistence = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
#collab = { path = "../../../../AppFlowy-Collab/collab" } #collab = { path = "../../AppFlowy-Collab/collab" }
#collab-folder = { path = "../../../../AppFlowy-Collab/collab-folder" } #collab-folder = { path = "../../AppFlowy-Collab/collab-folder" }
#collab-document = { path = "../../../../AppFlowy-Collab/collab-document" } #collab-document = { path = "../../AppFlowy-Collab/collab-document" }
#collab-database = { path = "../../../../AppFlowy-Collab/collab-database" } #collab-database = { path = "../../AppFlowy-Collab/collab-database" }
#appflowy-integrate = { path = "../../../../AppFlowy-Collab/appflowy-integrate" } #appflowy-integrate = { path = "../../AppFlowy-Collab/appflowy-integrate" }
#collab-plugins = { path = "../../AppFlowy-Collab/collab-plugins" }


@ -2,7 +2,6 @@ export * from "./models/flowy-user";
export * from "./models/flowy-database2"; export * from "./models/flowy-database2";
export * from "./models/flowy-folder2"; export * from "./models/flowy-folder2";
export * from "./models/flowy-document2"; export * from "./models/flowy-document2";
export * from "./models/flowy-net";
export * from "./models/flowy-error"; export * from "./models/flowy-error";
export * from "./models/flowy-config"; export * from "./models/flowy-config";


@ -1,8 +0,0 @@
[package]
name = "flowy-server-config"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]


@ -1,14 +0,0 @@
pub fn add(left: usize, right: usize) -> usize {
left + right
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn it_works() {
let result = add(2, 2);
assert_eq!(result, 4);
}
}


@ -210,6 +210,8 @@
"user": "User", "user": "User",
"files": "Files", "files": "Files",
"open": "Open Settings", "open": "Open Settings",
"logout": "Logout",
"logoutPrompt": "Are you sure to logout?",
"supabaseSetting": "Supabase Setting" "supabaseSetting": "Supabase Setting"
}, },
"appearance": { "appearance": {

File diff suppressed because it is too large.


@ -6,13 +6,17 @@ members = [
"flowy-core", "flowy-core",
"dart-ffi", "dart-ffi",
"flowy-user", "flowy-user",
"flowy-user-deps",
"flowy-test", "flowy-test",
"flowy-sqlite", "flowy-sqlite",
"flowy-folder2", "flowy-folder2",
"flowy-folder-deps",
"flowy-notification", "flowy-notification",
"flowy-document2", "flowy-document2",
"flowy-document-deps",
"flowy-error", "flowy-error",
"flowy-database2", "flowy-database2",
"flowy-database-deps",
"flowy-task", "flowy-task",
"flowy-server", "flowy-server",
"flowy-server-config", "flowy-server-config",
@ -34,15 +38,17 @@ opt-level = 3
incremental = false incremental = false
[patch.crates-io] [patch.crates-io]
collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f420738" } appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "5783a5" }
#collab = { path = "../../../AppFlowy-Collab/collab" } #collab = { path = "../AppFlowy-Collab/collab" }
#collab-folder = { path = "../../../AppFlowy-Collab/collab-folder" } #collab-folder = { path = "../AppFlowy-Collab/collab-folder" }
#collab-database= { path = "../../../AppFlowy-Collab/collab-database" } #collab-database= { path = "../AppFlowy-Collab/collab-database" }
#collab-document = { path = "../../../AppFlowy-Collab/collab-document" } #collab-document = { path = "../AppFlowy-Collab/collab-document" }
#appflowy-integrate = { path = "../../../AppFlowy-Collab/appflowy-integrate" } #collab-plugins = { path = "../AppFlowy-Collab/collab-plugins" }
#appflowy-integrate = { path = "../AppFlowy-Collab/appflowy-integrate" }


@ -13,8 +13,6 @@ protobuf = {version = "2.28.0"}
bytes = { version = "1.4" } bytes = { version = "1.4" }
flowy-error = { path = "../flowy-error" } flowy-error = { path = "../flowy-error" }
strum_macros = "0.21" strum_macros = "0.21"
appflowy-integrate = {version = "0.1.0" }
flowy-server = { path = "../flowy-server" }
[build-dependencies] [build-dependencies]
flowy-codegen = { path = "../../../shared-lib/flowy-codegen"} flowy-codegen = { path = "../../../shared-lib/flowy-codegen"}


@ -1,7 +1,4 @@
use appflowy_integrate::config::AWSDynamoDBConfig;
use flowy_derive::ProtoBuf; use flowy_derive::ProtoBuf;
use flowy_error::FlowyError;
#[derive(Default, ProtoBuf)] #[derive(Default, ProtoBuf)]
pub struct KeyValuePB { pub struct KeyValuePB {
@ -17,34 +14,3 @@ pub struct KeyPB {
#[pb(index = 1)] #[pb(index = 1)]
pub key: String, pub key: String,
} }
#[derive(Default, ProtoBuf)]
pub struct CollabPluginConfigPB {
#[pb(index = 1, one_of)]
pub aws_config: Option<AWSDynamoDBConfigPB>,
}
#[derive(Default, ProtoBuf)]
pub struct AWSDynamoDBConfigPB {
#[pb(index = 1)]
pub access_key_id: String,
#[pb(index = 2)]
pub secret_access_key: String,
// Region list: https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.RegionsAndAvailabilityZones.html
#[pb(index = 3)]
pub region: String,
}
impl TryFrom<AWSDynamoDBConfigPB> for AWSDynamoDBConfig {
type Error = FlowyError;
fn try_from(config: AWSDynamoDBConfigPB) -> Result<Self, Self::Error> {
Ok(AWSDynamoDBConfig {
access_key_id: config.access_key_id,
secret_access_key: config.secret_access_key,
region: config.region,
enable: true,
})
}
}


@ -1,10 +1,8 @@
use appflowy_integrate::config::AWSDynamoDBConfig;
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use flowy_sqlite::kv::KV; use flowy_sqlite::kv::KV;
use lib_dispatch::prelude::{data_result_ok, AFPluginData, DataResult}; use lib_dispatch::prelude::{data_result_ok, AFPluginData, DataResult};
use crate::entities::{CollabPluginConfigPB, KeyPB, KeyValuePB}; use crate::entities::{KeyPB, KeyValuePB};
pub(crate) async fn set_key_value_handler(data: AFPluginData<KeyValuePB>) -> FlowyResult<()> { pub(crate) async fn set_key_value_handler(data: AFPluginData<KeyValuePB>) -> FlowyResult<()> {
let data = data.into_inner(); let data = data.into_inner();
@ -33,15 +31,3 @@ pub(crate) async fn remove_key_value_handler(data: AFPluginData<KeyPB>) -> Flowy
KV::remove(&data.key); KV::remove(&data.key);
Ok(()) Ok(())
} }
pub(crate) async fn set_collab_plugin_config_handler(
data: AFPluginData<CollabPluginConfigPB>,
) -> FlowyResult<()> {
let config = data.into_inner();
if let Some(aws_config_pb) = config.aws_config {
if let Ok(aws_config) = AWSDynamoDBConfig::try_from(aws_config_pb) {
aws_config.write_env();
}
}
Ok(())
}


@ -11,10 +11,6 @@ pub fn init() -> AFPlugin {
.event(ConfigEvent::SetKeyValue, set_key_value_handler) .event(ConfigEvent::SetKeyValue, set_key_value_handler)
.event(ConfigEvent::GetKeyValue, get_key_value_handler) .event(ConfigEvent::GetKeyValue, get_key_value_handler)
.event(ConfigEvent::RemoveKeyValue, remove_key_value_handler) .event(ConfigEvent::RemoveKeyValue, remove_key_value_handler)
.event(
ConfigEvent::SetCollabPluginConfig,
set_collab_plugin_config_handler,
)
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash, Display, ProtoBuf_Enum, Flowy_Event)] #[derive(Debug, Clone, PartialEq, Eq, Hash, Display, ProtoBuf_Enum, Flowy_Event)]
@ -28,7 +24,4 @@ pub enum ConfigEvent {
#[event(input = "KeyPB")] #[event(input = "KeyPB")]
RemoveKeyValue = 2, RemoveKeyValue = 2,
#[event(input = "CollabPluginConfigPB")]
SetCollabPluginConfig = 4,
} }


@ -9,17 +9,21 @@ edition = "2018"
lib-dispatch = { path = "../lib-dispatch" } lib-dispatch = { path = "../lib-dispatch" }
lib-log = { path = "../lib-log" } lib-log = { path = "../lib-log" }
flowy-user = { path = "../flowy-user" } flowy-user = { path = "../flowy-user" }
flowy-user-deps = { path = "../flowy-user-deps" }
flowy-net = { path = "../flowy-net" } flowy-net = { path = "../flowy-net" }
flowy-folder2 = { path = "../flowy-folder2" } flowy-folder2 = { path = "../flowy-folder2" }
flowy-folder-deps = { path = "../flowy-folder-deps" }
flowy-database2 = { path = "../flowy-database2" } flowy-database2 = { path = "../flowy-database2" }
flowy-database-deps = { path = "../flowy-database-deps" }
flowy-sqlite = { path = "../flowy-sqlite" } flowy-sqlite = { path = "../flowy-sqlite" }
flowy-document2 = { path = "../flowy-document2" } flowy-document2 = { path = "../flowy-document2" }
flowy-document-deps = { path = "../flowy-document-deps" }
flowy-error = { path = "../flowy-error" } flowy-error = { path = "../flowy-error" }
flowy-task = { path = "../flowy-task" } flowy-task = { path = "../flowy-task" }
flowy-server = { path = "../flowy-server" } flowy-server = { path = "../flowy-server" }
flowy-server-config = { path = "../flowy-server-config" } flowy-server-config = { path = "../flowy-server-config" }
flowy-config = { path = "../flowy-config" } flowy-config = { path = "../flowy-config" }
appflowy-integrate = { version = "0.1.0" } appflowy-integrate = { version = "0.1.0", features = ["postgres_storage_plugin", "snapshot_plugin"] }
diesel = { version = "1.4.8", features = ["sqlite"] } diesel = { version = "1.4.8", features = ["sqlite"] }
uuid = { version = "1.3.3", features = ["v4"] } uuid = { version = "1.3.3", features = ["v4"] }
@ -30,7 +34,6 @@ tokio = { version = "1.26", features = ["full"] }
console-subscriber = { version = "0.1.8", optional = true } console-subscriber = { version = "0.1.8", optional = true }
parking_lot = "0.12.1" parking_lot = "0.12.1"
lib-ws = { path = "../../../shared-lib/lib-ws" }
lib-infra = { path = "../../../shared-lib/lib-infra" } lib-infra = { path = "../../../shared-lib/lib-infra" }
serde = "1.0" serde = "1.0"
serde_json = "1.0" serde_json = "1.0"


@ -4,23 +4,23 @@ use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
use appflowy_integrate::RocksCollabDB; use appflowy_integrate::RocksCollabDB;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use flowy_database2::deps::{DatabaseCloudService, DatabaseUser2}; use flowy_database2::{DatabaseManager, DatabaseUser};
use flowy_database2::DatabaseManager2; use flowy_database_deps::cloud::DatabaseCloudService;
use flowy_error::FlowyError; use flowy_error::FlowyError;
use flowy_task::TaskDispatcher; use flowy_task::TaskDispatcher;
use flowy_user::services::UserSession; use flowy_user::services::UserSession;
pub struct Database2DepsResolver(); pub struct DatabaseDepsResolver();
impl Database2DepsResolver { impl DatabaseDepsResolver {
pub async fn resolve( pub async fn resolve(
user_session: Weak<UserSession>, user_session: Weak<UserSession>,
task_scheduler: Arc<RwLock<TaskDispatcher>>, task_scheduler: Arc<RwLock<TaskDispatcher>>,
collab_builder: Arc<AppFlowyCollabBuilder>, collab_builder: Arc<AppFlowyCollabBuilder>,
cloud_service: Arc<dyn DatabaseCloudService>, cloud_service: Arc<dyn DatabaseCloudService>,
) -> Arc<DatabaseManager2> { ) -> Arc<DatabaseManager> {
let user = Arc::new(DatabaseUserImpl(user_session)); let user = Arc::new(DatabaseUserImpl(user_session));
Arc::new(DatabaseManager2::new( Arc::new(DatabaseManager::new(
user, user,
task_scheduler, task_scheduler,
collab_builder, collab_builder,
@ -30,7 +30,7 @@ impl Database2DepsResolver {
} }
struct DatabaseUserImpl(Weak<UserSession>); struct DatabaseUserImpl(Weak<UserSession>);
impl DatabaseUser2 for DatabaseUserImpl { impl DatabaseUser for DatabaseUserImpl {
fn user_id(&self) -> Result<i64, FlowyError> { fn user_id(&self) -> Result<i64, FlowyError> {
self self
.0 .0
@ -47,7 +47,7 @@ impl DatabaseUser2 for DatabaseUserImpl {
.token() .token()
} }
fn collab_db(&self, uid: i64) -> Result<Arc<RocksCollabDB>, FlowyError> { fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError> {
self self
.0 .0
.upgrade() .upgrade()


@ -3,17 +3,17 @@ use std::sync::{Arc, Weak};
use appflowy_integrate::collab_builder::AppFlowyCollabBuilder; use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
use appflowy_integrate::RocksCollabDB; use appflowy_integrate::RocksCollabDB;
use flowy_database2::DatabaseManager2; use flowy_database2::DatabaseManager;
use flowy_document2::deps::{DocumentCloudService, DocumentUser}; use flowy_document2::manager::{DocumentManager, DocumentUser};
use flowy_document2::manager::DocumentManager; use flowy_document_deps::cloud::DocumentCloudService;
use flowy_error::FlowyError; use flowy_error::FlowyError;
use flowy_user::services::UserSession; use flowy_user::services::UserSession;
pub struct Document2DepsResolver(); pub struct DocumentDepsResolver();
impl Document2DepsResolver { impl DocumentDepsResolver {
pub fn resolve( pub fn resolve(
user_session: Weak<UserSession>, user_session: Weak<UserSession>,
_database_manager: &Arc<DatabaseManager2>, _database_manager: &Arc<DatabaseManager>,
collab_builder: Arc<AppFlowyCollabBuilder>, collab_builder: Arc<AppFlowyCollabBuilder>,
cloud_service: Arc<dyn DocumentCloudService>, cloud_service: Arc<dyn DocumentCloudService>,
) -> Arc<DocumentManager> { ) -> Arc<DocumentManager> {
@ -44,7 +44,7 @@ impl DocumentUser for DocumentUserImpl {
.token() .token()
} }
fn collab_db(&self, uid: i64) -> Result<Arc<RocksCollabDB>, FlowyError> { fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError> {
self self
.0 .0
.upgrade() .upgrade()


@@ -10,29 +10,29 @@ use tokio::sync::RwLock;
 use flowy_database2::entities::DatabaseLayoutPB;
 use flowy_database2::services::share::csv::CSVFormat;
 use flowy_database2::template::{make_default_board, make_default_calendar, make_default_grid};
-use flowy_database2::DatabaseManager2;
+use flowy_database2::DatabaseManager;
 use flowy_document2::entities::DocumentDataPB;
 use flowy_document2::manager::DocumentManager;
 use flowy_document2::parser::json::parser::JsonToDocumentParser;
 use flowy_error::FlowyError;
-use flowy_folder2::deps::{FolderCloudService, FolderUser};
 use flowy_folder2::entities::ViewLayoutPB;
-use flowy_folder2::manager::FolderManager;
+use flowy_folder2::manager::{FolderManager, FolderUser};
 use flowy_folder2::share::ImportType;
 use flowy_folder2::view_operation::{
   FolderOperationHandler, FolderOperationHandlers, View, WorkspaceViewBuilder,
 };
 use flowy_folder2::ViewLayout;
+use flowy_folder_deps::cloud::FolderCloudService;
 use flowy_user::services::UserSession;
 use lib_dispatch::prelude::ToBytes;
 use lib_infra::future::FutureResult;
-pub struct Folder2DepsResolver();
-impl Folder2DepsResolver {
+pub struct FolderDepsResolver();
+impl FolderDepsResolver {
   pub async fn resolve(
     user_session: Weak<UserSession>,
     document_manager: &Arc<DocumentManager>,
-    database_manager: &Arc<DatabaseManager2>,
+    database_manager: &Arc<DatabaseManager>,
     collab_builder: Arc<AppFlowyCollabBuilder>,
     folder_cloud: Arc<dyn FolderCloudService>,
   ) -> Arc<FolderManager> {
@@ -49,7 +49,7 @@ impl Folder2DepsResolver {
 fn folder_operation_handlers(
   document_manager: Arc<DocumentManager>,
-  database_manager: Arc<DatabaseManager2>,
+  database_manager: Arc<DatabaseManager>,
 ) -> FolderOperationHandlers {
   let mut map: HashMap<ViewLayout, Arc<dyn FolderOperationHandler + Send + Sync>> = HashMap::new();
@@ -81,7 +81,7 @@ impl FolderUser for FolderUserImpl {
       .token()
   }
-  fn collab_db(&self, uid: i64) -> Result<Arc<RocksCollabDB>, FlowyError> {
+  fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError> {
     self
       .0
       .upgrade()
@@ -218,7 +218,7 @@ impl FolderOperationHandler for DocumentFolderOperation {
   }
 }
-struct DatabaseFolderOperation(Arc<DatabaseManager2>);
+struct DatabaseFolderOperation(Arc<DatabaseManager>);
 impl FolderOperationHandler for DatabaseFolderOperation {
   fn close_view(&self, view_id: &str) -> FutureResult<(), FlowyError> {
     let database_manager = self.0.clone();


@@ -1,11 +1,11 @@
 pub use collab_deps::*;
 pub use database_deps::*;
-pub use document2_deps::*;
-pub use folder2_deps::*;
+pub use document_deps::*;
+pub use folder_deps::*;
 mod collab_deps;
-mod document2_deps;
-mod folder2_deps;
+mod document_deps;
+mod folder_deps;
 mod util;
 mod database_deps;


@@ -2,25 +2,28 @@ use std::collections::HashMap;
 use std::sync::Arc;
 use appflowy_integrate::collab_builder::{CollabStorageProvider, CollabStorageType};
-use appflowy_integrate::RemoteCollabStorage;
+use appflowy_integrate::{CollabType, RemoteCollabStorage, YrsDocAction};
 use parking_lot::RwLock;
 use serde_repr::*;
-use flowy_database2::deps::{
-  CollabObjectUpdate, CollabObjectUpdateByOid, DatabaseCloudService, DatabaseSnapshot,
-};
-use flowy_document2::deps::{DocumentCloudService, DocumentData, DocumentSnapshot};
+use flowy_database_deps::cloud::*;
+use flowy_document2::deps::DocumentData;
+use flowy_document_deps::cloud::{DocumentCloudService, DocumentSnapshot};
 use flowy_error::{ErrorCode, FlowyError, FlowyResult};
-use flowy_folder2::deps::{FolderCloudService, FolderData, FolderSnapshot, Workspace};
-use flowy_server::local_server::LocalServer;
+use flowy_folder_deps::cloud::*;
+use flowy_server::local_server::{LocalServer, LocalServerDB};
 use flowy_server::self_host::configuration::self_host_server_configuration;
 use flowy_server::self_host::SelfHostServer;
 use flowy_server::supabase::SupabaseServer;
 use flowy_server::AppFlowyServer;
 use flowy_server_config::supabase_config::SupabaseConfiguration;
 use flowy_sqlite::kv::KV;
-use flowy_user::event_map::{UserAuthService, UserCloudServiceProvider};
-use flowy_user::services::AuthType;
+use flowy_user::event_map::UserCloudServiceProvider;
+use flowy_user::services::database::{
+  get_user_profile, get_user_workspace, open_collab_db, open_user_db,
+};
+use flowy_user_deps::cloud::UserService;
+use flowy_user_deps::entities::*;
 use lib_infra::future::FutureResult;
 use crate::AppFlowyCoreConfig;
@@ -45,7 +48,7 @@ pub enum ServerProviderType {
/// The [AppFlowyServerProvider] provides list of [AppFlowyServer] base on the [AuthType]. Using /// The [AppFlowyServerProvider] provides list of [AppFlowyServer] base on the [AuthType]. Using
/// the auth type, the [AppFlowyServerProvider] will create a new [AppFlowyServer] if it doesn't /// the auth type, the [AppFlowyServerProvider] will create a new [AppFlowyServer] if it doesn't
/// exist. /// exist.
/// Each server implements the [AppFlowyServer] trait, which provides the [UserAuthService], etc. /// Each server implements the [AppFlowyServer] trait, which provides the [UserService], etc.
pub struct AppFlowyServerProvider { pub struct AppFlowyServerProvider {
config: AppFlowyCoreConfig, config: AppFlowyCoreConfig,
provider_type: RwLock<ServerProviderType>, provider_type: RwLock<ServerProviderType>,
@@ -78,7 +81,11 @@ impl AppFlowyServerProvider {
let server = match provider_type { let server = match provider_type {
ServerProviderType::Local => { ServerProviderType::Local => {
-        let server = Arc::new(LocalServer::new(&self.config.storage_path));
+        let local_db = Arc::new(LocalServerDBImpl {
+          storage_path: self.config.storage_path.clone(),
+        });
+        let server = Arc::new(LocalServer::new(local_db));
Ok::<Arc<dyn AppFlowyServer>, FlowyError>(server) Ok::<Arc<dyn AppFlowyServer>, FlowyError>(server)
}, },
ServerProviderType::SelfHosted => { ServerProviderType::SelfHosted => {
@@ -142,9 +149,9 @@ impl UserCloudServiceProvider for AppFlowyServerProvider {
} }
} }
/// Returns the [UserAuthService] base on the current [ServerProviderType]. /// Returns the [UserService] base on the current [ServerProviderType].
/// Creates a new [AppFlowyServer] if it doesn't exist. /// Creates a new [AppFlowyServer] if it doesn't exist.
fn get_auth_service(&self) -> Result<Arc<dyn UserAuthService>, FlowyError> { fn get_user_service(&self) -> Result<Arc<dyn UserService>, FlowyError> {
Ok( Ok(
self self
.get_provider(&self.provider_type.read())? .get_provider(&self.provider_type.read())?
@@ -154,13 +161,13 @@ impl UserCloudServiceProvider for AppFlowyServerProvider {
} }
impl FolderCloudService for AppFlowyServerProvider { impl FolderCloudService for AppFlowyServerProvider {
fn create_workspace(&self, uid: i64, name: &str) -> FutureResult<Workspace, FlowyError> { fn create_workspace(&self, uid: i64, name: &str) -> FutureResult<Workspace, Error> {
let server = self.get_provider(&self.provider_type.read()); let server = self.get_provider(&self.provider_type.read());
let name = name.to_string(); let name = name.to_string();
FutureResult::new(async move { server?.folder_service().create_workspace(uid, &name).await }) FutureResult::new(async move { server?.folder_service().create_workspace(uid, &name).await })
} }
fn get_folder_data(&self, workspace_id: &str) -> FutureResult<Option<FolderData>, FlowyError> { fn get_folder_data(&self, workspace_id: &str) -> FutureResult<Option<FolderData>, Error> {
let server = self.get_provider(&self.provider_type.read()); let server = self.get_provider(&self.provider_type.read());
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
FutureResult::new(async move { FutureResult::new(async move {
@@ -174,7 +181,7 @@ impl FolderCloudService for AppFlowyServerProvider {
fn get_folder_latest_snapshot( fn get_folder_latest_snapshot(
&self, &self,
workspace_id: &str, workspace_id: &str,
) -> FutureResult<Option<FolderSnapshot>, FlowyError> { ) -> FutureResult<Option<FolderSnapshot>, Error> {
let workspace_id = workspace_id.to_string(); let workspace_id = workspace_id.to_string();
let server = self.get_provider(&self.provider_type.read()); let server = self.get_provider(&self.provider_type.read());
FutureResult::new(async move { FutureResult::new(async move {
@@ -185,11 +192,7 @@ impl FolderCloudService for AppFlowyServerProvider {
     })
   }
-  fn get_folder_updates(
-    &self,
-    workspace_id: &str,
-    uid: i64,
-  ) -> FutureResult<Vec<Vec<u8>>, FlowyError> {
+  fn get_folder_updates(&self, workspace_id: &str, uid: i64) -> FutureResult<Vec<Vec<u8>>, Error> {
     let workspace_id = workspace_id.to_string();
     let server = self.get_provider(&self.provider_type.read());
     FutureResult::new(async move {
@@ -209,13 +212,17 @@ impl FolderCloudService for AppFlowyServerProvider {
   }
 }
 impl DatabaseCloudService for AppFlowyServerProvider {
-  fn get_collab_update(&self, object_id: &str) -> FutureResult<CollabObjectUpdate, FlowyError> {
+  fn get_collab_update(
+    &self,
+    object_id: &str,
+    object_ty: CollabType,
+  ) -> FutureResult<CollabObjectUpdate, Error> {
     let server = self.get_provider(&self.provider_type.read());
     let database_id = object_id.to_string();
     FutureResult::new(async move {
       server?
         .database_service()
-        .get_collab_update(&database_id)
+        .get_collab_update(&database_id, object_ty)
         .await
     })
   }
@@ -223,12 +230,13 @@ impl DatabaseCloudService for AppFlowyServerProvider {
   fn batch_get_collab_updates(
     &self,
     object_ids: Vec<String>,
-  ) -> FutureResult<CollabObjectUpdateByOid, FlowyError> {
+    object_ty: CollabType,
+  ) -> FutureResult<CollabObjectUpdateByOid, Error> {
     let server = self.get_provider(&self.provider_type.read());
     FutureResult::new(async move {
       server?
         .database_service()
-        .batch_get_collab_updates(object_ids)
+        .batch_get_collab_updates(object_ids, object_ty)
         .await
     })
   }
@@ -236,7 +244,7 @@ impl DatabaseCloudService for AppFlowyServerProvider {
fn get_collab_latest_snapshot( fn get_collab_latest_snapshot(
&self, &self,
object_id: &str, object_id: &str,
) -> FutureResult<Option<DatabaseSnapshot>, FlowyError> { ) -> FutureResult<Option<DatabaseSnapshot>, Error> {
let server = self.get_provider(&self.provider_type.read()); let server = self.get_provider(&self.provider_type.read());
let database_id = object_id.to_string(); let database_id = object_id.to_string();
FutureResult::new(async move { FutureResult::new(async move {
@@ -249,7 +257,7 @@ impl DatabaseCloudService for AppFlowyServerProvider {
} }
impl DocumentCloudService for AppFlowyServerProvider { impl DocumentCloudService for AppFlowyServerProvider {
fn get_document_updates(&self, document_id: &str) -> FutureResult<Vec<Vec<u8>>, FlowyError> { fn get_document_updates(&self, document_id: &str) -> FutureResult<Vec<Vec<u8>>, Error> {
let server = self.get_provider(&self.provider_type.read()); let server = self.get_provider(&self.provider_type.read());
let document_id = document_id.to_string(); let document_id = document_id.to_string();
FutureResult::new(async move { FutureResult::new(async move {
@@ -263,7 +271,7 @@ impl DocumentCloudService for AppFlowyServerProvider {
fn get_document_latest_snapshot( fn get_document_latest_snapshot(
&self, &self,
document_id: &str, document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, FlowyError> { ) -> FutureResult<Option<DocumentSnapshot>, Error> {
let server = self.get_provider(&self.provider_type.read()); let server = self.get_provider(&self.provider_type.read());
let document_id = document_id.to_string(); let document_id = document_id.to_string();
FutureResult::new(async move { FutureResult::new(async move {
@@ -274,7 +282,7 @@ impl DocumentCloudService for AppFlowyServerProvider {
}) })
} }
fn get_document_data(&self, document_id: &str) -> FutureResult<Option<DocumentData>, FlowyError> { fn get_document_data(&self, document_id: &str) -> FutureResult<Option<DocumentData>, Error> {
let server = self.get_provider(&self.provider_type.read()); let server = self.get_provider(&self.provider_type.read());
let document_id = document_id.to_string(); let document_id = document_id.to_string();
FutureResult::new(async move { FutureResult::new(async move {
@@ -334,3 +342,31 @@ fn current_server_provider() -> ServerProviderType {
Some(provider_type) => provider_type, Some(provider_type) => provider_type,
} }
} }
struct LocalServerDBImpl {
storage_path: String,
}
impl LocalServerDB for LocalServerDBImpl {
fn get_user_profile(&self, uid: i64) -> Result<Option<UserProfile>, FlowyError> {
let sqlite_db = open_user_db(&self.storage_path, uid)?;
let user_profile = get_user_profile(&sqlite_db, uid).ok();
Ok(user_profile)
}
fn get_user_workspace(&self, uid: i64) -> Result<Option<UserWorkspace>, FlowyError> {
let sqlite_db = open_user_db(&self.storage_path, uid)?;
let user_workspace = get_user_workspace(&sqlite_db, uid)?;
Ok(user_workspace)
}
fn get_collab_updates(&self, uid: i64, object_id: &str) -> Result<Vec<Vec<u8>>, FlowyError> {
let collab_db = open_collab_db(&self.storage_path, uid)?;
let read_txn = collab_db.read_txn();
let updates = read_txn
.get_all_updates(uid, object_id)
.map_err(|e| FlowyError::internal().context(format!("Failed to open collab db: {:?}", e)))?;
Ok(updates)
}
}
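The doc comments earlier in this file describe AppFlowyServerProvider as creating one AppFlowyServer per provider type on demand and reusing it afterwards. The sketch below only illustrates that lazy-caching shape with plain std types; the enum, trait, and method names here are invented for illustration and are not the crate's real ServerProviderType, AppFlowyServer, or get_provider signatures.

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
enum ProviderType {
  Local,
  SelfHosted,
}

// Stand-in for the AppFlowyServer trait; the sketch only needs a trait object.
trait Server: Send + Sync {}

struct LocalServer;
impl Server for LocalServer {}

#[derive(Default)]
struct ServerProvider {
  cache: Mutex<HashMap<ProviderType, Arc<dyn Server>>>,
}

impl ServerProvider {
  fn get_provider(&self, ty: ProviderType) -> Arc<dyn Server> {
    let mut cache = self.cache.lock().unwrap();
    cache
      .entry(ty)
      .or_insert_with(|| {
        // A real provider would match on `ty` and build the matching server;
        // this sketch always builds the local one.
        Arc::new(LocalServer) as Arc<dyn Server>
      })
      .clone()
  }
}

fn main() {
  let provider = ServerProvider::default();
  let first = provider.get_provider(ProviderType::Local);
  let second = provider.get_provider(ProviderType::Local);
  // The second call reuses the cached server instead of constructing a new one.
  assert!(Arc::ptr_eq(&first, &second));
}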


@@ -11,17 +11,16 @@ use std::{
 use appflowy_integrate::collab_builder::{AppFlowyCollabBuilder, CollabStorageType};
 use tokio::sync::RwLock;
-use tracing::debug;
-use flowy_database2::DatabaseManager2;
-use flowy_document2::manager::DocumentManager as DocumentManager2;
+use flowy_database2::DatabaseManager;
+use flowy_document2::manager::DocumentManager;
 use flowy_error::FlowyResult;
 use flowy_folder2::manager::{FolderInitializeData, FolderManager};
 use flowy_sqlite::kv::KV;
 use flowy_task::{TaskDispatcher, TaskRunner};
-use flowy_user::entities::UserProfile;
 use flowy_user::event_map::{SignUpContext, UserCloudServiceProvider, UserStatusCallback};
-use flowy_user::services::{get_supabase_config, AuthType, UserSession, UserSessionConfig};
+use flowy_user::services::{get_supabase_config, UserSession, UserSessionConfig};
+use flowy_user_deps::entities::{AuthType, UserProfile, UserWorkspace};
 use lib_dispatch::prelude::*;
 use lib_dispatch::runtime::tokio_default_runtime;
 use lib_infra::future::{to_fut, Fut};
@@ -113,9 +112,9 @@ pub struct AppFlowyCore {
   #[allow(dead_code)]
   pub config: AppFlowyCoreConfig,
   pub user_session: Arc<UserSession>,
-  pub document_manager2: Arc<DocumentManager2>,
+  pub document_manager: Arc<DocumentManager>,
   pub folder_manager: Arc<FolderManager>,
-  pub database_manager: Arc<DatabaseManager2>,
+  pub database_manager: Arc<DatabaseManager>,
   pub event_dispatcher: Arc<AFPluginDispatcher>,
   pub server_provider: Arc<AppFlowyServerProvider>,
   pub task_dispatcher: Arc<RwLock<TaskDispatcher>>,
@@ -135,7 +134,7 @@ impl AppFlowyCore {
     // Init the key value database
     init_kv(&config.storage_path);
-    debug!("🔥 {:?}", &config);
+    tracing::info!("🔥 {:?}", &config);
     let runtime = tokio_default_runtime().unwrap();
     let task_scheduler = TaskDispatcher::new(Duration::from_secs(2));
     let task_dispatcher = Arc::new(RwLock::new(task_scheduler));
@@ -146,52 +145,61 @@ impl AppFlowyCore {
       get_supabase_config(),
     ));
-    let (user_session, folder_manager, server_provider, database_manager, document_manager2) =
-      runtime.block_on(async {
+    let (
+      user_session,
+      folder_manager,
+      server_provider,
+      database_manager,
+      document_manager,
+      collab_builder,
+    ) = runtime.block_on(async {
       let user_session = mk_user_session(&config, server_provider.clone());
       /// The shared collab builder is used to build the [Collab] instance. The plugins will be loaded
       /// on demand based on the [CollabPluginConfig].
       let collab_builder = Arc::new(AppFlowyCollabBuilder::new(
        server_provider.clone(),
        Some(Arc::new(SnapshotDBImpl(Arc::downgrade(&user_session)))),
       ));
-      let database_manager2 = Database2DepsResolver::resolve(
+      let database_manager = DatabaseDepsResolver::resolve(
        Arc::downgrade(&user_session),
        task_dispatcher.clone(),
        collab_builder.clone(),
        server_provider.clone(),
       )
       .await;
-      let document_manager2 = Document2DepsResolver::resolve(
+      let document_manager = DocumentDepsResolver::resolve(
        Arc::downgrade(&user_session),
-        &database_manager2,
+        &database_manager,
        collab_builder.clone(),
        server_provider.clone(),
       );
-      let folder_manager = Folder2DepsResolver::resolve(
+      let folder_manager = FolderDepsResolver::resolve(
        Arc::downgrade(&user_session),
-        &document_manager2,
-        &database_manager2,
-        collab_builder,
+        &document_manager,
+        &database_manager,
+        collab_builder.clone(),
        server_provider.clone(),
       )
       .await;
       (
        user_session,
        folder_manager,
        server_provider,
-        database_manager2,
-        document_manager2,
+        database_manager,
+        document_manager,
+        collab_builder,
       )
     });
     let user_status_listener = UserStatusCallbackImpl {
+      collab_builder,
       folder_manager: folder_manager.clone(),
       database_manager: database_manager.clone(),
+      document_manager: document_manager.clone(),
       config: config.clone(),
     };
@@ -204,17 +212,17 @@ impl AppFlowyCore {
     let event_dispatcher = Arc::new(AFPluginDispatcher::construct(runtime, || {
       make_plugins(
-        &folder_manager,
-        &database_manager,
-        &user_session,
-        &document_manager2,
+        Arc::downgrade(&folder_manager),
+        Arc::downgrade(&database_manager),
+        Arc::downgrade(&user_session),
+        Arc::downgrade(&document_manager),
       )
     }));
     Self {
       config,
       user_session,
-      document_manager2,
+      document_manager,
       folder_manager,
       database_manager,
       event_dispatcher,
@@ -223,6 +231,7 @@ impl AppFlowyCore {
     }
   }
+  /// Only expose the dispatcher in test
   pub fn dispatcher(&self) -> Arc<AFPluginDispatcher> {
     self.event_dispatcher.clone()
   }
@@ -254,8 +263,10 @@ fn mk_user_session(
 }
 struct UserStatusCallbackImpl {
+  collab_builder: Arc<AppFlowyCollabBuilder>,
   folder_manager: Arc<FolderManager>,
-  database_manager: Arc<DatabaseManager2>,
+  database_manager: Arc<DatabaseManager>,
+  document_manager: Arc<DocumentManager>,
   #[allow(dead_code)]
   config: AppFlowyCoreConfig,
 }
@@ -263,32 +274,56 @@ struct UserStatusCallbackImpl {
impl UserStatusCallback for UserStatusCallbackImpl { impl UserStatusCallback for UserStatusCallbackImpl {
fn auth_type_did_changed(&self, _auth_type: AuthType) {} fn auth_type_did_changed(&self, _auth_type: AuthType) {}
fn did_init(&self, user_id: i64, workspace_id: &str) -> Fut<FlowyResult<()>> { fn did_init(&self, user_id: i64, user_workspace: &UserWorkspace) -> Fut<FlowyResult<()>> {
let user_id = user_id.to_owned(); let user_id = user_id.to_owned();
let workspace_id = workspace_id.to_owned(); let user_workspace = user_workspace.clone();
let collab_builder = self.collab_builder.clone();
let folder_manager = self.folder_manager.clone(); let folder_manager = self.folder_manager.clone();
let database_manager = self.database_manager.clone(); let database_manager = self.database_manager.clone();
let document_manager = self.document_manager.clone();
to_fut(async move { to_fut(async move {
collab_builder.initialize(user_workspace.id.clone());
folder_manager folder_manager
.initialize(user_id, &workspace_id, FolderInitializeData::Empty) .initialize(user_id, &user_workspace.id, FolderInitializeData::Empty)
.await?;
database_manager
.initialize(
user_id,
user_workspace.id.clone(),
user_workspace.database_storage_id,
)
.await?;
document_manager
.initialize(user_id, user_workspace.id)
.await?; .await?;
database_manager.initialize(user_id).await?;
Ok(()) Ok(())
}) })
} }
fn did_sign_in(&self, user_id: i64, workspace_id: &str) -> Fut<FlowyResult<()>> { fn did_sign_in(&self, user_id: i64, user_workspace: &UserWorkspace) -> Fut<FlowyResult<()>> {
let user_id = user_id.to_owned(); let user_id = user_id.to_owned();
let workspace_id = workspace_id.to_owned(); let user_workspace = user_workspace.clone();
let collab_builder = self.collab_builder.clone();
let folder_manager = self.folder_manager.clone(); let folder_manager = self.folder_manager.clone();
let database_manager = self.database_manager.clone(); let database_manager = self.database_manager.clone();
let document_manager = self.document_manager.clone();
to_fut(async move { to_fut(async move {
collab_builder.initialize(user_workspace.id.clone());
folder_manager folder_manager
.initialize_when_sign_in(user_id, &workspace_id) .initialize_with_workspace_id(user_id, &user_workspace.id)
.await?;
database_manager
.initialize(
user_id,
user_workspace.id.clone(),
user_workspace.database_storage_id,
)
.await?;
document_manager
.initialize(user_id, user_workspace.id)
.await?; .await?;
database_manager.initialize(user_id).await?;
Ok(()) Ok(())
}) })
} }
@@ -297,25 +332,36 @@ impl UserStatusCallback for UserStatusCallbackImpl {
&self, &self,
context: SignUpContext, context: SignUpContext,
user_profile: &UserProfile, user_profile: &UserProfile,
user_workspace: &UserWorkspace,
) -> Fut<FlowyResult<()>> { ) -> Fut<FlowyResult<()>> {
let user_profile = user_profile.clone(); let user_profile = user_profile.clone();
let collab_builder = self.collab_builder.clone();
let folder_manager = self.folder_manager.clone(); let folder_manager = self.folder_manager.clone();
let database_manager = self.database_manager.clone(); let database_manager = self.database_manager.clone();
let user_workspace = user_workspace.clone();
let document_manager = self.document_manager.clone();
to_fut(async move { to_fut(async move {
collab_builder.initialize(user_workspace.id.clone());
folder_manager folder_manager
.initialize_when_sign_up( .initialize_with_new_user(
user_profile.id, user_profile.id,
&user_profile.token, &user_profile.token,
context.is_new, context.is_new,
context.local_folder, context.local_folder,
&user_profile.workspace_id, &user_workspace.id,
)
.await?;
database_manager
.initialize_with_new_user(
user_profile.id,
user_workspace.id.clone(),
user_workspace.database_storage_id,
) )
.await?; .await?;
database_manager document_manager
.initialize_with_new_user(user_profile.id, &user_profile.token) .initialize_with_new_user(user_profile.id, user_workspace.id)
.await?; .await?;
Ok(()) Ok(())
}) })
} }
@@ -327,6 +373,37 @@ impl UserStatusCallback for UserStatusCallbackImpl {
Ok(()) Ok(())
}) })
} }
fn open_workspace(&self, user_id: i64, user_workspace: &UserWorkspace) -> Fut<FlowyResult<()>> {
let user_workspace = user_workspace.clone();
let collab_builder = self.collab_builder.clone();
let folder_manager = self.folder_manager.clone();
let database_manager = self.database_manager.clone();
let document_manager = self.document_manager.clone();
to_fut(async move {
collab_builder.initialize(user_workspace.id.clone());
folder_manager
.initialize_with_workspace_id(user_id, &user_workspace.id)
.await?;
database_manager
.initialize(
user_id,
user_workspace.id.clone(),
user_workspace.database_storage_id,
)
.await?;
document_manager
.initialize(user_id, user_workspace.id)
.await?;
Ok(())
})
}
fn did_update_network(&self, reachable: bool) {
self.collab_builder.update_network(reachable);
}
} }
impl From<ServerProviderType> for CollabStorageType { impl From<ServerProviderType> for CollabStorageType {


@@ -1,22 +1,22 @@
-use std::sync::Arc;
-use flowy_database2::DatabaseManager2;
+use std::sync::Weak;
+use flowy_database2::DatabaseManager;
 use flowy_document2::manager::DocumentManager as DocumentManager2;
 use flowy_folder2::manager::FolderManager;
 use flowy_user::services::UserSession;
 use lib_dispatch::prelude::AFPlugin;
 pub fn make_plugins(
-  folder_manager: &Arc<FolderManager>,
-  database_manager: &Arc<DatabaseManager2>,
-  user_session: &Arc<UserSession>,
-  document_manager2: &Arc<DocumentManager2>,
+  folder_manager: Weak<FolderManager>,
+  database_manager: Weak<DatabaseManager>,
+  user_session: Weak<UserSession>,
+  document_manager2: Weak<DocumentManager2>,
 ) -> Vec<AFPlugin> {
-  let user_plugin = flowy_user::event_map::init(user_session.clone());
-  let folder_plugin = flowy_folder2::event_map::init(folder_manager.clone());
+  let user_plugin = flowy_user::event_map::init(user_session);
+  let folder_plugin = flowy_folder2::event_map::init(folder_manager);
   let network_plugin = flowy_net::event_map::init();
-  let database_plugin = flowy_database2::event_map::init(database_manager.clone());
-  let document_plugin2 = flowy_document2::event_map::init(document_manager2.clone());
+  let database_plugin = flowy_database2::event_map::init(database_manager);
+  let document_plugin2 = flowy_document2::event_map::init(document_manager2);
   let config_plugin = flowy_config::event_map::init();
   vec![
     user_plugin,
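With make_plugins now taking Weak handles instead of &Arc, each event handler has to upgrade its plugin state before using the manager and treat a failed upgrade as a recoverable error rather than panicking. Below is a minimal sketch of that upgrade pattern using only std types; the real handlers wrap this in AFPluginState and FlowyError, as the flowy-database2 handler changes later in this commit show.

use std::sync::{Arc, Weak};

// Illustrative only: turn a Weak handle held by long-lived plugin state into
// a strong reference, or report that the owning manager was already dropped.
fn upgrade_manager<T>(weak: &Weak<T>) -> Result<Arc<T>, String> {
  weak
    .upgrade()
    .ok_or_else(|| "the manager is already dropped".to_string())
}

fn main() {
  let manager = Arc::new(String::from("database manager"));
  let weak = Arc::downgrade(&manager);

  // While the strong Arc is alive, handlers get a usable reference.
  assert!(upgrade_manager(&weak).is_ok());

  drop(manager);

  // After the owner drops it, handlers get an error they can surface instead of panicking.
  assert!(upgrade_manager(&weak).is_err());
}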


@@ -0,0 +1,12 @@
[package]
name = "flowy-database-deps"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-error = { path = "../flowy-error" }
collab-plugins = { version = "0.1.0" }
anyhow = "1.0.71"


@@ -0,0 +1,38 @@
use anyhow::Error;
use std::collections::HashMap;
use collab_plugins::cloud_storage::CollabType;
use lib_infra::future::FutureResult;
pub type CollabObjectUpdateByOid = HashMap<String, CollabObjectUpdate>;
pub type CollabObjectUpdate = Vec<Vec<u8>>;
/// A trait for database cloud service.
/// Each kind of server should implement this trait. Check out the [AppFlowyServerProvider] of
/// [flowy-server] crate for more information.
pub trait DatabaseCloudService: Send + Sync {
fn get_collab_update(
&self,
object_id: &str,
object_ty: CollabType,
) -> FutureResult<CollabObjectUpdate, Error>;
fn batch_get_collab_updates(
&self,
object_ids: Vec<String>,
object_ty: CollabType,
) -> FutureResult<CollabObjectUpdateByOid, Error>;
fn get_collab_latest_snapshot(
&self,
object_id: &str,
) -> FutureResult<Option<DatabaseSnapshot>, Error>;
}
pub struct DatabaseSnapshot {
pub snapshot_id: i64,
pub database_id: String,
pub data: Vec<u8>,
pub created_at: i64,
}
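The trait above is what each backend (local, self-hosted, Supabase) implements to serve database collab updates. A minimal stub sketch follows, assuming FutureResult::new accepts an async block the way the call sites elsewhere in this commit use it; the NoopDatabaseCloudService name is invented for illustration, and the import paths follow the new flowy-database-deps crate layout shown above.

use std::collections::HashMap;

use anyhow::Error;
use collab_plugins::cloud_storage::CollabType;
use flowy_database_deps::cloud::{
  CollabObjectUpdate, CollabObjectUpdateByOid, DatabaseCloudService, DatabaseSnapshot,
};
use lib_infra::future::FutureResult;

// Hypothetical offline stub: reports "no remote updates" for every object.
struct NoopDatabaseCloudService;

impl DatabaseCloudService for NoopDatabaseCloudService {
  fn get_collab_update(
    &self,
    _object_id: &str,
    _object_ty: CollabType,
  ) -> FutureResult<CollabObjectUpdate, Error> {
    FutureResult::new(async move { Ok(vec![]) })
  }

  fn batch_get_collab_updates(
    &self,
    object_ids: Vec<String>,
    _object_ty: CollabType,
  ) -> FutureResult<CollabObjectUpdateByOid, Error> {
    FutureResult::new(async move {
      // Every requested object id maps to an empty update list.
      Ok(
        object_ids
          .into_iter()
          .map(|oid| (oid, Vec::new()))
          .collect::<HashMap<_, _>>(),
      )
    })
  }

  fn get_collab_latest_snapshot(
    &self,
    _object_id: &str,
  ) -> FutureResult<Option<DatabaseSnapshot>, Error> {
    FutureResult::new(async move { Ok(None) })
  }
}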


@@ -0,0 +1 @@
pub mod cloud;


@@ -9,12 +9,13 @@ edition = "2021"
 collab = { version = "0.1.0" }
 collab-database = { version = "0.1.0" }
 appflowy-integrate = {version = "0.1.0" }
+flowy-database-deps = { path = "../flowy-database-deps" }
 flowy-derive = { path = "../../../shared-lib/flowy-derive" }
 flowy-notification = { path = "../flowy-notification" }
 parking_lot = "0.12.1"
 protobuf = {version = "2.28.0"}
-flowy-error = { path = "../flowy-error", features = ["adaptor_dispatch", "collab"]}
+flowy-error = { path = "../flowy-error", features = ["impl_from_dispatch_error", "impl_from_collab"]}
 lib-dispatch = { path = "../lib-dispatch" }
 tokio = { version = "1.26", features = ["sync"] }
 flowy-task= { path = "../flowy-task" }


@@ -1,38 +0,0 @@
use std::sync::Arc;
use appflowy_integrate::RocksCollabDB;
pub use collab_database::user::CollabObjectUpdate;
pub use collab_database::user::CollabObjectUpdateByOid;
use flowy_error::FlowyError;
use lib_infra::future::FutureResult;
pub trait DatabaseUser2: Send + Sync {
fn user_id(&self) -> Result<i64, FlowyError>;
fn token(&self) -> Result<Option<String>, FlowyError>;
fn collab_db(&self, uid: i64) -> Result<Arc<RocksCollabDB>, FlowyError>;
}
/// A trait for database cloud service.
/// Each kind of server should implement this trait. Check out the [AppFlowyServerProvider] of
/// [flowy-server] crate for more information.
pub trait DatabaseCloudService: Send + Sync {
fn get_collab_update(&self, object_id: &str) -> FutureResult<CollabObjectUpdate, FlowyError>;
fn batch_get_collab_updates(
&self,
object_ids: Vec<String>,
) -> FutureResult<CollabObjectUpdateByOid, FlowyError>;
fn get_collab_latest_snapshot(
&self,
object_id: &str,
) -> FutureResult<Option<DatabaseSnapshot>, FlowyError>;
}
pub struct DatabaseSnapshot {
pub snapshot_id: i64,
pub database_id: String,
pub data: Vec<u8>,
pub created_at: i64,
}


@@ -1,4 +1,4 @@
-use std::sync::Arc;
+use std::sync::{Arc, Weak};
 use collab_database::database::gen_row_id;
 use collab_database::rows::RowId;
@@ -8,7 +8,7 @@ use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataRes
 use lib_infra::util::timestamp;
 use crate::entities::*;
-use crate::manager::DatabaseManager2;
+use crate::manager::DatabaseManager;
 use crate::services::cell::CellBuilder;
 use crate::services::field::checklist_type_option::ChecklistCellChangeset;
 use crate::services::field::{
@@ -17,11 +17,21 @@ use crate::services::field::{
 use crate::services::group::{GroupChangeset, GroupSettingChangeset};
 use crate::services::share::csv::CSVFormat;
+fn upgrade_manager(
+  database_manager: AFPluginState<Weak<DatabaseManager>>,
+) -> FlowyResult<Arc<DatabaseManager>> {
+  let manager = database_manager
+    .upgrade()
+    .ok_or(FlowyError::internal().context("The database manager is already dropped"))?;
+  Ok(manager)
+}
 #[tracing::instrument(level = "trace", skip_all, err)]
 pub(crate) async fn get_database_data_handler(
   data: AFPluginData<DatabaseViewIdPB>,
-  manager: AFPluginState<Arc<DatabaseManager2>>,
+  manager: AFPluginState<Weak<DatabaseManager>>,
 ) -> DataResult<DatabasePB, FlowyError> {
+  let manager = upgrade_manager(manager)?;
   let view_id: DatabaseViewIdPB = data.into_inner();
   let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
   let data = database_editor.get_database_data(view_id.as_ref()).await?;
@@ -31,8 +41,9 @@ pub(crate) async fn get_database_data_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn open_database_handler( pub(crate) async fn open_database_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_id = manager let database_id = manager
.get_database_id_with_view_id(view_id.as_ref()) .get_database_id_with_view_id(view_id.as_ref())
@@ -44,8 +55,9 @@ pub(crate) async fn open_database_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_database_id_handler( pub(crate) async fn get_database_id_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<DatabaseIdPB, FlowyError> { ) -> DataResult<DatabaseIdPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_id = manager let database_id = manager
.get_database_id_with_view_id(view_id.as_ref()) .get_database_id_with_view_id(view_id.as_ref())
@@ -56,8 +68,9 @@ pub(crate) async fn get_database_id_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_database_setting_handler( pub(crate) async fn get_database_setting_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<DatabaseViewSettingPB, FlowyError> { ) -> DataResult<DatabaseViewSettingPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let data = database_editor let data = database_editor
@@ -69,8 +82,9 @@ pub(crate) async fn get_database_setting_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_database_setting_handler( pub(crate) async fn update_database_setting_handler(
data: AFPluginData<DatabaseSettingChangesetPB>, data: AFPluginData<DatabaseSettingChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: DatabaseSettingChangesetParams = data.into_inner().try_into()?; let params: DatabaseSettingChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_database_with_view_id(&params.view_id).await?; let editor = manager.get_database_with_view_id(&params.view_id).await?;
@@ -100,8 +114,9 @@ pub(crate) async fn update_database_setting_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_all_filters_handler( pub(crate) async fn get_all_filters_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RepeatedFilterPB, FlowyError> { ) -> DataResult<RepeatedFilterPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let filters = database_editor.get_all_filters(view_id.as_ref()).await; let filters = database_editor.get_all_filters(view_id.as_ref()).await;
@@ -111,8 +126,9 @@ pub(crate) async fn get_all_filters_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_all_sorts_handler( pub(crate) async fn get_all_sorts_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RepeatedSortPB, FlowyError> { ) -> DataResult<RepeatedSortPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let sorts = database_editor.get_all_sorts(view_id.as_ref()).await; let sorts = database_editor.get_all_sorts(view_id.as_ref()).await;
@@ -122,8 +138,9 @@ pub(crate) async fn get_all_sorts_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn delete_all_sorts_handler( pub(crate) async fn delete_all_sorts_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id: DatabaseViewIdPB = data.into_inner(); let view_id: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?; let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
database_editor.delete_all_sorts(view_id.as_ref()).await; database_editor.delete_all_sorts(view_id.as_ref()).await;
@@ -133,8 +150,9 @@ pub(crate) async fn delete_all_sorts_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_fields_handler( pub(crate) async fn get_fields_handler(
data: AFPluginData<GetFieldPayloadPB>, data: AFPluginData<GetFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RepeatedFieldPB, FlowyError> { ) -> DataResult<RepeatedFieldPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: GetFieldParams = data.into_inner().try_into()?; let params: GetFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let fields = database_editor let fields = database_editor
@@ -149,8 +167,9 @@ pub(crate) async fn get_fields_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_primary_field_handler( pub(crate) async fn get_primary_field_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<FieldPB, FlowyError> { ) -> DataResult<FieldPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id = data.into_inner().value; let view_id = data.into_inner().value;
let database_editor = manager.get_database_with_view_id(&view_id).await?; let database_editor = manager.get_database_with_view_id(&view_id).await?;
let mut fields = database_editor let mut fields = database_editor
@@ -177,8 +196,9 @@ pub(crate) async fn get_primary_field_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_field_handler( pub(crate) async fn update_field_handler(
data: AFPluginData<FieldChangesetPB>, data: AFPluginData<FieldChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: FieldChangesetParams = data.into_inner().try_into()?; let params: FieldChangesetParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor.update_field(params).await?; database_editor.update_field(params).await?;
@@ -188,8 +208,9 @@ pub(crate) async fn update_field_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn update_field_type_option_handler( pub(crate) async fn update_field_type_option_handler(
data: AFPluginData<TypeOptionChangesetPB>, data: AFPluginData<TypeOptionChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: TypeOptionChangesetParams = data.into_inner().try_into()?; let params: TypeOptionChangesetParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
if let Some(old_field) = database_editor.get_field(&params.field_id) { if let Some(old_field) = database_editor.get_field(&params.field_id) {
@@ -211,8 +232,9 @@ pub(crate) async fn update_field_type_option_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn delete_field_handler( pub(crate) async fn delete_field_handler(
data: AFPluginData<DeleteFieldPayloadPB>, data: AFPluginData<DeleteFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: FieldIdParams = data.into_inner().try_into()?; let params: FieldIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor.delete_field(&params.field_id).await?; database_editor.delete_field(&params.field_id).await?;
@@ -222,8 +244,9 @@ pub(crate) async fn delete_field_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn switch_to_field_handler( pub(crate) async fn switch_to_field_handler(
data: AFPluginData<UpdateFieldTypePayloadPB>, data: AFPluginData<UpdateFieldTypePayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: EditFieldParams = data.into_inner().try_into()?; let params: EditFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let old_field = database_editor.get_field(&params.field_id); let old_field = database_editor.get_field(&params.field_id);
@@ -257,8 +280,9 @@ pub(crate) async fn switch_to_field_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn duplicate_field_handler( pub(crate) async fn duplicate_field_handler(
data: AFPluginData<DuplicateFieldPayloadPB>, data: AFPluginData<DuplicateFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: FieldIdParams = data.into_inner().try_into()?; let params: FieldIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@@ -271,8 +295,9 @@ pub(crate) async fn duplicate_field_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_field_type_option_data_handler( pub(crate) async fn get_field_type_option_data_handler(
data: AFPluginData<TypeOptionPathPB>, data: AFPluginData<TypeOptionPathPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<TypeOptionPB, FlowyError> { ) -> DataResult<TypeOptionPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: TypeOptionPathParams = data.into_inner().try_into()?; let params: TypeOptionPathParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
if let Some((field, data)) = database_editor if let Some((field, data)) = database_editor
@@ -294,8 +319,9 @@ pub(crate) async fn get_field_type_option_data_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn create_field_type_option_data_handler( pub(crate) async fn create_field_type_option_data_handler(
data: AFPluginData<CreateFieldPayloadPB>, data: AFPluginData<CreateFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<TypeOptionPB, FlowyError> { ) -> DataResult<TypeOptionPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CreateFieldParams = data.into_inner().try_into()?; let params: CreateFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let (field, data) = database_editor let (field, data) = database_editor
@@ -313,8 +339,9 @@ pub(crate) async fn create_field_type_option_data_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn move_field_handler( pub(crate) async fn move_field_handler(
data: AFPluginData<MoveFieldPayloadPB>, data: AFPluginData<MoveFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: MoveFieldParams = data.into_inner().try_into()?; let params: MoveFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@@ -331,8 +358,9 @@ pub(crate) async fn move_field_handler(
// #[tracing::instrument(level = "debug", skip(data, manager), err)] // #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_row_handler( pub(crate) async fn get_row_handler(
data: AFPluginData<RowIdPB>, data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<OptionalRowPB, FlowyError> { ) -> DataResult<OptionalRowPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?; let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let row = database_editor let row = database_editor
@@ -343,8 +371,9 @@ pub(crate) async fn get_row_handler(
pub(crate) async fn get_row_meta_handler( pub(crate) async fn get_row_meta_handler(
data: AFPluginData<RowIdPB>, data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RowMetaPB, FlowyError> { ) -> DataResult<RowMetaPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?; let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
match database_editor.get_row_meta(&params.view_id, &params.row_id) { match database_editor.get_row_meta(&params.view_id, &params.row_id) {
@@ -355,8 +384,9 @@ pub(crate) async fn get_row_meta_handler(
pub(crate) async fn update_row_meta_handler( pub(crate) async fn update_row_meta_handler(
data: AFPluginData<UpdateRowMetaChangesetPB>, data: AFPluginData<UpdateRowMetaChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: UpdateRowMetaParams = data.into_inner().try_into()?; let params: UpdateRowMetaParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let row_id = RowId::from(params.id.clone()); let row_id = RowId::from(params.id.clone());
@@ -367,8 +397,9 @@ pub(crate) async fn update_row_meta_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn delete_row_handler( pub(crate) async fn delete_row_handler(
data: AFPluginData<RowIdPB>, data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?; let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor.delete_row(&params.row_id).await; database_editor.delete_row(&params.row_id).await;
@@ -378,8 +409,9 @@ pub(crate) async fn delete_row_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn duplicate_row_handler( pub(crate) async fn duplicate_row_handler(
data: AFPluginData<RowIdPB>, data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?; let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@@ -391,8 +423,9 @@ pub(crate) async fn duplicate_row_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_row_handler( pub(crate) async fn move_row_handler(
data: AFPluginData<MoveRowPayloadPB>, data: AFPluginData<MoveRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: MoveRowParams = data.into_inner().try_into()?; let params: MoveRowParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@@ -404,8 +437,9 @@ pub(crate) async fn move_row_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn create_row_handler( pub(crate) async fn create_row_handler(
data: AFPluginData<CreateRowPayloadPB>, data: AFPluginData<CreateRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RowMetaPB, FlowyError> { ) -> DataResult<RowMetaPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CreateRowParams = data.into_inner().try_into()?; let params: CreateRowParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let fields = database_editor.get_fields(&params.view_id, None); let fields = database_editor.get_fields(&params.view_id, None);
@@ -433,8 +467,9 @@ pub(crate) async fn create_row_handler(
// #[tracing::instrument(level = "trace", skip_all, err)] // #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_cell_handler( pub(crate) async fn get_cell_handler(
data: AFPluginData<CellIdPB>, data: AFPluginData<CellIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<CellPB, FlowyError> { ) -> DataResult<CellPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CellIdParams = data.into_inner().try_into()?; let params: CellIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let cell = database_editor let cell = database_editor
@@ -447,8 +482,9 @@ pub(crate) async fn get_cell_handler(
#[tracing::instrument(level = "debug", skip_all, err)] #[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn update_cell_handler( pub(crate) async fn update_cell_handler(
data: AFPluginData<CellChangesetPB>, data: AFPluginData<CellChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CellChangesetPB = data.into_inner(); let params: CellChangesetPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@@ -465,8 +501,9 @@ pub(crate) async fn update_cell_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn new_select_option_handler( pub(crate) async fn new_select_option_handler(
data: AFPluginData<CreateSelectOptionPayloadPB>, data: AFPluginData<CreateSelectOptionPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<SelectOptionPB, FlowyError> { ) -> DataResult<SelectOptionPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CreateSelectOptionParams = data.into_inner().try_into()?; let params: CreateSelectOptionParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let result = database_editor let result = database_editor
@ -483,8 +520,9 @@ pub(crate) async fn new_select_option_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn insert_or_update_select_option_handler( pub(crate) async fn insert_or_update_select_option_handler(
data: AFPluginData<RepeatedSelectOptionPayload>, data: AFPluginData<RepeatedSelectOptionPayload>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params = data.into_inner(); let params = data.into_inner();
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@ -501,8 +539,9 @@ pub(crate) async fn insert_or_update_select_option_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn delete_select_option_handler( pub(crate) async fn delete_select_option_handler(
data: AFPluginData<RepeatedSelectOptionPayload>, data: AFPluginData<RepeatedSelectOptionPayload>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params = data.into_inner(); let params = data.into_inner();
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@ -519,8 +558,9 @@ pub(crate) async fn delete_select_option_handler(
#[tracing::instrument(level = "trace", skip(data, manager), err)] #[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_select_option_handler( pub(crate) async fn get_select_option_handler(
data: AFPluginData<CellIdPB>, data: AFPluginData<CellIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<SelectOptionCellDataPB, FlowyError> { ) -> DataResult<SelectOptionCellDataPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CellIdParams = data.into_inner().try_into()?; let params: CellIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let options = database_editor let options = database_editor
@ -532,8 +572,9 @@ pub(crate) async fn get_select_option_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_select_option_cell_handler( pub(crate) async fn update_select_option_cell_handler(
data: AFPluginData<SelectOptionCellChangesetPB>, data: AFPluginData<SelectOptionCellChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?; let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?;
let database_editor = manager let database_editor = manager
.get_database_with_view_id(&params.cell_identifier.view_id) .get_database_with_view_id(&params.cell_identifier.view_id)
@ -556,8 +597,9 @@ pub(crate) async fn update_select_option_cell_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_checklist_cell_data_handler( pub(crate) async fn get_checklist_cell_data_handler(
data: AFPluginData<CellIdPB>, data: AFPluginData<CellIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<ChecklistCellDataPB, FlowyError> { ) -> DataResult<ChecklistCellDataPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CellIdParams = data.into_inner().try_into()?; let params: CellIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let data = database_editor let data = database_editor
@ -569,8 +611,9 @@ pub(crate) async fn get_checklist_cell_data_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_checklist_cell_handler( pub(crate) async fn update_checklist_cell_handler(
data: AFPluginData<ChecklistCellDataChangesetPB>, data: AFPluginData<ChecklistCellDataChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let params: ChecklistCellDataChangesetParams = data.into_inner().try_into()?; let params: ChecklistCellDataChangesetParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let changeset = ChecklistCellChangeset { let changeset = ChecklistCellChangeset {
@ -588,8 +631,9 @@ pub(crate) async fn update_checklist_cell_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_date_cell_handler( pub(crate) async fn update_date_cell_handler(
data: AFPluginData<DateChangesetPB>, data: AFPluginData<DateChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let data = data.into_inner(); let data = data.into_inner();
let cell_id: CellIdParams = data.cell_id.try_into()?; let cell_id: CellIdParams = data.cell_id.try_into()?;
let cell_changeset = DateCellChangeset { let cell_changeset = DateCellChangeset {
@ -612,8 +656,9 @@ pub(crate) async fn update_date_cell_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_groups_handler( pub(crate) async fn get_groups_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RepeatedGroupPB, FlowyError> { ) -> DataResult<RepeatedGroupPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: DatabaseViewIdPB = data.into_inner(); let params: DatabaseViewIdPB = data.into_inner();
let database_editor = manager.get_database_with_view_id(params.as_ref()).await?; let database_editor = manager.get_database_with_view_id(params.as_ref()).await?;
let groups = database_editor.load_groups(params.as_ref()).await?; let groups = database_editor.load_groups(params.as_ref()).await?;
@ -623,8 +668,9 @@ pub(crate) async fn get_groups_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn get_group_handler( pub(crate) async fn get_group_handler(
data: AFPluginData<DatabaseGroupIdPB>, data: AFPluginData<DatabaseGroupIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<GroupPB, FlowyError> { ) -> DataResult<GroupPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: DatabaseGroupIdParams = data.into_inner().try_into()?; let params: DatabaseGroupIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let group = database_editor let group = database_editor
@ -636,8 +682,9 @@ pub(crate) async fn get_group_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn set_group_by_field_handler( pub(crate) async fn set_group_by_field_handler(
data: AFPluginData<GroupByFieldPayloadPB>, data: AFPluginData<GroupByFieldPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: GroupByFieldParams = data.into_inner().try_into()?; let params: GroupByFieldParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@ -649,8 +696,9 @@ pub(crate) async fn set_group_by_field_handler(
#[tracing::instrument(level = "trace", skip_all, err)] #[tracing::instrument(level = "trace", skip_all, err)]
pub(crate) async fn update_group_handler( pub(crate) async fn update_group_handler(
data: AFPluginData<UpdateGroupPB>, data: AFPluginData<UpdateGroupPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: UpdateGroupParams = data.into_inner().try_into()?; let params: UpdateGroupParams = data.into_inner().try_into()?;
let view_id = params.view_id.clone(); let view_id = params.view_id.clone();
let database_editor = manager.get_database_with_view_id(&view_id).await?; let database_editor = manager.get_database_with_view_id(&view_id).await?;
@ -666,8 +714,9 @@ pub(crate) async fn update_group_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_group_handler( pub(crate) async fn move_group_handler(
data: AFPluginData<MoveGroupPayloadPB>, data: AFPluginData<MoveGroupPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: MoveGroupParams = data.into_inner().try_into()?; let params: MoveGroupParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@ -679,8 +728,9 @@ pub(crate) async fn move_group_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_group_row_handler( pub(crate) async fn move_group_row_handler(
data: AFPluginData<MoveGroupRowPayloadPB>, data: AFPluginData<MoveGroupRowPayloadPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: MoveGroupRowParams = data.into_inner().try_into()?; let params: MoveGroupRowParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor database_editor
@ -696,8 +746,9 @@ pub(crate) async fn move_group_row_handler(
#[tracing::instrument(level = "debug", skip(manager), err)] #[tracing::instrument(level = "debug", skip(manager), err)]
pub(crate) async fn get_databases_handler( pub(crate) async fn get_databases_handler(
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RepeatedDatabaseDescriptionPB, FlowyError> { ) -> DataResult<RepeatedDatabaseDescriptionPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let data = manager.get_all_databases_description().await; let data = manager.get_all_databases_description().await;
data_result_ok(data) data_result_ok(data)
} }
@ -705,8 +756,9 @@ pub(crate) async fn get_databases_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn set_layout_setting_handler( pub(crate) async fn set_layout_setting_handler(
data: AFPluginData<LayoutSettingChangesetPB>, data: AFPluginData<LayoutSettingChangesetPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let params: LayoutSettingChangeset = data.into_inner().try_into()?; let params: LayoutSettingChangeset = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let layout_params = LayoutSettingParams { let layout_params = LayoutSettingParams {
@ -721,8 +773,9 @@ pub(crate) async fn set_layout_setting_handler(
pub(crate) async fn get_layout_setting_handler( pub(crate) async fn get_layout_setting_handler(
data: AFPluginData<DatabaseLayoutMetaPB>, data: AFPluginData<DatabaseLayoutMetaPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<DatabaseLayoutSettingPB, FlowyError> { ) -> DataResult<DatabaseLayoutSettingPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: DatabaseLayoutMeta = data.into_inner().try_into()?; let params: DatabaseLayoutMeta = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let layout_setting_pb = database_editor let layout_setting_pb = database_editor
@ -736,8 +789,9 @@ pub(crate) async fn get_layout_setting_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_calendar_events_handler( pub(crate) async fn get_calendar_events_handler(
data: AFPluginData<CalendarEventRequestPB>, data: AFPluginData<CalendarEventRequestPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RepeatedCalendarEventPB, FlowyError> { ) -> DataResult<RepeatedCalendarEventPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CalendarEventRequestParams = data.into_inner().try_into()?; let params: CalendarEventRequestParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let events = database_editor let events = database_editor
@ -749,8 +803,9 @@ pub(crate) async fn get_calendar_events_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_no_date_calendar_events_handler( pub(crate) async fn get_no_date_calendar_events_handler(
data: AFPluginData<CalendarEventRequestPB>, data: AFPluginData<CalendarEventRequestPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RepeatedNoDateCalendarEventPB, FlowyError> { ) -> DataResult<RepeatedNoDateCalendarEventPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: CalendarEventRequestParams = data.into_inner().try_into()?; let params: CalendarEventRequestParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let _events = database_editor let _events = database_editor
@ -762,8 +817,9 @@ pub(crate) async fn get_no_date_calendar_events_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn get_calendar_event_handler( pub(crate) async fn get_calendar_event_handler(
data: AFPluginData<RowIdPB>, data: AFPluginData<RowIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<CalendarEventPB, FlowyError> { ) -> DataResult<CalendarEventPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let params: RowIdParams = data.into_inner().try_into()?; let params: RowIdParams = data.into_inner().try_into()?;
let database_editor = manager.get_database_with_view_id(&params.view_id).await?; let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let event = database_editor let event = database_editor
@ -778,8 +834,9 @@ pub(crate) async fn get_calendar_event_handler(
#[tracing::instrument(level = "debug", skip(data, manager), err)] #[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn move_calendar_event_handler( pub(crate) async fn move_calendar_event_handler(
data: AFPluginData<MoveCalendarEventPB>, data: AFPluginData<MoveCalendarEventPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_manager(manager)?;
let data = data.into_inner(); let data = data.into_inner();
let cell_id: CellIdParams = data.cell_path.try_into()?; let cell_id: CellIdParams = data.cell_path.try_into()?;
let cell_changeset = DateCellChangeset { let cell_changeset = DateCellChangeset {
@ -801,7 +858,7 @@ pub(crate) async fn move_calendar_event_handler(
#[tracing::instrument(level = "debug", skip_all, err)] #[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn create_database_view( pub(crate) async fn create_database_view(
_data: AFPluginData<CreateDatabaseViewPayloadPB>, _data: AFPluginData<CreateDatabaseViewPayloadPB>,
_manager: AFPluginState<Arc<DatabaseManager2>>, _manager: AFPluginState<Weak<DatabaseManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
// let data: CreateDatabaseViewParams = data.into_inner().try_into()?; // let data: CreateDatabaseViewParams = data.into_inner().try_into()?;
Ok(()) Ok(())
@ -810,8 +867,9 @@ pub(crate) async fn create_database_view(
#[tracing::instrument(level = "debug", skip_all, err)] #[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn export_csv_handler( pub(crate) async fn export_csv_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<DatabaseExportDataPB, FlowyError> { ) -> DataResult<DatabaseExportDataPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id = data.into_inner().value; let view_id = data.into_inner().value;
let database = manager.get_database_with_view_id(&view_id).await?; let database = manager.get_database_with_view_id(&view_id).await?;
let data = database.export_csv(CSVFormat::Original).await?; let data = database.export_csv(CSVFormat::Original).await?;
@ -824,8 +882,9 @@ pub(crate) async fn export_csv_handler(
#[tracing::instrument(level = "debug", skip_all, err)] #[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn get_snapshots_handler( pub(crate) async fn get_snapshots_handler(
data: AFPluginData<DatabaseViewIdPB>, data: AFPluginData<DatabaseViewIdPB>,
manager: AFPluginState<Arc<DatabaseManager2>>, manager: AFPluginState<Weak<DatabaseManager>>,
) -> DataResult<RepeatedDatabaseSnapshotPB, FlowyError> { ) -> DataResult<RepeatedDatabaseSnapshotPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id = data.into_inner().value; let view_id = data.into_inner().value;
let snapshots = manager.get_database_snapshots(&view_id).await?; let snapshots = manager.get_database_snapshots(&view_id).await?;
data_result_ok(RepeatedDatabaseSnapshotPB { items: snapshots }) data_result_ok(RepeatedDatabaseSnapshotPB { items: snapshots })
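Every handler in this plugin now receives a Weak<DatabaseManager> and calls upgrade_manager before touching it, so a dropped manager surfaces as a FlowyError instead of keeping the state alive. A minimal sketch of such a helper — the name matches the calls above, and the body is assumed to mirror the upgrade_document helper shown in the document plugin further below:

fn upgrade_manager(
  database_manager: AFPluginState<Weak<DatabaseManager>>,
) -> FlowyResult<Arc<DatabaseManager>> {
  // Assumed shape: turn the weak plugin state back into an Arc, or fail if the
  // DatabaseManager has already been dropped (e.g. after the user signed out).
  let manager = database_manager
    .upgrade()
    .ok_or(FlowyError::internal().context("The database manager is already dropped"))?;
  Ok(manager)
}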


@ -1,4 +1,4 @@
use std::sync::Arc; use std::sync::Weak;
use strum_macros::Display; use strum_macros::Display;
@ -6,9 +6,9 @@ use flowy_derive::{Flowy_Event, ProtoBuf_Enum};
use lib_dispatch::prelude::*; use lib_dispatch::prelude::*;
use crate::event_handler::*; use crate::event_handler::*;
use crate::manager::DatabaseManager2; use crate::manager::DatabaseManager;
pub fn init(database_manager: Arc<DatabaseManager2>) -> AFPlugin { pub fn init(database_manager: Weak<DatabaseManager>) -> AFPlugin {
let plugin = AFPlugin::new() let plugin = AFPlugin::new()
.name(env!("CARGO_PKG_NAME")) .name(env!("CARGO_PKG_NAME"))
.state(database_manager); .state(database_manager);


@ -1,11 +1,10 @@
pub use manager::*; pub use manager::*;
pub mod deps;
pub mod entities; pub mod entities;
mod event_handler; mod event_handler;
pub mod event_map; pub mod event_map;
mod manager; mod manager;
mod notification; pub mod notification;
mod protobuf; mod protobuf;
pub mod services; pub mod services;
pub mod template; pub mod template;


@ -1,23 +1,23 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::{Arc, Weak};
use appflowy_integrate::collab_builder::AppFlowyCollabBuilder; use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
use appflowy_integrate::{CollabPersistenceConfig, RocksCollabDB}; use appflowy_integrate::{CollabPersistenceConfig, CollabType, RocksCollabDB};
use collab::core::collab::{CollabRawData, MutexCollab}; use collab::core::collab::{CollabRawData, MutexCollab};
use collab_database::blocks::BlockEvent; use collab_database::blocks::BlockEvent;
use collab_database::database::{DatabaseData, YrsDocAction}; use collab_database::database::{DatabaseData, YrsDocAction};
use collab_database::error::DatabaseError; use collab_database::error::DatabaseError;
use collab_database::user::{ use collab_database::user::{
make_workspace_database_id, CollabFuture, CollabObjectUpdate, CollabObjectUpdateByOid, CollabFuture, CollabObjectUpdate, CollabObjectUpdateByOid, DatabaseCollabService,
DatabaseCollabService, WorkspaceDatabase, WorkspaceDatabase,
}; };
use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout}; use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout};
use tokio::sync::RwLock; use tokio::sync::RwLock;
use flowy_database_deps::cloud::DatabaseCloudService;
use flowy_error::{internal_error, FlowyError, FlowyResult}; use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_task::TaskDispatcher; use flowy_task::TaskDispatcher;
use crate::deps::{DatabaseCloudService, DatabaseUser2};
use crate::entities::{ use crate::entities::{
DatabaseDescriptionPB, DatabaseLayoutPB, DatabaseSnapshotPB, DidFetchRowPB, DatabaseDescriptionPB, DatabaseLayoutPB, DatabaseSnapshotPB, DidFetchRowPB,
RepeatedDatabaseDescriptionPB, RepeatedDatabaseDescriptionPB,
@ -27,8 +27,14 @@ use crate::services::database::DatabaseEditor;
use crate::services::database_view::DatabaseLayoutDepsResolver; use crate::services::database_view::DatabaseLayoutDepsResolver;
use crate::services::share::csv::{CSVFormat, CSVImporter, ImportResult}; use crate::services::share::csv::{CSVFormat, CSVImporter, ImportResult};
pub struct DatabaseManager2 { pub trait DatabaseUser: Send + Sync {
user: Arc<dyn DatabaseUser2>, fn user_id(&self) -> Result<i64, FlowyError>;
fn token(&self) -> Result<Option<String>, FlowyError>;
fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError>;
}
pub struct DatabaseManager {
user: Arc<dyn DatabaseUser>,
workspace_database: Arc<RwLock<Option<Arc<WorkspaceDatabase>>>>, workspace_database: Arc<RwLock<Option<Arc<WorkspaceDatabase>>>>,
task_scheduler: Arc<RwLock<TaskDispatcher>>, task_scheduler: Arc<RwLock<TaskDispatcher>>,
editors: RwLock<HashMap<String, Arc<DatabaseEditor>>>, editors: RwLock<HashMap<String, Arc<DatabaseEditor>>>,
@ -36,9 +42,9 @@ pub struct DatabaseManager2 {
cloud_service: Arc<dyn DatabaseCloudService>, cloud_service: Arc<dyn DatabaseCloudService>,
} }
impl DatabaseManager2 { impl DatabaseManager {
pub fn new( pub fn new(
database_user: Arc<dyn DatabaseUser2>, database_user: Arc<dyn DatabaseUser>,
task_scheduler: Arc<RwLock<TaskDispatcher>>, task_scheduler: Arc<RwLock<TaskDispatcher>>,
collab_builder: Arc<AppFlowyCollabBuilder>, collab_builder: Arc<AppFlowyCollabBuilder>,
cloud_service: Arc<dyn DatabaseCloudService>, cloud_service: Arc<dyn DatabaseCloudService>,
@ -53,14 +59,23 @@ impl DatabaseManager2 {
} }
} }
fn is_collab_exist(&self, uid: i64, collab_db: &Arc<RocksCollabDB>, object_id: &str) -> bool { fn is_collab_exist(&self, uid: i64, collab_db: &Weak<RocksCollabDB>, object_id: &str) -> bool {
let read_txn = collab_db.read_txn(); match collab_db.upgrade() {
read_txn.is_exist(uid, object_id) None => false,
Some(collab_db) => {
let read_txn = collab_db.read_txn();
read_txn.is_exist(uid, object_id)
},
}
} }
pub async fn initialize(&self, uid: i64) -> FlowyResult<()> { pub async fn initialize(
&self,
uid: i64,
_workspace_id: String,
workspace_database_id: String,
) -> FlowyResult<()> {
let collab_db = self.user.collab_db(uid)?; let collab_db = self.user.collab_db(uid)?;
let workspace_database_id = make_workspace_database_id(uid);
let collab_builder = UserDatabaseCollabServiceImpl { let collab_builder = UserDatabaseCollabServiceImpl {
collab_builder: self.collab_builder.clone(), collab_builder: self.collab_builder.clone(),
cloud_service: self.cloud_service.clone(), cloud_service: self.cloud_service.clone(),
@ -73,7 +88,7 @@ impl DatabaseManager2 {
tracing::trace!("workspace database not exist, try to fetch from remote"); tracing::trace!("workspace database not exist, try to fetch from remote");
match self match self
.cloud_service .cloud_service
.get_collab_update(&workspace_database_id) .get_collab_update(&workspace_database_id, CollabType::WorkspaceDatabase)
.await .await
{ {
Ok(updates) => collab_raw_data = updates, Ok(updates) => collab_raw_data = updates,
@ -91,7 +106,7 @@ impl DatabaseManager2 {
let collab = collab_builder.build_collab_with_config( let collab = collab_builder.build_collab_with_config(
uid, uid,
&workspace_database_id, &workspace_database_id,
"databases", CollabType::WorkspaceDatabase,
collab_db.clone(), collab_db.clone(),
collab_raw_data, collab_raw_data,
&config, &config,
@ -100,11 +115,21 @@ impl DatabaseManager2 {
WorkspaceDatabase::open(uid, collab, collab_db, config, collab_builder); WorkspaceDatabase::open(uid, collab, collab_db, config, collab_builder);
subscribe_block_event(&workspace_database); subscribe_block_event(&workspace_database);
*self.workspace_database.write().await = Some(Arc::new(workspace_database)); *self.workspace_database.write().await = Some(Arc::new(workspace_database));
// Remove all existing editors
self.editors.write().await.clear();
Ok(()) Ok(())
} }
pub async fn initialize_with_new_user(&self, user_id: i64, _token: &str) -> FlowyResult<()> { pub async fn initialize_with_new_user(
self.initialize(user_id).await?; &self,
user_id: i64,
workspace_id: String,
database_storage_id: String,
) -> FlowyResult<()> {
self
.initialize(user_id, workspace_id, database_storage_id)
.await?;
Ok(()) Ok(())
} }
@ -346,6 +371,7 @@ impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
fn get_collab_update( fn get_collab_update(
&self, &self,
object_id: &str, object_id: &str,
object_ty: CollabType,
) -> CollabFuture<Result<CollabObjectUpdate, DatabaseError>> { ) -> CollabFuture<Result<CollabObjectUpdate, DatabaseError>> {
let object_id = object_id.to_string(); let object_id = object_id.to_string();
let weak_cloud_service = Arc::downgrade(&self.cloud_service); let weak_cloud_service = Arc::downgrade(&self.cloud_service);
@ -357,9 +383,8 @@ impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
}, },
Some(cloud_service) => { Some(cloud_service) => {
let updates = cloud_service let updates = cloud_service
.get_collab_update(&object_id) .get_collab_update(&object_id, object_ty)
.await .await?;
.map_err(|e| DatabaseError::Internal(Box::new(e)))?;
Ok(updates) Ok(updates)
}, },
} }
@ -369,6 +394,7 @@ impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
fn batch_get_collab_update( fn batch_get_collab_update(
&self, &self,
object_ids: Vec<String>, object_ids: Vec<String>,
object_ty: CollabType,
) -> CollabFuture<Result<CollabObjectUpdateByOid, DatabaseError>> { ) -> CollabFuture<Result<CollabObjectUpdateByOid, DatabaseError>> {
let weak_cloud_service = Arc::downgrade(&self.cloud_service); let weak_cloud_service = Arc::downgrade(&self.cloud_service);
Box::pin(async move { Box::pin(async move {
@ -379,9 +405,8 @@ impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
}, },
Some(cloud_service) => { Some(cloud_service) => {
let updates = cloud_service let updates = cloud_service
.batch_get_collab_updates(object_ids) .batch_get_collab_updates(object_ids, object_ty)
.await .await?;
.map_err(|e| DatabaseError::Internal(Box::new(e)))?;
Ok(updates) Ok(updates)
}, },
} }
@ -392,8 +417,8 @@ impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
&self, &self,
uid: i64, uid: i64,
object_id: &str, object_id: &str,
object_name: &str, object_type: CollabType,
collab_db: Arc<RocksCollabDB>, collab_db: Weak<RocksCollabDB>,
collab_raw_data: CollabRawData, collab_raw_data: CollabRawData,
config: &CollabPersistenceConfig, config: &CollabPersistenceConfig,
) -> Arc<MutexCollab> { ) -> Arc<MutexCollab> {
@ -402,7 +427,7 @@ impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
.build_with_config( .build_with_config(
uid, uid,
object_id, object_id,
object_name, object_type,
collab_db, collab_db,
collab_raw_data, collab_raw_data,
config, config,


@ -74,7 +74,11 @@ impl DatabaseEditor {
tokio::spawn(async move { tokio::spawn(async move {
while let Some(snapshot_state) = snapshot_state.next().await { while let Some(snapshot_state) = snapshot_state.next().await {
if let Some(new_snapshot_id) = snapshot_state.snapshot_id() { if let Some(new_snapshot_id) = snapshot_state.snapshot_id() {
tracing::debug!("Did create database remote snapshot: {}", new_snapshot_id); tracing::debug!(
"Did create {} database remote snapshot: {}",
database_id,
new_snapshot_id
);
send_notification( send_notification(
&database_id, &database_id,
DatabaseNotification::DidUpdateDatabaseSnapshotState, DatabaseNotification::DidUpdateDatabaseSnapshotState,


@ -0,0 +1,12 @@
[package]
name = "flowy-document-deps"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-error = { path = "../flowy-error" }
collab-document = { version = "0.1.0" }
anyhow = "1.0.71"


@ -0,0 +1,25 @@
use anyhow::Error;
pub use collab_document::blocks::DocumentData;
use lib_infra::future::FutureResult;
/// A trait for document cloud service.
/// Each kind of server should implement this trait. Check out the [AppFlowyServerProvider] of
/// [flowy-server] crate for more information.
pub trait DocumentCloudService: Send + Sync + 'static {
fn get_document_updates(&self, document_id: &str) -> FutureResult<Vec<Vec<u8>>, Error>;
fn get_document_latest_snapshot(
&self,
document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, Error>;
fn get_document_data(&self, document_id: &str) -> FutureResult<Option<DocumentData>, Error>;
}
pub struct DocumentSnapshot {
pub snapshot_id: i64,
pub document_id: String,
pub data: Vec<u8>,
pub created_at: i64,
}
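Each kind of server implements the trait above; what follows is a minimal sketch of a local (no-op) implementation, assuming the imports already in this file. The struct name is illustrative, and the commit's own test helper (LocalTestDocumentCloudServiceImpl, shown further below) has the same shape:

pub struct LocalDocumentCloudServiceImpl;

impl DocumentCloudService for LocalDocumentCloudServiceImpl {
  // No remote updates: the document is built purely from local collab data.
  fn get_document_updates(&self, _document_id: &str) -> FutureResult<Vec<Vec<u8>>, Error> {
    FutureResult::new(async move { Ok(vec![]) })
  }

  // A purely local backend keeps no remote snapshots.
  fn get_document_latest_snapshot(
    &self,
    _document_id: &str,
  ) -> FutureResult<Option<DocumentSnapshot>, Error> {
    FutureResult::new(async move { Ok(None) })
  }

  fn get_document_data(&self, _document_id: &str) -> FutureResult<Option<DocumentData>, Error> {
    FutureResult::new(async move { Ok(None) })
  }
}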


@ -0,0 +1 @@
pub mod cloud;


@ -9,10 +9,11 @@ edition = "2021"
collab = { version = "0.1.0" } collab = { version = "0.1.0" }
collab-document = { version = "0.1.0" } collab-document = { version = "0.1.0" }
appflowy-integrate = {version = "0.1.0" } appflowy-integrate = {version = "0.1.0" }
flowy-document-deps = { path = "../flowy-document-deps" }
flowy-derive = { path = "../../../shared-lib/flowy-derive" } flowy-derive = { path = "../../../shared-lib/flowy-derive" }
flowy-notification = { path = "../flowy-notification" } flowy-notification = { path = "../flowy-notification" }
flowy-error = { path = "../flowy-error", features = ["adaptor_serde", "adaptor_database", "adaptor_dispatch", "collab"] } flowy-error = { path = "../flowy-error", features = ["impl_from_serde", "impl_from_sqlite", "impl_from_dispatch_error", "impl_from_collab"] }
lib-dispatch = { path = "../lib-dispatch" } lib-dispatch = { path = "../lib-dispatch" }
lib-infra = { path = "../../../shared-lib/lib-infra" } lib-infra = { path = "../../../shared-lib/lib-infra" }
@ -20,7 +21,6 @@ protobuf = {version = "2.28.0"}
bytes = { version = "1.4" } bytes = { version = "1.4" }
nanoid = "0.4.0" nanoid = "0.4.0"
parking_lot = "0.12.1" parking_lot = "0.12.1"
strum = "0.21"
strum_macros = "0.21" strum_macros = "0.21"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = {version = "1.0"} serde_json = {version = "1.0"}


@ -1,34 +1 @@
use std::sync::Arc;
use appflowy_integrate::RocksCollabDB;
pub use collab_document::blocks::DocumentData; pub use collab_document::blocks::DocumentData;
use flowy_error::FlowyError;
use lib_infra::future::FutureResult;
pub trait DocumentUser: Send + Sync {
fn user_id(&self) -> Result<i64, FlowyError>;
fn token(&self) -> Result<Option<String>, FlowyError>; // unused now.
fn collab_db(&self, uid: i64) -> Result<Arc<RocksCollabDB>, FlowyError>;
}
/// A trait for document cloud service.
/// Each kind of server should implement this trait. Check out the [AppFlowyServerProvider] of
/// [flowy-server] crate for more information.
pub trait DocumentCloudService: Send + Sync + 'static {
fn get_document_updates(&self, document_id: &str) -> FutureResult<Vec<Vec<u8>>, FlowyError>;
fn get_document_latest_snapshot(
&self,
document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, FlowyError>;
fn get_document_data(&self, document_id: &str) -> FutureResult<Option<DocumentData>, FlowyError>;
}
pub struct DocumentSnapshot {
pub snapshot_id: i64,
pub document_id: String,
pub data: Vec<u8>,
pub created_at: i64,
}


@ -4,7 +4,7 @@
* which you can think of as a higher-level interface to interact with documents. * which you can think of as a higher-level interface to interact with documents.
*/ */
use std::sync::Arc; use std::sync::{Arc, Weak};
use collab_document::blocks::{ use collab_document::blocks::{
json_str_to_hashmap, Block, BlockAction, BlockActionPayload, BlockActionType, BlockEvent, json_str_to_hashmap, Block, BlockAction, BlockActionPayload, BlockActionType, BlockEvent,
@ -17,11 +17,21 @@ use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataRes
use crate::entities::*; use crate::entities::*;
use crate::{manager::DocumentManager, parser::json::parser::JsonToDocumentParser}; use crate::{manager::DocumentManager, parser::json::parser::JsonToDocumentParser};
fn upgrade_document(
document_manager: AFPluginState<Weak<DocumentManager>>,
) -> FlowyResult<Arc<DocumentManager>> {
let manager = document_manager
.upgrade()
.ok_or(FlowyError::internal().context("The document manager is already dropped"))?;
Ok(manager)
}
// Handler for creating a new document // Handler for creating a new document
pub(crate) async fn create_document_handler( pub(crate) async fn create_document_handler(
data: AFPluginData<CreateDocumentPayloadPB>, data: AFPluginData<CreateDocumentPayloadPB>,
manager: AFPluginState<Arc<DocumentManager>>, manager: AFPluginState<Weak<DocumentManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_document(manager)?;
let params: CreateDocumentParams = data.into_inner().try_into()?; let params: CreateDocumentParams = data.into_inner().try_into()?;
manager.create_document(&params.document_id, params.initial_data)?; manager.create_document(&params.document_id, params.initial_data)?;
Ok(()) Ok(())
@ -30,8 +40,9 @@ pub(crate) async fn create_document_handler(
// Handler for opening an existing document // Handler for opening an existing document
pub(crate) async fn open_document_handler( pub(crate) async fn open_document_handler(
data: AFPluginData<OpenDocumentPayloadPB>, data: AFPluginData<OpenDocumentPayloadPB>,
manager: AFPluginState<Arc<DocumentManager>>, manager: AFPluginState<Weak<DocumentManager>>,
) -> DataResult<DocumentDataPB, FlowyError> { ) -> DataResult<DocumentDataPB, FlowyError> {
let manager = upgrade_document(manager)?;
let params: OpenDocumentParams = data.into_inner().try_into()?; let params: OpenDocumentParams = data.into_inner().try_into()?;
let doc_id = params.document_id; let doc_id = params.document_id;
let document = manager.get_document(&doc_id).await?; let document = manager.get_document(&doc_id).await?;
@ -41,8 +52,9 @@ pub(crate) async fn open_document_handler(
pub(crate) async fn close_document_handler( pub(crate) async fn close_document_handler(
data: AFPluginData<CloseDocumentPayloadPB>, data: AFPluginData<CloseDocumentPayloadPB>,
manager: AFPluginState<Arc<DocumentManager>>, manager: AFPluginState<Weak<DocumentManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_document(manager)?;
let params: CloseDocumentParams = data.into_inner().try_into()?; let params: CloseDocumentParams = data.into_inner().try_into()?;
let doc_id = params.document_id; let doc_id = params.document_id;
manager.close_document(&doc_id)?; manager.close_document(&doc_id)?;
@ -53,8 +65,9 @@ pub(crate) async fn close_document_handler(
// if the document does not exist, return an error. // if the document does not exist, return an error.
pub(crate) async fn get_document_data_handler( pub(crate) async fn get_document_data_handler(
data: AFPluginData<OpenDocumentPayloadPB>, data: AFPluginData<OpenDocumentPayloadPB>,
manager: AFPluginState<Arc<DocumentManager>>, manager: AFPluginState<Weak<DocumentManager>>,
) -> DataResult<DocumentDataPB, FlowyError> { ) -> DataResult<DocumentDataPB, FlowyError> {
let manager = upgrade_document(manager)?;
let params: OpenDocumentParams = data.into_inner().try_into()?; let params: OpenDocumentParams = data.into_inner().try_into()?;
let doc_id = params.document_id; let doc_id = params.document_id;
let document_data = manager.get_document_data(&doc_id).await?; let document_data = manager.get_document_data(&doc_id).await?;
@ -64,8 +77,9 @@ pub(crate) async fn get_document_data_handler(
// Handler for applying an action to a document // Handler for applying an action to a document
pub(crate) async fn apply_action_handler( pub(crate) async fn apply_action_handler(
data: AFPluginData<ApplyActionPayloadPB>, data: AFPluginData<ApplyActionPayloadPB>,
manager: AFPluginState<Arc<DocumentManager>>, manager: AFPluginState<Weak<DocumentManager>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let manager = upgrade_document(manager)?;
let params: ApplyActionParams = data.into_inner().try_into()?; let params: ApplyActionParams = data.into_inner().try_into()?;
let doc_id = params.document_id; let doc_id = params.document_id;
let document = manager.get_document(&doc_id).await?; let document = manager.get_document(&doc_id).await?;
@ -76,7 +90,6 @@ pub(crate) async fn apply_action_handler(
pub(crate) async fn convert_data_to_document( pub(crate) async fn convert_data_to_document(
data: AFPluginData<ConvertDataPayloadPB>, data: AFPluginData<ConvertDataPayloadPB>,
_manager: AFPluginState<Arc<DocumentManager>>,
) -> DataResult<DocumentDataPB, FlowyError> { ) -> DataResult<DocumentDataPB, FlowyError> {
let payload = data.into_inner(); let payload = data.into_inner();
let document = convert_data_to_document_internal(payload)?; let document = convert_data_to_document_internal(payload)?;
@ -100,8 +113,9 @@ pub fn convert_data_to_document_internal(
pub(crate) async fn redo_handler( pub(crate) async fn redo_handler(
data: AFPluginData<DocumentRedoUndoPayloadPB>, data: AFPluginData<DocumentRedoUndoPayloadPB>,
manager: AFPluginState<Arc<DocumentManager>>, manager: AFPluginState<Weak<DocumentManager>>,
) -> DataResult<DocumentRedoUndoResponsePB, FlowyError> { ) -> DataResult<DocumentRedoUndoResponsePB, FlowyError> {
let manager = upgrade_document(manager)?;
let params: DocumentRedoUndoParams = data.into_inner().try_into()?; let params: DocumentRedoUndoParams = data.into_inner().try_into()?;
let doc_id = params.document_id; let doc_id = params.document_id;
let document = manager.get_document(&doc_id).await?; let document = manager.get_document(&doc_id).await?;
@ -118,8 +132,9 @@ pub(crate) async fn redo_handler(
pub(crate) async fn undo_handler( pub(crate) async fn undo_handler(
data: AFPluginData<DocumentRedoUndoPayloadPB>, data: AFPluginData<DocumentRedoUndoPayloadPB>,
manager: AFPluginState<Arc<DocumentManager>>, manager: AFPluginState<Weak<DocumentManager>>,
) -> DataResult<DocumentRedoUndoResponsePB, FlowyError> { ) -> DataResult<DocumentRedoUndoResponsePB, FlowyError> {
let manager = upgrade_document(manager)?;
let params: DocumentRedoUndoParams = data.into_inner().try_into()?; let params: DocumentRedoUndoParams = data.into_inner().try_into()?;
let doc_id = params.document_id; let doc_id = params.document_id;
let document = manager.get_document(&doc_id).await?; let document = manager.get_document(&doc_id).await?;
@ -136,8 +151,9 @@ pub(crate) async fn undo_handler(
pub(crate) async fn can_undo_redo_handler( pub(crate) async fn can_undo_redo_handler(
data: AFPluginData<DocumentRedoUndoPayloadPB>, data: AFPluginData<DocumentRedoUndoPayloadPB>,
manager: AFPluginState<Arc<DocumentManager>>, manager: AFPluginState<Weak<DocumentManager>>,
) -> DataResult<DocumentRedoUndoResponsePB, FlowyError> { ) -> DataResult<DocumentRedoUndoResponsePB, FlowyError> {
let manager = upgrade_document(manager)?;
let params: DocumentRedoUndoParams = data.into_inner().try_into()?; let params: DocumentRedoUndoParams = data.into_inner().try_into()?;
let doc_id = params.document_id; let doc_id = params.document_id;
let document = manager.get_document(&doc_id).await?; let document = manager.get_document(&doc_id).await?;
@ -154,8 +170,9 @@ pub(crate) async fn can_undo_redo_handler(
pub(crate) async fn get_snapshot_handler( pub(crate) async fn get_snapshot_handler(
data: AFPluginData<OpenDocumentPayloadPB>, data: AFPluginData<OpenDocumentPayloadPB>,
manager: AFPluginState<Arc<DocumentManager>>, manager: AFPluginState<Weak<DocumentManager>>,
) -> DataResult<RepeatedDocumentSnapshotPB, FlowyError> { ) -> DataResult<RepeatedDocumentSnapshotPB, FlowyError> {
let manager = upgrade_document(manager)?;
let params: OpenDocumentParams = data.into_inner().try_into()?; let params: OpenDocumentParams = data.into_inner().try_into()?;
let doc_id = params.document_id; let doc_id = params.document_id;
let snapshots = manager.get_document_snapshots(&doc_id).await?; let snapshots = manager.get_document_snapshots(&doc_id).await?;


@ -1,4 +1,5 @@
use std::sync::Arc; use std::sync::Weak;
use strum_macros::Display; use strum_macros::Display;
use flowy_derive::{Flowy_Event, ProtoBuf_Enum}; use flowy_derive::{Flowy_Event, ProtoBuf_Enum};
@ -7,7 +8,7 @@ use lib_dispatch::prelude::AFPlugin;
use crate::event_handler::get_snapshot_handler; use crate::event_handler::get_snapshot_handler;
use crate::{event_handler::*, manager::DocumentManager}; use crate::{event_handler::*, manager::DocumentManager};
pub fn init(document_manager: Arc<DocumentManager>) -> AFPlugin { pub fn init(document_manager: Weak<DocumentManager>) -> AFPlugin {
AFPlugin::new() AFPlugin::new()
.name(env!("CARGO_PKG_NAME")) .name(env!("CARGO_PKG_NAME"))
.state(document_manager) .state(document_manager)
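With init now taking Weak<DocumentManager>, the caller keeps the owning Arc and registers the plugin with a downgraded handle, so dropping the manager (for example on sign-out) invalidates the plugin state rather than extending its lifetime. A hedged sketch of that call site — the function name here is illustrative and not part of this diff:

fn register_document_plugin(document_manager: &Arc<DocumentManager>) -> AFPlugin {
  // The plugin only borrows the manager weakly; ownership stays with the caller.
  init(Arc::downgrade(document_manager))
}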


@ -8,5 +8,5 @@ pub mod parser;
pub mod protobuf; pub mod protobuf;
pub mod deps; pub mod deps;
mod notification; pub mod notification;
mod parse; mod parse;


@ -1,18 +1,26 @@
use std::sync::Weak;
use std::{collections::HashMap, sync::Arc}; use std::{collections::HashMap, sync::Arc};
use appflowy_integrate::collab_builder::AppFlowyCollabBuilder; use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
use appflowy_integrate::{CollabType, RocksCollabDB};
use collab::core::collab::MutexCollab; use collab::core::collab::MutexCollab;
use collab_document::blocks::DocumentData; use collab_document::blocks::DocumentData;
use collab_document::document::Document; use collab_document::document::Document;
use collab_document::YrsDocAction; use collab_document::YrsDocAction;
use parking_lot::RwLock; use parking_lot::RwLock;
use flowy_document_deps::cloud::DocumentCloudService;
use flowy_error::{internal_error, FlowyError, FlowyResult}; use flowy_error::{internal_error, FlowyError, FlowyResult};
use crate::deps::{DocumentCloudService, DocumentUser};
use crate::entities::DocumentSnapshotPB; use crate::entities::DocumentSnapshotPB;
use crate::{document::MutexDocument, document_data::default_document_data}; use crate::{document::MutexDocument, document_data::default_document_data};
pub trait DocumentUser: Send + Sync {
fn user_id(&self) -> Result<i64, FlowyError>;
fn token(&self) -> Result<Option<String>, FlowyError>; // unused now.
fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError>;
}
pub struct DocumentManager { pub struct DocumentManager {
user: Arc<dyn DocumentUser>, user: Arc<dyn DocumentUser>,
collab_builder: Arc<AppFlowyCollabBuilder>, collab_builder: Arc<AppFlowyCollabBuilder>,
@ -35,6 +43,15 @@ impl DocumentManager {
} }
} }
pub async fn initialize(&self, _uid: i64, _workspace_id: String) -> FlowyResult<()> {
self.documents.write().clear();
Ok(())
}
pub async fn initialize_with_new_user(&self, uid: i64, workspace_id: String) -> FlowyResult<()> {
self.initialize(uid, workspace_id).await?;
Ok(())
}
/// Create a new document. /// Create a new document.
/// ///
/// if the document already exists, return the existing document. /// if the document already exists, return the existing document.
@ -73,7 +90,7 @@ impl DocumentManager {
let db = self.user.collab_db(uid)?; let db = self.user.collab_db(uid)?;
let collab = self let collab = self
.collab_builder .collab_builder
.build(uid, doc_id, "document", updates, db)?; .build(uid, doc_id, CollabType::Document, updates, db)?;
let document = Arc::new(MutexDocument::open(doc_id, collab)?); let document = Arc::new(MutexDocument::open(doc_id, collab)?);
// save the document to the memory and read it from the memory if we open the same document again. // save the document to the memory and read it from the memory if we open the same document again.
@ -110,12 +127,13 @@ impl DocumentManager {
pub fn delete_document(&self, doc_id: &str) -> FlowyResult<()> { pub fn delete_document(&self, doc_id: &str) -> FlowyResult<()> {
let uid = self.user.user_id()?; let uid = self.user.user_id()?;
let db = self.user.collab_db(uid)?; if let Some(db) = self.user.collab_db(uid)?.upgrade() {
let _ = db.with_write_txn(|txn| { let _ = db.with_write_txn(|txn| {
txn.delete_doc(uid, &doc_id)?; txn.delete_doc(uid, &doc_id)?;
Ok(()) Ok(())
}); });
self.documents.write().remove(doc_id); self.documents.write().remove(doc_id);
}
Ok(()) Ok(())
} }
@ -151,15 +169,18 @@ impl DocumentManager {
let db = self.user.collab_db(uid)?; let db = self.user.collab_db(uid)?;
let collab = self let collab = self
.collab_builder .collab_builder
.build(uid, doc_id, "document", updates, db)?; .build(uid, doc_id, CollabType::Document, updates, db)?;
Ok(collab) Ok(collab)
} }
fn is_doc_exist(&self, doc_id: &str) -> FlowyResult<bool> { fn is_doc_exist(&self, doc_id: &str) -> FlowyResult<bool> {
let uid = self.user.user_id()?; let uid = self.user.user_id()?;
let db = self.user.collab_db(uid)?; if let Some(collab_db) = self.user.collab_db(uid)?.upgrade() {
let read_txn = db.read_txn(); let read_txn = collab_db.read_txn();
Ok(read_txn.is_exist(uid, doc_id)) Ok(read_txn.is_exist(uid, doc_id))
} else {
Ok(false)
}
} }
/// Only expose this method for testing /// Only expose this method for testing


@ -4,7 +4,7 @@ use flowy_notification::NotificationBuilder;
const DOCUMENT_OBSERVABLE_SOURCE: &str = "Document"; const DOCUMENT_OBSERVABLE_SOURCE: &str = "Document";
#[derive(ProtoBuf_Enum, Debug, Default)] #[derive(ProtoBuf_Enum, Debug, Default)]
pub(crate) enum DocumentNotification { pub enum DocumentNotification {
#[default] #[default]
Unknown = 0, Unknown = 0,


@ -1,3 +1,4 @@
use anyhow::Error;
use std::ops::Deref; use std::ops::Deref;
use std::sync::Arc; use std::sync::Arc;
@ -9,11 +10,11 @@ use parking_lot::Once;
use tempfile::TempDir; use tempfile::TempDir;
use tracing_subscriber::{fmt::Subscriber, util::SubscriberInitExt, EnvFilter}; use tracing_subscriber::{fmt::Subscriber, util::SubscriberInitExt, EnvFilter};
use flowy_document2::deps::{DocumentCloudService, DocumentSnapshot, DocumentUser};
use flowy_document2::document::MutexDocument; use flowy_document2::document::MutexDocument;
use flowy_document2::document_data::default_document_data; use flowy_document2::document_data::default_document_data;
use flowy_document2::manager::DocumentManager; use flowy_document2::manager::{DocumentManager, DocumentUser};
use flowy_error::FlowyError; use flowy_document_deps::cloud::*;
use lib_infra::future::FutureResult; use lib_infra::future::FutureResult;
pub struct DocumentTest { pub struct DocumentTest {
@ -38,12 +39,12 @@ impl Deref for DocumentTest {
} }
pub struct FakeUser { pub struct FakeUser {
kv: Arc<RocksCollabDB>, collab_db: Arc<RocksCollabDB>,
} }
impl FakeUser { impl FakeUser {
pub fn new() -> Self { pub fn new() -> Self {
Self { kv: db() } Self { collab_db: db() }
} }
} }
@ -56,8 +57,11 @@ impl DocumentUser for FakeUser {
Ok(None) Ok(None)
} }
fn collab_db(&self, _uid: i64) -> Result<std::sync::Arc<RocksCollabDB>, flowy_error::FlowyError> { fn collab_db(
Ok(self.kv.clone()) &self,
_uid: i64,
) -> Result<std::sync::Weak<RocksCollabDB>, flowy_error::FlowyError> {
Ok(Arc::downgrade(&self.collab_db))
} }
} }
@ -106,21 +110,18 @@ pub fn gen_id() -> String {
pub struct LocalTestDocumentCloudServiceImpl(); pub struct LocalTestDocumentCloudServiceImpl();
impl DocumentCloudService for LocalTestDocumentCloudServiceImpl { impl DocumentCloudService for LocalTestDocumentCloudServiceImpl {
fn get_document_updates(&self, _document_id: &str) -> FutureResult<Vec<Vec<u8>>, FlowyError> { fn get_document_updates(&self, _document_id: &str) -> FutureResult<Vec<Vec<u8>>, Error> {
FutureResult::new(async move { Ok(vec![]) }) FutureResult::new(async move { Ok(vec![]) })
} }
fn get_document_latest_snapshot( fn get_document_latest_snapshot(
&self, &self,
_document_id: &str, _document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, FlowyError> { ) -> FutureResult<Option<DocumentSnapshot>, Error> {
FutureResult::new(async move { Ok(None) }) FutureResult::new(async move { Ok(None) })
} }
fn get_document_data( fn get_document_data(&self, _document_id: &str) -> FutureResult<Option<DocumentData>, Error> {
&self,
_document_id: &str,
) -> FutureResult<Option<DocumentData>, FlowyError> {
FutureResult::new(async move { Ok(None) }) FutureResult::new(async move { Ok(None) })
} }
} }


@ -22,16 +22,20 @@ flowy-sqlite = { path = "../flowy-sqlite", optional = true}
r2d2 = { version = "0.8", optional = true} r2d2 = { version = "0.8", optional = true}
collab-database = { version = "0.1.0", optional = true } collab-database = { version = "0.1.0", optional = true }
collab-document = { version = "0.1.0", optional = true } collab-document = { version = "0.1.0", optional = true }
tokio-postgres = { version = "0.7.8", optional = true }
tokio = { version = "1.0", optional = true }
[features] [features]
adaptor_dispatch = ["lib-dispatch"] impl_from_dispatch_error = ["lib-dispatch"]
adaptor_serde = ["serde_json"] impl_from_serde = ["serde_json"]
adaptor_reqwest = ["reqwest"] impl_from_reqwest = ["reqwest"]
adaptor_database = ["flowy-sqlite", "r2d2"] impl_from_sqlite = ["flowy-sqlite", "r2d2"]
adaptor_server_error = ["http-error-code"] impl_from_appflowy_cloud = ["http-error-code"]
impl_from_collab = ["collab-database", "collab-document", "impl_from_reqwest"]
impl_from_postgres = ["tokio-postgres"]
impl_from_tokio= ["tokio"]
dart = ["flowy-codegen/dart"] dart = ["flowy-codegen/dart"]
ts = ["flowy-codegen/ts"] ts = ["flowy-codegen/ts"]
collab = ["collab-database", "collab-document"]
[build-dependencies] [build-dependencies]
flowy-codegen = { path = "../../../shared-lib/flowy-codegen", features = ["proto_gen"]} flowy-codegen = { path = "../../../shared-lib/flowy-codegen", features = ["proto_gen"]}


@ -57,9 +57,6 @@ pub enum ErrorCode {
#[error("View name too long")] #[error("View name too long")]
ViewNameTooLong = 17, ViewNameTooLong = 17,
#[error("Http server connection error")]
HttpServerConnectError = 18,
#[error("Email can not be empty or whitespace")] #[error("Email can not be empty or whitespace")]
EmailIsEmpty = 19, EmailIsEmpty = 19,
@ -179,7 +176,7 @@ pub enum ErrorCode {
#[error("Sql error")] #[error("Sql error")]
SqlError = 58, SqlError = 58,
#[error("Http request error")] #[error("Http error")]
HttpError = 59, HttpError = 59,
#[error("The content should not be empty")] #[error("The content should not be empty")]
@ -215,8 +212,14 @@ pub enum ErrorCode {
#[error("Postgres database error")] #[error("Postgres database error")]
PgDatabaseError = 70, PgDatabaseError = 70,
#[error("Postgres transaction error")]
PgTransactionError = 71,
#[error("Enable supabase sync")] #[error("Enable supabase sync")]
SupabaseSyncRequired = 71, SupabaseSyncRequired = 72,
#[error("Conflict")]
Conflict = 73,
} }
impl ErrorCode { impl ErrorCode {


@ -58,7 +58,6 @@ impl FlowyError {
static_flowy_error!(view_desc, ErrorCode::ViewDescTooLong); static_flowy_error!(view_desc, ErrorCode::ViewDescTooLong);
static_flowy_error!(view_data, ErrorCode::ViewDataInvalid); static_flowy_error!(view_data, ErrorCode::ViewDataInvalid);
static_flowy_error!(unauthorized, ErrorCode::UserUnauthorized); static_flowy_error!(unauthorized, ErrorCode::UserUnauthorized);
static_flowy_error!(connection, ErrorCode::HttpServerConnectError);
static_flowy_error!(email_empty, ErrorCode::EmailIsEmpty); static_flowy_error!(email_empty, ErrorCode::EmailIsEmpty);
static_flowy_error!(email_format, ErrorCode::EmailFormatInvalid); static_flowy_error!(email_format, ErrorCode::EmailFormatInvalid);
static_flowy_error!(email_exist, ErrorCode::EmailAlreadyExists); static_flowy_error!(email_exist, ErrorCode::EmailAlreadyExists);
@ -121,6 +120,7 @@ impl std::convert::From<protobuf::ProtobufError> for FlowyError {
impl From<anyhow::Error> for FlowyError { impl From<anyhow::Error> for FlowyError {
fn from(e: anyhow::Error) -> Self { fn from(e: anyhow::Error) -> Self {
FlowyError::internal().context(e) e.downcast::<FlowyError>()
.unwrap_or_else(|err| FlowyError::new(ErrorCode::Internal, err))
} }
} }
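The new conversion first tries to downcast, so a FlowyError that was erased into an anyhow::Error (as the *-deps cloud traits now return) comes back with its original error code, and only genuinely foreign errors fall back to ErrorCode::Internal. A small illustration, assuming FlowyError implements std::error::Error (which the downcast above relies on) and that FlowyError::new accepts any displayable message:

fn recover_flowy_error() -> FlowyError {
  // Erase a typed error into anyhow, then convert back via the impl above.
  let erased = anyhow::Error::new(FlowyError::new(ErrorCode::RecordNotFound, "row not found"));
  // Recovered with ErrorCode::RecordNotFound rather than ErrorCode::Internal.
  FlowyError::from(erased)
}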


@ -1,20 +0,0 @@
// #[cfg(feature = "adaptor_ot")]
// pub mod ot;
#[cfg(feature = "adaptor_serde")]
pub mod serde;
#[cfg(feature = "adaptor_dispatch")]
pub mod dispatch;
#[cfg(feature = "adaptor_reqwest")]
pub mod reqwest;
#[cfg(feature = "adaptor_database")]
pub mod database;
#[cfg(feature = "adaptor_server_error")]
pub mod http_server;
#[cfg(feature = "collab")]
pub mod collab;


@ -9,7 +9,7 @@ impl std::convert::From<ServerErrorCode> for ErrorCode {
ServerErrorCode::RecordNotFound => ErrorCode::RecordNotFound, ServerErrorCode::RecordNotFound => ErrorCode::RecordNotFound,
ServerErrorCode::ConnectRefused ServerErrorCode::ConnectRefused
| ServerErrorCode::ConnectTimeout | ServerErrorCode::ConnectTimeout
| ServerErrorCode::ConnectClose => ErrorCode::HttpServerConnectError, | ServerErrorCode::ConnectClose => ErrorCode::HttpError,
_ => ErrorCode::Internal, _ => ErrorCode::Internal,
} }
} }


@ -0,0 +1,26 @@
// #[cfg(feature = "adaptor_ot")]
// pub mod ot;
#[cfg(feature = "impl_from_serde")]
pub mod serde;
#[cfg(feature = "impl_from_dispatch_error")]
pub mod dispatch;
#[cfg(feature = "impl_from_reqwest")]
pub mod reqwest;
#[cfg(feature = "impl_from_sqlite")]
pub mod database;
#[cfg(feature = "impl_from_appflowy_cloud")]
pub mod http_server;
#[cfg(feature = "impl_from_collab")]
pub mod collab;
#[cfg(feature = "impl_from_postgres")]
mod postgres;
#[cfg(feature = "impl_from_tokio")]
mod tokio;

View File

@ -0,0 +1,7 @@
use crate::FlowyError;
impl std::convert::From<tokio_postgres::Error> for FlowyError {
fn from(error: tokio_postgres::Error) -> Self {
FlowyError::internal().context(error)
}
}

View File

@ -3,6 +3,6 @@ use reqwest::Error;
impl std::convert::From<reqwest::Error> for FlowyError { impl std::convert::From<reqwest::Error> for FlowyError {
fn from(error: Error) -> Self { fn from(error: Error) -> Self {
FlowyError::connection().context(error) FlowyError::http().context(error)
} }
} }

View File

@ -0,0 +1,7 @@
use crate::FlowyError;
// impl<T> std::convert::From<tokio::sync::mpsc::error::SendError<T>> for FlowyError {
// fn from(error: tokio::sync::mpsc::error::SendError<T>) -> Self {
// FlowyError::internal().context(error)
// }
// }

View File

@ -1,6 +1,6 @@
pub mod code; pub mod code;
mod errors; mod errors;
mod ext; mod impl_from;
pub mod protobuf; pub mod protobuf;
pub use code::*; pub use code::*;

View File

@ -0,0 +1,13 @@
[package]
name = "flowy-folder-deps"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-error = { path = "../flowy-error" }
collab-folder = { version = "0.1.0" }
uuid = { version = "1.3.3", features = ["v4"] }
anyhow = "1.0.71"

View File

@ -0,0 +1,33 @@
pub use collab_folder::core::{Folder, FolderData, Workspace};
pub use anyhow::Error;
use lib_infra::future::FutureResult;
use uuid::Uuid;
/// [FolderCloudService] represents the cloud service for folder.
pub trait FolderCloudService: Send + Sync + 'static {
fn create_workspace(&self, uid: i64, name: &str) -> FutureResult<Workspace, Error>;
fn get_folder_data(&self, workspace_id: &str) -> FutureResult<Option<FolderData>, Error>;
fn get_folder_latest_snapshot(
&self,
workspace_id: &str,
) -> FutureResult<Option<FolderSnapshot>, Error>;
fn get_folder_updates(&self, workspace_id: &str, uid: i64) -> FutureResult<Vec<Vec<u8>>, Error>;
fn service_name(&self) -> String;
}
pub struct FolderSnapshot {
pub snapshot_id: i64,
pub database_id: String,
pub data: Vec<u8>,
pub created_at: i64,
}
pub fn gen_workspace_id() -> Uuid {
uuid::Uuid::new_v4()
}

View File

@ -0,0 +1 @@
pub mod cloud;

View File

@ -9,13 +9,14 @@ edition = "2021"
collab = { version = "0.1.0" } collab = { version = "0.1.0" }
collab-folder = { version = "0.1.0" } collab-folder = { version = "0.1.0" }
appflowy-integrate = {version = "0.1.0" } appflowy-integrate = {version = "0.1.0" }
flowy-folder-deps = { path = "../flowy-folder-deps" }
flowy-derive = { path = "../../../shared-lib/flowy-derive" } flowy-derive = { path = "../../../shared-lib/flowy-derive" }
flowy-notification = { path = "../flowy-notification" } flowy-notification = { path = "../flowy-notification" }
parking_lot = "0.12.1" parking_lot = "0.12.1"
unicode-segmentation = "1.10" unicode-segmentation = "1.10"
tracing = { version = "0.1", features = ["log"] } tracing = { version = "0.1", features = ["log"] }
flowy-error = { path = "../flowy-error", features = ["adaptor_dispatch"]} flowy-error = { path = "../flowy-error", features = ["impl_from_dispatch_error"]}
lib-dispatch = { path = "../lib-dispatch" } lib-dispatch = { path = "../lib-dispatch" }
bytes = { version = "1.4" } bytes = { version = "1.4" }
lib-infra = { path = "../../../shared-lib/lib-infra" } lib-infra = { path = "../../../shared-lib/lib-infra" }
@ -23,7 +24,6 @@ tokio = { version = "1.26", features = ["full"] }
nanoid = "0.4.0" nanoid = "0.4.0"
lazy_static = "1.4.0" lazy_static = "1.4.0"
chrono = { version = "0.4.22", default-features = false, features = ["clock"] } chrono = { version = "0.4.22", default-features = false, features = ["clock"] }
strum = "0.21"
strum_macros = "0.21" strum_macros = "0.21"
protobuf = {version = "2.28.0"} protobuf = {version = "2.28.0"}
uuid = { version = "1.3.3", features = ["v4"] } uuid = { version = "1.3.3", features = ["v4"] }

View File

@ -1,42 +0,0 @@
use std::sync::Arc;
use appflowy_integrate::RocksCollabDB;
pub use collab_folder::core::FolderData;
pub use collab_folder::core::Workspace;
use flowy_error::FlowyError;
use lib_infra::future::FutureResult;
/// [FolderUser] represents the user for folder.
pub trait FolderUser: Send + Sync {
fn user_id(&self) -> Result<i64, FlowyError>;
fn token(&self) -> Result<Option<String>, FlowyError>;
fn collab_db(&self, uid: i64) -> Result<Arc<RocksCollabDB>, FlowyError>;
}
/// [FolderCloudService] represents the cloud service for folder.
pub trait FolderCloudService: Send + Sync + 'static {
fn create_workspace(&self, uid: i64, name: &str) -> FutureResult<Workspace, FlowyError>;
fn get_folder_data(&self, workspace_id: &str) -> FutureResult<Option<FolderData>, FlowyError>;
fn get_folder_latest_snapshot(
&self,
workspace_id: &str,
) -> FutureResult<Option<FolderSnapshot>, FlowyError>;
fn get_folder_updates(
&self,
workspace_id: &str,
uid: i64,
) -> FutureResult<Vec<Vec<u8>>, FlowyError>;
fn service_name(&self) -> String;
}
pub struct FolderSnapshot {
pub snapshot_id: i64,
pub database_id: String,
pub data: Vec<u8>,
pub created_at: i64,
}

View File

@ -1,17 +1,27 @@
use std::sync::Arc; use std::sync::{Arc, Weak};
use flowy_error::FlowyError; use flowy_error::{FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult}; use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
use crate::entities::*; use crate::entities::*;
use crate::manager::FolderManager; use crate::manager::FolderManager;
use crate::share::ImportParams; use crate::share::ImportParams;
fn upgrade_folder(
folder_manager: AFPluginState<Weak<FolderManager>>,
) -> FlowyResult<Arc<FolderManager>> {
let folder = folder_manager
.upgrade()
.ok_or(FlowyError::internal().context("The folder manager is already dropped"))?;
Ok(folder)
}
#[tracing::instrument(level = "debug", skip(data, folder), err)] #[tracing::instrument(level = "debug", skip(data, folder), err)]
pub(crate) async fn create_workspace_handler( pub(crate) async fn create_workspace_handler(
data: AFPluginData<CreateWorkspacePayloadPB>, data: AFPluginData<CreateWorkspacePayloadPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<WorkspacePB, FlowyError> { ) -> DataResult<WorkspacePB, FlowyError> {
let folder = upgrade_folder(folder)?;
let params: CreateWorkspaceParams = data.into_inner().try_into()?; let params: CreateWorkspaceParams = data.into_inner().try_into()?;
let workspace = folder.create_workspace(params).await?; let workspace = folder.create_workspace(params).await?;
data_result_ok(workspace.into()) data_result_ok(workspace.into())
@ -19,8 +29,9 @@ pub(crate) async fn create_workspace_handler(
#[tracing::instrument(level = "debug", skip(folder), err)] #[tracing::instrument(level = "debug", skip(folder), err)]
pub(crate) async fn get_workspace_views_handler( pub(crate) async fn get_workspace_views_handler(
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<RepeatedViewPB, FlowyError> { ) -> DataResult<RepeatedViewPB, FlowyError> {
let folder = upgrade_folder(folder)?;
let child_views = folder.get_current_workspace_views().await?; let child_views = folder.get_current_workspace_views().await?;
let repeated_view: RepeatedViewPB = child_views.into(); let repeated_view: RepeatedViewPB = child_views.into();
data_result_ok(repeated_view) data_result_ok(repeated_view)
@ -29,8 +40,9 @@ pub(crate) async fn get_workspace_views_handler(
#[tracing::instrument(level = "debug", skip(data, folder), err)] #[tracing::instrument(level = "debug", skip(data, folder), err)]
pub(crate) async fn open_workspace_handler( pub(crate) async fn open_workspace_handler(
data: AFPluginData<WorkspaceIdPB>, data: AFPluginData<WorkspaceIdPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<WorkspacePB, FlowyError> { ) -> DataResult<WorkspacePB, FlowyError> {
let folder = upgrade_folder(folder)?;
let params: WorkspaceIdPB = data.into_inner(); let params: WorkspaceIdPB = data.into_inner();
match params.value { match params.value {
None => Err(FlowyError::workspace_id().context("workspace id should not be empty")), None => Err(FlowyError::workspace_id().context("workspace id should not be empty")),
@ -50,8 +62,9 @@ pub(crate) async fn open_workspace_handler(
#[tracing::instrument(level = "debug", skip(data, folder), err)] #[tracing::instrument(level = "debug", skip(data, folder), err)]
pub(crate) async fn read_workspaces_handler( pub(crate) async fn read_workspaces_handler(
data: AFPluginData<WorkspaceIdPB>, data: AFPluginData<WorkspaceIdPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<RepeatedWorkspacePB, FlowyError> { ) -> DataResult<RepeatedWorkspacePB, FlowyError> {
let folder = upgrade_folder(folder)?;
let params: WorkspaceIdPB = data.into_inner(); let params: WorkspaceIdPB = data.into_inner();
let workspaces = match params.value { let workspaces = match params.value {
None => folder.get_all_workspaces().await, None => folder.get_all_workspaces().await,
@ -67,8 +80,9 @@ pub(crate) async fn read_workspaces_handler(
#[tracing::instrument(level = "debug", skip(folder), err)] #[tracing::instrument(level = "debug", skip(folder), err)]
pub async fn get_current_workspace_setting_handler( pub async fn get_current_workspace_setting_handler(
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<WorkspaceSettingPB, FlowyError> { ) -> DataResult<WorkspaceSettingPB, FlowyError> {
let folder = upgrade_folder(folder)?;
let workspace = folder.get_current_workspace().await?; let workspace = folder.get_current_workspace().await?;
let latest_view: Option<ViewPB> = folder.get_current_view().await; let latest_view: Option<ViewPB> = folder.get_current_view().await;
data_result_ok(WorkspaceSettingPB { data_result_ok(WorkspaceSettingPB {
@ -79,8 +93,9 @@ pub async fn get_current_workspace_setting_handler(
pub(crate) async fn create_view_handler( pub(crate) async fn create_view_handler(
data: AFPluginData<CreateViewPayloadPB>, data: AFPluginData<CreateViewPayloadPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<ViewPB, FlowyError> { ) -> DataResult<ViewPB, FlowyError> {
let folder = upgrade_folder(folder)?;
let params: CreateViewParams = data.into_inner().try_into()?; let params: CreateViewParams = data.into_inner().try_into()?;
let set_as_current = params.set_as_current; let set_as_current = params.set_as_current;
let view = folder.create_view_with_params(params).await?; let view = folder.create_view_with_params(params).await?;
@ -92,8 +107,9 @@ pub(crate) async fn create_view_handler(
pub(crate) async fn create_orphan_view_handler( pub(crate) async fn create_orphan_view_handler(
data: AFPluginData<CreateOrphanViewPayloadPB>, data: AFPluginData<CreateOrphanViewPayloadPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<ViewPB, FlowyError> { ) -> DataResult<ViewPB, FlowyError> {
let folder = upgrade_folder(folder)?;
let params: CreateViewParams = data.into_inner().try_into()?; let params: CreateViewParams = data.into_inner().try_into()?;
let set_as_current = params.set_as_current; let set_as_current = params.set_as_current;
let view = folder.create_orphan_view_with_params(params).await?; let view = folder.create_orphan_view_with_params(params).await?;
@ -105,8 +121,9 @@ pub(crate) async fn create_orphan_view_handler(
pub(crate) async fn read_view_handler( pub(crate) async fn read_view_handler(
data: AFPluginData<ViewIdPB>, data: AFPluginData<ViewIdPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<ViewPB, FlowyError> { ) -> DataResult<ViewPB, FlowyError> {
let folder = upgrade_folder(folder)?;
let view_id: ViewIdPB = data.into_inner(); let view_id: ViewIdPB = data.into_inner();
let view_pb = folder.get_view(&view_id.value).await?; let view_pb = folder.get_view(&view_id.value).await?;
data_result_ok(view_pb) data_result_ok(view_pb)
@ -115,8 +132,9 @@ pub(crate) async fn read_view_handler(
#[tracing::instrument(level = "debug", skip(data, folder), err)] #[tracing::instrument(level = "debug", skip(data, folder), err)]
pub(crate) async fn update_view_handler( pub(crate) async fn update_view_handler(
data: AFPluginData<UpdateViewPayloadPB>, data: AFPluginData<UpdateViewPayloadPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
let params: UpdateViewParams = data.into_inner().try_into()?; let params: UpdateViewParams = data.into_inner().try_into()?;
folder.update_view_with_params(params).await?; folder.update_view_with_params(params).await?;
Ok(()) Ok(())
@ -124,8 +142,9 @@ pub(crate) async fn update_view_handler(
pub(crate) async fn delete_view_handler( pub(crate) async fn delete_view_handler(
data: AFPluginData<RepeatedViewIdPB>, data: AFPluginData<RepeatedViewIdPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
let params: RepeatedViewIdPB = data.into_inner(); let params: RepeatedViewIdPB = data.into_inner();
for view_id in &params.items { for view_id in &params.items {
let _ = folder.move_view_to_trash(view_id).await; let _ = folder.move_view_to_trash(view_id).await;
@ -135,8 +154,9 @@ pub(crate) async fn delete_view_handler(
pub(crate) async fn set_latest_view_handler( pub(crate) async fn set_latest_view_handler(
data: AFPluginData<ViewIdPB>, data: AFPluginData<ViewIdPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
let view_id: ViewIdPB = data.into_inner(); let view_id: ViewIdPB = data.into_inner();
let _ = folder.set_current_view(&view_id.value).await; let _ = folder.set_current_view(&view_id.value).await;
Ok(()) Ok(())
@ -144,8 +164,9 @@ pub(crate) async fn set_latest_view_handler(
pub(crate) async fn close_view_handler( pub(crate) async fn close_view_handler(
data: AFPluginData<ViewIdPB>, data: AFPluginData<ViewIdPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
let view_id: ViewIdPB = data.into_inner(); let view_id: ViewIdPB = data.into_inner();
let _ = folder.close_view(&view_id.value).await; let _ = folder.close_view(&view_id.value).await;
Ok(()) Ok(())
@ -154,8 +175,9 @@ pub(crate) async fn close_view_handler(
#[tracing::instrument(level = "debug", skip_all, err)] #[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn move_view_handler( pub(crate) async fn move_view_handler(
data: AFPluginData<MoveViewPayloadPB>, data: AFPluginData<MoveViewPayloadPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
let params: MoveViewParams = data.into_inner().try_into()?; let params: MoveViewParams = data.into_inner().try_into()?;
folder folder
.move_view(&params.view_id, params.from, params.to) .move_view(&params.view_id, params.from, params.to)
@ -165,8 +187,9 @@ pub(crate) async fn move_view_handler(
pub(crate) async fn move_nested_view_handler( pub(crate) async fn move_nested_view_handler(
data: AFPluginData<MoveNestedViewPayloadPB>, data: AFPluginData<MoveNestedViewPayloadPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
let params: MoveNestedViewParams = data.into_inner().try_into()?; let params: MoveNestedViewParams = data.into_inner().try_into()?;
folder folder
.move_nested_view(params.view_id, params.new_parent_id, params.prev_view_id) .move_nested_view(params.view_id, params.new_parent_id, params.prev_view_id)
@ -177,8 +200,9 @@ pub(crate) async fn move_nested_view_handler(
#[tracing::instrument(level = "debug", skip(data, folder), err)] #[tracing::instrument(level = "debug", skip(data, folder), err)]
pub(crate) async fn duplicate_view_handler( pub(crate) async fn duplicate_view_handler(
data: AFPluginData<ViewPB>, data: AFPluginData<ViewPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
let view: ViewPB = data.into_inner(); let view: ViewPB = data.into_inner();
folder.duplicate_view(&view.id).await?; folder.duplicate_view(&view.id).await?;
Ok(()) Ok(())
@ -186,8 +210,9 @@ pub(crate) async fn duplicate_view_handler(
#[tracing::instrument(level = "debug", skip(folder), err)] #[tracing::instrument(level = "debug", skip(folder), err)]
pub(crate) async fn read_trash_handler( pub(crate) async fn read_trash_handler(
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<RepeatedTrashPB, FlowyError> { ) -> DataResult<RepeatedTrashPB, FlowyError> {
let folder = upgrade_folder(folder)?;
let trash = folder.get_all_trash().await; let trash = folder.get_all_trash().await;
data_result_ok(trash.into()) data_result_ok(trash.into())
} }
@ -195,8 +220,9 @@ pub(crate) async fn read_trash_handler(
#[tracing::instrument(level = "debug", skip(identifier, folder), err)] #[tracing::instrument(level = "debug", skip(identifier, folder), err)]
pub(crate) async fn putback_trash_handler( pub(crate) async fn putback_trash_handler(
identifier: AFPluginData<TrashIdPB>, identifier: AFPluginData<TrashIdPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
folder.restore_trash(&identifier.id).await; folder.restore_trash(&identifier.id).await;
Ok(()) Ok(())
} }
@ -204,8 +230,9 @@ pub(crate) async fn putback_trash_handler(
#[tracing::instrument(level = "debug", skip(identifiers, folder), err)] #[tracing::instrument(level = "debug", skip(identifiers, folder), err)]
pub(crate) async fn delete_trash_handler( pub(crate) async fn delete_trash_handler(
identifiers: AFPluginData<RepeatedTrashIdPB>, identifiers: AFPluginData<RepeatedTrashIdPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
let trash_ids = identifiers.into_inner().items; let trash_ids = identifiers.into_inner().items;
for trash_id in trash_ids { for trash_id in trash_ids {
let _ = folder.delete_trash(&trash_id.id).await; let _ = folder.delete_trash(&trash_id.id).await;
@ -215,16 +242,18 @@ pub(crate) async fn delete_trash_handler(
#[tracing::instrument(level = "debug", skip(folder), err)] #[tracing::instrument(level = "debug", skip(folder), err)]
pub(crate) async fn restore_all_trash_handler( pub(crate) async fn restore_all_trash_handler(
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
folder.restore_all_trash().await; folder.restore_all_trash().await;
Ok(()) Ok(())
} }
#[tracing::instrument(level = "debug", skip(folder), err)] #[tracing::instrument(level = "debug", skip(folder), err)]
pub(crate) async fn delete_all_trash_handler( pub(crate) async fn delete_all_trash_handler(
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
folder.delete_all_trash().await; folder.delete_all_trash().await;
Ok(()) Ok(())
} }
@ -232,8 +261,9 @@ pub(crate) async fn delete_all_trash_handler(
#[tracing::instrument(level = "debug", skip(data, folder), err)] #[tracing::instrument(level = "debug", skip(data, folder), err)]
pub(crate) async fn import_data_handler( pub(crate) async fn import_data_handler(
data: AFPluginData<ImportPB>, data: AFPluginData<ImportPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let folder = upgrade_folder(folder)?;
let params: ImportParams = data.into_inner().try_into()?; let params: ImportParams = data.into_inner().try_into()?;
folder.import(params).await?; folder.import(params).await?;
Ok(()) Ok(())
@ -242,8 +272,9 @@ pub(crate) async fn import_data_handler(
#[tracing::instrument(level = "debug", skip(folder), err)] #[tracing::instrument(level = "debug", skip(folder), err)]
pub(crate) async fn get_folder_snapshots_handler( pub(crate) async fn get_folder_snapshots_handler(
data: AFPluginData<WorkspaceIdPB>, data: AFPluginData<WorkspaceIdPB>,
folder: AFPluginState<Arc<FolderManager>>, folder: AFPluginState<Weak<FolderManager>>,
) -> DataResult<RepeatedFolderSnapshotPB, FlowyError> { ) -> DataResult<RepeatedFolderSnapshotPB, FlowyError> {
let folder = upgrade_folder(folder)?;
if let Some(workspace_id) = &data.value { if let Some(workspace_id) = &data.value {
let snapshots = folder.get_folder_snapshots(workspace_id).await?; let snapshots = folder.get_folder_snapshots(workspace_id).await?;
data_result_ok(RepeatedFolderSnapshotPB { items: snapshots }) data_result_ok(RepeatedFolderSnapshotPB { items: snapshots })

View File

@ -1,4 +1,4 @@
use std::sync::Arc; use std::sync::Weak;
use strum_macros::Display; use strum_macros::Display;
@ -8,7 +8,7 @@ use lib_dispatch::prelude::*;
use crate::event_handler::*; use crate::event_handler::*;
use crate::manager::FolderManager; use crate::manager::FolderManager;
pub fn init(folder: Arc<FolderManager>) -> AFPlugin { pub fn init(folder: Weak<FolderManager>) -> AFPlugin {
AFPlugin::new().name("Flowy-Folder").state(folder) AFPlugin::new().name("Flowy-Folder").state(folder)
// Workspace // Workspace
.event(FolderEvent::CreateWorkspace, create_workspace_handler) .event(FolderEvent::CreateWorkspace, create_workspace_handler)

View File

@ -1,16 +1,14 @@
pub use collab_folder::core::ViewLayout;
pub mod entities; pub mod entities;
pub mod event_handler; pub mod event_handler;
pub mod event_map; pub mod event_map;
pub mod manager; pub mod manager;
mod notification; pub mod notification;
pub mod protobuf; pub mod protobuf;
mod user_default; mod user_default;
pub mod view_operation; pub mod view_operation;
pub mod deps;
pub mod share; pub mod share;
#[cfg(feature = "test_helper")] #[cfg(feature = "test_helper")]
mod test_helper; mod test_helper;
pub use collab_folder::core::ViewLayout;
pub use user_default::gen_workspace_id;

View File

@ -3,7 +3,7 @@ use std::ops::Deref;
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use appflowy_integrate::collab_builder::AppFlowyCollabBuilder; use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
use appflowy_integrate::{CollabPersistenceConfig, RocksCollabDB}; use appflowy_integrate::{CollabPersistenceConfig, CollabType, RocksCollabDB};
use collab::core::collab::{CollabRawData, MutexCollab}; use collab::core::collab::{CollabRawData, MutexCollab};
use collab::core::collab_state::SyncState; use collab::core::collab_state::SyncState;
use collab_folder::core::{ use collab_folder::core::{
@ -16,8 +16,8 @@ use tokio_stream::StreamExt;
use tracing::{event, Level}; use tracing::{event, Level};
use flowy_error::{ErrorCode, FlowyError, FlowyResult}; use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use flowy_folder_deps::cloud::FolderCloudService;
use crate::deps::{FolderCloudService, FolderUser};
use crate::entities::{ use crate::entities::{
view_pb_with_child_views, view_pb_without_child_views, ChildViewUpdatePB, CreateViewParams, view_pb_with_child_views, view_pb_without_child_views, ChildViewUpdatePB, CreateViewParams,
CreateWorkspaceParams, DeletedViewPB, FolderSnapshotPB, FolderSnapshotStatePB, FolderSyncStatePB, CreateWorkspaceParams, DeletedViewPB, FolderSnapshotPB, FolderSnapshotStatePB, FolderSyncStatePB,
@ -33,6 +33,13 @@ use crate::view_operation::{
create_view, gen_view_id, FolderOperationHandler, FolderOperationHandlers, create_view, gen_view_id, FolderOperationHandler, FolderOperationHandlers,
}; };
/// [FolderUser] represents the user for folder.
pub trait FolderUser: Send + Sync {
fn user_id(&self) -> Result<i64, FlowyError>;
fn token(&self) -> Result<Option<String>, FlowyError>;
fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError>;
}
pub struct FolderManager { pub struct FolderManager {
mutex_folder: Arc<MutexFolder>, mutex_folder: Arc<MutexFolder>,
collab_builder: Arc<AppFlowyCollabBuilder>, collab_builder: Arc<AppFlowyCollabBuilder>,
@ -172,13 +179,13 @@ impl FolderManager {
&self, &self,
uid: i64, uid: i64,
workspace_id: &str, workspace_id: &str,
collab_db: Arc<RocksCollabDB>, collab_db: Weak<RocksCollabDB>,
raw_data: CollabRawData, raw_data: CollabRawData,
) -> Result<Arc<MutexCollab>, FlowyError> { ) -> Result<Arc<MutexCollab>, FlowyError> {
let collab = self.collab_builder.build_with_config( let collab = self.collab_builder.build_with_config(
uid, uid,
workspace_id, workspace_id,
"workspace", CollabType::Folder,
collab_db, collab_db,
raw_data, raw_data,
&CollabPersistenceConfig::new().enable_snapshot(true), &CollabPersistenceConfig::new().enable_snapshot(true),
@ -186,8 +193,14 @@ impl FolderManager {
Ok(collab) Ok(collab)
} }
/// Initialize the folder with the given workspace id.
/// Fetch the folder updates from the cloud service and initialize the folder.
#[tracing::instrument(level = "debug", skip(self, user_id), err)] #[tracing::instrument(level = "debug", skip(self, user_id), err)]
pub async fn initialize_when_sign_in(&self, user_id: i64, workspace_id: &str) -> FlowyResult<()> { pub async fn initialize_with_workspace_id(
&self,
user_id: i64,
workspace_id: &str,
) -> FlowyResult<()> {
let folder_updates = self let folder_updates = self
.cloud_service .cloud_service
.get_folder_updates(workspace_id, user_id) .get_folder_updates(workspace_id, user_id)
@ -209,7 +222,9 @@ impl FolderManager {
Ok(()) Ok(())
} }
pub async fn initialize_when_sign_up( /// Initialize the folder for the new user.
/// Using the [DefaultFolderBuilder] to create the default workspace for the new user.
pub async fn initialize_with_new_user(
&self, &self,
user_id: i64, user_id: i64,
_token: &str, _token: &str,
@ -239,11 +254,6 @@ impl FolderManager {
FolderInitializeData::Data(folder_data), FolderInitializeData::Data(folder_data),
) )
.await?; .await?;
// send_notification(token, FolderNotification::DidCreateWorkspace)
// .payload(RepeatedWorkspacePB {
// items: vec![workspace_pb],
// })
// .send();
} else { } else {
// The folder data is loaded through the [FolderCloudService]. If the cloud service in use is // The folder data is loaded through the [FolderCloudService]. If the cloud service in use is
// [LocalServerFolderCloudServiceImpl], the folder data will be None because the Folder will load // [LocalServerFolderCloudServiceImpl], the folder data will be None because the Folder will load
@ -310,6 +320,15 @@ impl FolderManager {
self.with_folder(None, |folder| folder.workspaces.get_workspace(workspace_id)) self.with_folder(None, |folder| folder.workspaces.get_workspace(workspace_id))
} }
async fn get_current_workspace_id(&self) -> FlowyResult<String> {
self
.mutex_folder
.lock()
.as_ref()
.and_then(|folder| folder.get_current_workspace_id())
.ok_or(FlowyError::internal().context("Unexpected empty workspace id"))
}
fn with_folder<F, Output>(&self, default_value: Output, f: F) -> Output fn with_folder<F, Output>(&self, default_value: Output, f: F) -> Output
where where
F: FnOnce(&Folder) -> Output, F: FnOnce(&Folder) -> Output,
@ -327,6 +346,7 @@ impl FolderManager {
pub async fn create_view_with_params(&self, params: CreateViewParams) -> FlowyResult<View> { pub async fn create_view_with_params(&self, params: CreateViewParams) -> FlowyResult<View> {
let view_layout: ViewLayout = params.layout.clone().into(); let view_layout: ViewLayout = params.layout.clone().into();
let _workspace_id = self.get_current_workspace_id().await?;
let handler = self.get_handler(&view_layout)?; let handler = self.get_handler(&view_layout)?;
let user_id = self.user.user_id()?; let user_id = self.user.user_id()?;
let meta = params.meta.clone(); let meta = params.meta.clone();
@ -380,13 +400,10 @@ impl FolderManager {
#[tracing::instrument(level = "debug", skip(self), err)] #[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn close_view(&self, view_id: &str) -> Result<(), FlowyError> { pub(crate) async fn close_view(&self, view_id: &str) -> Result<(), FlowyError> {
let view = self if let Some(view) = self.with_folder(None, |folder| folder.views.get_view(view_id)) {
.with_folder(None, |folder| folder.views.get_view(view_id)) let handler = self.get_handler(&view.layout)?;
.ok_or_else(|| { handler.close_view(view_id).await?;
FlowyError::record_not_found().context("Can't find the view when closing the view") }
})?;
let handler = self.get_handler(&view.layout)?;
handler.close_view(view_id).await?;
Ok(()) Ok(())
} }

View File

@ -11,7 +11,7 @@ use crate::entities::{view_pb_without_child_views, WorkspacePB, WorkspaceSetting
const FOLDER_OBSERVABLE_SOURCE: &str = "Workspace"; const FOLDER_OBSERVABLE_SOURCE: &str = "Workspace";
#[derive(ProtoBuf_Enum, Debug, Default)] #[derive(ProtoBuf_Enum, Debug, Default)]
pub(crate) enum FolderNotification { pub enum FolderNotification {
#[default] #[default]
Unknown = 0, Unknown = 0,
/// Trigger after creating a workspace /// Trigger after creating a workspace

View File

@ -59,10 +59,6 @@ impl DefaultFolderBuilder {
} }
} }
pub fn gen_workspace_id() -> String {
uuid::Uuid::new_v4().to_string()
}
impl From<&ParentChildViews> for ViewPB { impl From<&ParentChildViews> for ViewPB {
fn from(value: &ParentChildViews) -> Self { fn from(value: &ParentChildViews) -> Self {
view_pb_with_child_views( view_pb_with_child_views(

View File

@ -274,16 +274,19 @@ pub async fn move_view(
parent_id: String, parent_id: String,
prev_view_id: Option<String>, prev_view_id: Option<String>,
) { ) {
let request = MoveNestedViewPayloadPB { let payload = MoveNestedViewPayloadPB {
view_id, view_id,
new_parent_id: parent_id, new_parent_id: parent_id,
prev_view_id, prev_view_id,
}; };
EventBuilder::new(sdk.clone()) let error = EventBuilder::new(sdk.clone())
.event(MoveNestedView) .event(MoveNestedView)
.payload(request) .payload(payload)
.async_send() .async_send()
.await; .await
.error();
assert!(error.is_none());
} }
pub async fn update_view( pub async fn update_view(
sdk: &FlowyCoreTest, sdk: &FlowyCoreTest,

View File

@ -7,25 +7,18 @@ edition = "2018"
[dependencies] [dependencies]
lib-dispatch = { path = "../lib-dispatch" } lib-dispatch = { path = "../lib-dispatch" }
flowy-error = { path = "../flowy-error", features = ["adaptor_reqwest", "adaptor_server_error"] }
flowy-derive = { path = "../../../shared-lib/flowy-derive" }
protobuf = {version = "2.28.0"} protobuf = {version = "2.28.0"}
anyhow = "1.0"
thiserror = "1.0"
bytes = { version = "1.4" } bytes = { version = "1.4" }
strum_macros = "0.21"
tracing = { version = "0.1"} tracing = { version = "0.1"}
[features] [features]
http_server = [] http_server = []
dart = [ dart = [
"flowy-codegen/dart", "flowy-codegen/dart",
"flowy-error/dart",
] ]
ts = [ ts = [
"flowy-codegen/ts", "flowy-codegen/ts",
"flowy-error/ts",
] ]
[build-dependencies] [build-dependencies]

View File

@ -1,10 +1,10 @@
fn main() { fn main() {
let crate_name = env!("CARGO_PKG_NAME"); // let crate_name = env!("CARGO_PKG_NAME");
flowy_codegen::protobuf_file::gen(crate_name); // flowy_codegen::protobuf_file::gen(crate_name);
//
#[cfg(feature = "dart")] // #[cfg(feature = "dart")]
flowy_codegen::dart_event::gen(crate_name); // flowy_codegen::dart_event::gen(crate_name);
//
#[cfg(feature = "ts")] // #[cfg(feature = "ts")]
flowy_codegen::ts_event::gen(crate_name); // flowy_codegen::ts_event::gen(crate_name);
} }

View File

@ -1,29 +1,29 @@
use flowy_derive::{ProtoBuf, ProtoBuf_Enum}; // use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
//
#[derive(ProtoBuf_Enum, Debug, Clone, Eq, PartialEq, Default)] // #[derive(ProtoBuf_Enum, Debug, Clone, Eq, PartialEq, Default)]
pub enum NetworkTypePB { // pub enum NetworkTypePB {
#[default] // #[default]
Unknown = 0, // Unknown = 0,
Wifi = 1, // Wifi = 1,
Cell = 2, // Cell = 2,
Ethernet = 3, // Ethernet = 3,
Bluetooth = 4, // Bluetooth = 4,
VPN = 5, // VPN = 5,
} // }
//
impl NetworkTypePB { // impl NetworkTypePB {
pub fn is_connect(&self) -> bool { // pub fn is_connect(&self) -> bool {
match self { // match self {
NetworkTypePB::Unknown | NetworkTypePB::Bluetooth => false, // NetworkTypePB::Unknown | NetworkTypePB::Bluetooth => false,
NetworkTypePB::Wifi | NetworkTypePB::Cell | NetworkTypePB::Ethernet | NetworkTypePB::VPN => { // NetworkTypePB::Wifi | NetworkTypePB::Cell | NetworkTypePB::Ethernet | NetworkTypePB::VPN => {
true // true
}, // },
} // }
} // }
} // }
//
#[derive(ProtoBuf, Debug, Default, Clone)] // #[derive(ProtoBuf, Debug, Default, Clone)]
pub struct NetworkStatePB { // pub struct NetworkStatePB {
#[pb(index = 1)] // #[pb(index = 1)]
pub ty: NetworkTypePB, // pub ty: NetworkTypePB,
} // }

View File

@ -1,19 +1,5 @@
use strum_macros::Display;
use flowy_derive::{Flowy_Event, ProtoBuf_Enum};
use lib_dispatch::prelude::*; use lib_dispatch::prelude::*;
use crate::handlers::*;
pub fn init() -> AFPlugin { pub fn init() -> AFPlugin {
AFPlugin::new() AFPlugin::new().name("Flowy-Network")
.name("Flowy-Network")
.event(NetworkEvent::UpdateNetworkType, update_network_ty)
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
#[event_err = "FlowyError"]
pub enum NetworkEvent {
#[event(input = "NetworkStatePB")]
UpdateNetworkType = 0,
} }

View File

@ -1,8 +1,3 @@
use flowy_error::FlowyError; // pub async fn update_network_ty(_data: AFPluginData<NetworkStatePB>) -> Result<(), FlowyError> {
use lib_dispatch::prelude::AFPluginData; // Ok(())
// }
use crate::entities::NetworkStatePB;
pub async fn update_network_ty(_data: AFPluginData<NetworkStatePB>) -> Result<(), FlowyError> {
Ok(())
}

View File

@ -1,4 +1,3 @@
pub mod entities; pub mod entities;
pub mod event_map; pub mod event_map;
mod handlers; mod handlers;
pub mod protobuf;

View File

@ -5,7 +5,6 @@ use flowy_error::{ErrorCode, FlowyError};
pub const ENABLE_SUPABASE_SYNC: &str = "ENABLE_SUPABASE_SYNC"; pub const ENABLE_SUPABASE_SYNC: &str = "ENABLE_SUPABASE_SYNC";
pub const SUPABASE_URL: &str = "SUPABASE_URL"; pub const SUPABASE_URL: &str = "SUPABASE_URL";
pub const SUPABASE_ANON_KEY: &str = "SUPABASE_ANON_KEY"; pub const SUPABASE_ANON_KEY: &str = "SUPABASE_ANON_KEY";
pub const SUPABASE_KEY: &str = "SUPABASE_KEY";
pub const SUPABASE_JWT_SECRET: &str = "SUPABASE_JWT_SECRET"; pub const SUPABASE_JWT_SECRET: &str = "SUPABASE_JWT_SECRET";
pub const SUPABASE_DB: &str = "SUPABASE_DB"; pub const SUPABASE_DB: &str = "SUPABASE_DB";
@ -13,44 +12,38 @@ pub const SUPABASE_DB_USER: &str = "SUPABASE_DB_USER";
pub const SUPABASE_DB_PASSWORD: &str = "SUPABASE_DB_PASSWORD"; pub const SUPABASE_DB_PASSWORD: &str = "SUPABASE_DB_PASSWORD";
pub const SUPABASE_DB_PORT: &str = "SUPABASE_DB_PORT"; pub const SUPABASE_DB_PORT: &str = "SUPABASE_DB_PORT";
/// The configuration for the postgres database. It supports deserializing from the JSON string that is
/// passed from the frontend application. [AppFlowyEnv::parser]
#[derive(Debug, Serialize, Deserialize, Clone, Default)] #[derive(Debug, Serialize, Deserialize, Clone, Default)]
pub struct SupabaseConfiguration { pub struct SupabaseConfiguration {
/// The url of the supabase server. /// The url of the supabase server.
pub url: String, pub url: String,
/// The key of the supabase server. /// The key of the supabase server.
pub key: String, pub anon_key: String,
/// The secret used to sign the JWT tokens. /// The secret used to sign the JWT tokens.
pub jwt_secret: String, pub jwt_secret: String,
/// Whether to enable the supabase sync.
/// User can disable it by injecting the environment variable ENABLE_SUPABASE_SYNC=false
pub enable_sync: bool, pub enable_sync: bool,
pub postgres_config: PostgresConfiguration,
} }
impl SupabaseConfiguration { impl SupabaseConfiguration {
/// Load the configuration from the environment variables.
/// SUPABASE_URL=https://<your-supabase-url>.supabase.co
/// SUPABASE_KEY=<your-supabase-key>
/// SUPABASE_JWT_SECRET=<your-supabase-jwt-secret>
///
pub fn from_env() -> Result<Self, FlowyError> { pub fn from_env() -> Result<Self, FlowyError> {
let postgres_config = PostgresConfiguration::from_env()?;
Ok(Self { Ok(Self {
enable_sync: std::env::var(ENABLE_SUPABASE_SYNC) enable_sync: std::env::var(ENABLE_SUPABASE_SYNC)
.map(|v| v == "true") .map(|v| v == "true")
.unwrap_or(false), .unwrap_or(false),
url: std::env::var(SUPABASE_URL) url: std::env::var(SUPABASE_URL)
.map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_URL"))?, .map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_URL"))?,
key: std::env::var(SUPABASE_KEY) anon_key: std::env::var(SUPABASE_ANON_KEY)
.map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_KEY"))?, .map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_ANON_KEY"))?,
jwt_secret: std::env::var(SUPABASE_JWT_SECRET).map_err(|_| { jwt_secret: std::env::var(SUPABASE_JWT_SECRET).map_err(|_| {
FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_JWT_SECRET") FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_JWT_SECRET")
})?, })?,
postgres_config,
}) })
} }
/// Write the configuration to the environment variables.
pub fn write_env(&self) { pub fn write_env(&self) {
if self.enable_sync { if self.enable_sync {
std::env::set_var(ENABLE_SUPABASE_SYNC, "true"); std::env::set_var(ENABLE_SUPABASE_SYNC, "true");
@ -58,45 +51,7 @@ impl SupabaseConfiguration {
std::env::set_var(ENABLE_SUPABASE_SYNC, "false"); std::env::set_var(ENABLE_SUPABASE_SYNC, "false");
} }
std::env::set_var(SUPABASE_URL, &self.url); std::env::set_var(SUPABASE_URL, &self.url);
std::env::set_var(SUPABASE_KEY, &self.key); std::env::set_var(SUPABASE_ANON_KEY, &self.anon_key);
std::env::set_var(SUPABASE_JWT_SECRET, &self.jwt_secret); std::env::set_var(SUPABASE_JWT_SECRET, &self.jwt_secret);
self.postgres_config.write_env();
}
}
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct PostgresConfiguration {
pub url: String,
pub user_name: String,
pub password: String,
pub port: u16,
}
impl PostgresConfiguration {
pub fn from_env() -> Result<Self, FlowyError> {
let url = std::env::var(SUPABASE_DB)
.map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_DB"))?;
let user_name = std::env::var(SUPABASE_DB_USER)
.map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_DB_USER"))?;
let password = std::env::var(SUPABASE_DB_PASSWORD)
.map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_DB_PASSWORD"))?;
let port = std::env::var(SUPABASE_DB_PORT)
.map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_DB_PORT"))?
.parse::<u16>()
.map_err(|_e| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_DB_PORT"))?;
Ok(Self {
url,
user_name,
password,
port,
})
}
pub fn write_env(&self) {
std::env::set_var(SUPABASE_DB, &self.url);
std::env::set_var(SUPABASE_DB_USER, &self.user_name);
std::env::set_var(SUPABASE_DB_PASSWORD, &self.password);
std::env::set_var(SUPABASE_DB_PORT, self.port.to_string());
} }
} }

View File

@ -7,6 +7,7 @@ edition = "2021"
[dependencies] [dependencies]
tracing = { version = "0.1" } tracing = { version = "0.1" }
futures = "0.3.26"
futures-util = "0.3.26" futures-util = "0.3.26"
reqwest = "0.11.14" reqwest = "0.11.14"
hyper = "0.14" hyper = "0.14"
@ -14,40 +15,31 @@ config = { version = "0.10.1", default-features = false, features = ["yaml"] }
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
serde-aux = "4.2.0" serde-aux = "4.2.0"
nanoid = "0.4.0"
thiserror = "1.0" thiserror = "1.0"
tokio = { version = "1.26", features = ["sync"]} tokio = { version = "1.26", features = ["sync"]}
parking_lot = "0.12" parking_lot = "0.12"
lazy_static = "1.4.0" lazy_static = "1.4.0"
bytes = "1.0.1" bytes = { version = "1.0.1", features = ["serde"] }
tokio-retry = "0.3" tokio-retry = "0.3"
anyhow = "1.0" anyhow = "1.0"
uuid = { version = "1.3.3", features = ["v4"] } uuid = { version = "1.3.3", features = ["v4"] }
chrono = { version = "0.4.22", default-features = false, features = ["clock"] } chrono = { version = "0.4.22", default-features = false, features = ["clock"] }
appflowy-integrate = { version = "0.1.0" } collab = { version = "0.1.0" }
collab-plugins = { version = "0.1.0" }
postgrest = "1.0"
tokio-postgres = { version = "0.7.8", optional = true, features = ["with-uuid-1","with-chrono-0_4"] }
deadpool-postgres = "0.10.5"
refinery= { version = "0.8.10", optional = true, features = ["tokio-postgres"] }
async-stream = "0.3.4"
futures = "0.3.26"
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-user = { path = "../flowy-user" }
flowy-folder2 = { path = "../flowy-folder2" }
flowy-database2 = { path = "../flowy-database2" }
flowy-document2 = { path = "../flowy-document2" }
flowy-error = { path = "../flowy-error" }
flowy-server-config = { path = "../flowy-server-config" }
collab-folder = { version = "0.1.0" }
collab-document = { version = "0.1.0" } collab-document = { version = "0.1.0" }
hex = "0.4.3"
postgrest = "1.0"
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-user-deps = { path = "../flowy-user-deps" }
flowy-folder-deps = { path = "../flowy-folder-deps" }
flowy-database-deps = { path = "../flowy-database-deps" }
flowy-document-deps = { path = "../flowy-document-deps" }
flowy-error = { path = "../flowy-error", features = ["impl_from_postgres", "impl_from_serde", "impl_from_reqwest"] }
flowy-server-config = { path = "../flowy-server-config" }
[dev-dependencies] [dev-dependencies]
uuid = { version = "1.3.3", features = ["v4"] } uuid = { version = "1.3.3", features = ["v4"] }
tracing-subscriber = { version = "0.3.3", features = ["env-filter"] } tracing-subscriber = { version = "0.3.3", features = ["env-filter"] }
dotenv = "0.15.0" dotenv = "0.15.0"
yrs = "0.16.5"
[features]
default = ["postgres_storage"]
postgres_storage = ["tokio-postgres", "refinery", ]

View File

@ -0,0 +1,108 @@
# AppFlowy Cloud Architecture
AppFlowy supports multiple cloud solutions. Users can choose their preferred cloud provider, such as Supabase, Firebase,
AWS, or our own AppFlowyCloud (Self-hosted server).
![](architecture-Application.png)
## Design
AppFlowy uses the [AppFlowyServer] trait to abstract the cloud provider. Each cloud provider implements the [AppFlowyServer]
trait, as the image below shows. Users can choose their preferred cloud provider or simply use the default option, the LocalServer. When using the
LocalServer, data is stored on the local file system. Users can migrate to a cloud provider if needed. For instance, one
could migrate from LocalServer to AppFlowyCloud. This migration would create a new user in the cloud and transfer all the
data from the local database to the cloud.
![](architecture.png)
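A minimal, hedged Rust sketch of this provider abstraction is below. The service traits are stubbed out and only two of them are shown; the real trait lives in the flowy-server crate touched by this change, so treat the sketch as an illustration of the shape rather than the actual API.

```rust
use std::sync::Arc;

// Stand-in service traits. The real ones live in flowy-user-deps and
// flowy-folder-deps; they are reduced to empty traits here for brevity.
trait UserService: Send + Sync {}
trait FolderCloudService: Send + Sync {}

struct LocalUserService;
impl UserService for LocalUserService {}

struct LocalFolderCloudService;
impl FolderCloudService for LocalFolderCloudService {}

// Mirrors the shape of the AppFlowyServer trait from this change, trimmed to two services.
trait AppFlowyServer: Send + Sync + 'static {
  fn enable_sync(&self, _enable: bool) {}
  fn user_service(&self) -> Arc<dyn UserService>;
  fn folder_service(&self) -> Arc<dyn FolderCloudService>;
}

// The default provider: data never leaves the local file system.
struct LocalServer;

impl AppFlowyServer for LocalServer {
  fn user_service(&self) -> Arc<dyn UserService> {
    Arc::new(LocalUserService)
  }

  fn folder_service(&self) -> Arc<dyn FolderCloudService> {
    Arc::new(LocalFolderCloudService)
  }
}

fn main() {
  // Call sites only ever see the trait object, so swapping LocalServer for a
  // SupabaseServer or AppFlowyCloud implementation does not touch this code.
  let server: Arc<dyn AppFlowyServer> = Arc::new(LocalServer);
  let _users = server.user_service();
  let _folders = server.folder_service();
}
```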
## AppFlowy Cloud Implementation (WIP)
### RESTful API
### Table schema
## Supabase Implementation
### Table schema
![](./schema.png)
1. `af_roles` table: This table contains a list of roles that are used in your application, such as 'Owner', 'Member', and 'Guest'.
2. `af_permissions` table: This table stores permissions that are used in your application. Each permission has a name, a description, and an access level.
3. `af_role_permissions` table: This is a many-to-many relation table between roles and permissions. It represents which permissions a role has.
4. `af_user` table: This stores the details of users like uuid, email, uid, name, created_at. Here, uid is an auto-incrementing integer that uniquely identifies a user.
5. `af_workspace` table: This table contains all the workspaces. Each workspace has an owner which is associated with the uid of a user in the `af_user` table.
6. `af_workspace_member` table: This table maintains a list of all the members associated with a workspace and their roles.
7. `af_collab` and `af_collab_member` tables: These tables store the collaborations and their members respectively. Each collaboration has an owner and a workspace associated with it.
8. `af_collab_update`, `af_collab_update_document`, `af_collab_update_database`, `af_collab_update_w_database`, `af_collab_update_folder`, `af_database_row_update` tables: These tables are used for handling updates to collaborations (a read-side sketch follows this list).
9. `af_collab_statistics`, `af_collab_snapshot`, `af_collab_state`: These tables and view are used for maintaining statistics and snapshots of collaborations.
10. `af_user_profile_view` view: This view is used to get the latest workspace_id for each user.
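To make the update tables concrete, here is a hedged read-side sketch using tokio-postgres (a dependency added by this change). The column names come from the schema diagram above; the connection handling and query text are assumptions for illustration, not the crate's own API surface for collab updates.

```rust
use tokio_postgres::NoTls;

// Hedged sketch: fetch the ordered update stream for one collab object,
// assuming the af_collab_update layout described above (oid, key, value).
async fn load_collab_updates(
  conn_str: &str,
  oid: &str,
) -> Result<Vec<Vec<u8>>, tokio_postgres::Error> {
  let (client, connection) = tokio_postgres::connect(conn_str, NoTls).await?;
  // The connection object drives the socket; it has to be polled on its own task.
  tokio::spawn(async move {
    if let Err(e) = connection.await {
      eprintln!("postgres connection error: {e}");
    }
  });

  // Updates are applied in insertion order, hence the ORDER BY on the key column.
  let rows = client
    .query(
      "SELECT value FROM af_collab_update WHERE oid = $1 ORDER BY key ASC",
      &[&oid],
    )
    .await?;
  Ok(rows.iter().map(|row| row.get::<_, Vec<u8>>(0)).collect())
}
```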
![](./schema-Triggers_in_Database.png)
Here's a detailed description for each of these triggers:
1. `create_af_workspace_trigger`:
This trigger is designed to automate the process of workspace creation in the `af_workspace` table after a new user is inserted into the `af_user` table. When a new user is added, this trigger fires and inserts a new record into the `af_workspace` table, setting the `owner_uid` to the UID of the new user (a verification sketch for this trigger and the next one follows this list).
2. `manage_af_workspace_member_role_trigger`:
This trigger helps to manage the roles of workspace members. After an insert operation on the `af_workspace` table, this trigger automatically fires and creates a new record in the `af_workspace_member` table. The new record identifies the user as a member of the workspace with the role 'Owner'. This ensures that every new workspace has an owner.
3. `insert_into_af_collab_trigger`:
The purpose of this trigger is to ensure consistency between the `af_collab_update` and `af_collab` tables. When an insert operation is about to be performed on the `af_collab_update` table, this trigger fires before the insert operation. It checks if a corresponding collaboration exists in the `af_collab` table using the oid and uid. If a corresponding collaboration does not exist, the trigger creates one, using the oid, uid, and current timestamp. This way, every collab update operation corresponds to a valid collaboration.
4. `insert_into_af_collab_member_trigger`:
This trigger helps to manage the membership of users in collaborations. After a new collaboration is inserted into the `af_collab` table, this trigger fires. It checks if a corresponding collaboration member exists in the `af_collab_member` table. If a corresponding member does not exist, the trigger creates one, using the collaboration id and user id. This ensures that every collaboration has at least one member.
5. `af_collab_snapshot_update_edit_count_trigger`:
This trigger is designed to keep track of the number of edits on each collaboration snapshot in the `af_collab_snapshot` table. When an update operation is performed on the `af_collab_snapshot` table, this trigger fires. It increments the `edit_count` of the corresponding record in the `af_collab_snapshot` table by one. This ensures that the application can keep track of how many times each collaboration snapshot has been edited.
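The first two triggers can be exercised end to end from a test. The sketch below is an assumption-laden illustration using tokio-postgres: it inserts a user and reads back the workspace and membership rows the triggers should have created. `gen_random_uuid()`, the exact column lists, and the connection handling are assumptions, not part of this change.

```rust
use tokio_postgres::NoTls;

// Hedged verification sketch for create_af_workspace_trigger and
// manage_af_workspace_member_role_trigger.
async fn verify_workspace_triggers(conn_str: &str) -> Result<(), tokio_postgres::Error> {
  let (client, connection) = tokio_postgres::connect(conn_str, NoTls).await?;
  tokio::spawn(async move {
    if let Err(e) = connection.await {
      eprintln!("postgres connection error: {e}");
    }
  });

  // Inserting a user should fire create_af_workspace_trigger ...
  let row = client
    .query_one(
      "INSERT INTO af_user (uuid, email, name) VALUES (gen_random_uuid(), $1, $2) RETURNING uid",
      &[&"test@appflowy.io", &"tester"],
    )
    .await?;
  let uid: i64 = row.get(0);

  // ... which in turn fires manage_af_workspace_member_role_trigger.
  let workspaces = client
    .query("SELECT workspace_id FROM af_workspace WHERE owner_uid = $1", &[&uid])
    .await?;
  let members = client
    .query("SELECT role_id FROM af_workspace_member WHERE uid = $1", &[&uid])
    .await?;
  assert_eq!(workspaces.len(), 1);
  assert_eq!(members.len(), 1);
  Ok(())
}
```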
### Supabase configuration
#### Test
In order to run the tests, you need to set up the `.env.test` file.
```dotenv
# Supabase configuration
SUPABASE_URL="your-supabase-url"
SUPABASE_ANON_KEY="your-supabase-anonymous-key"
SUPABASE_KEY="your-supabase-key"
SUPABASE_JWT_SECRET="your-supabase-jwt-secret"
# Supabase Database configuration
SUPABASE_DB="your-supabase-db-url"
SUPABASE_DB_USER="your-db-username"
SUPABASE_DB_PORT="your-db-port"
SUPABASE_DB_PASSWORD="your-db-password"
```
1. `SUPABASE_URL`: This is the URL of your Supabase server instance. Your application will use this URL to interact with the Supabase service.
2. `SUPABASE_ANON_KEY`: This is the anonymous API key from Supabase, used for operations that don't require user authentication. Operations performed with this key are done as the anonymous role in the database.
3. `SUPABASE_KEY`: This is the API key with higher privileges from Supabase. It is generally used for server-side operations that require more permissions than an anonymous user.
4. `SUPABASE_JWT_SECRET`: This is the secret used to verify JWT tokens generated by Supabase. JWT or JSON Web Token is a standard method for securely transferring data between parties as a JSON object.
5. `SUPABASE_DB`: This is the URL for the database your Supabase server instance is using.
6. `SUPABASE_DB_USER`: This is the username used to authenticate with the Supabase database, typically 'postgres', the common PostgreSQL default.
7. `SUPABASE_DB_PORT`: This is the port number where your Supabase database service is accessible. The default PostgreSQL port is 5432.
8. `SUPABASE_DB_PASSWORD`: This is the password used to authenticate the `SUPABASE_DB_USER` with the Supabase database.
For example, to run the Supabase tests located in the flowy-test crate, you need to put the `.env.test` file under
the flowy-test folder.
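As a rough illustration of the test setup, a test might load `.env.test` before reading these variables. The sketch below assumes the `dotenv` dev-dependency listed in this change; only the variable names are taken from the table above, everything else is illustrative.

```rust
// Hedged sketch of picking up .env.test in a test binary.
fn load_test_env() {
  // Ignore the error so tests can also run with variables exported in the shell.
  let _ = dotenv::from_filename(".env.test");

  let url = std::env::var("SUPABASE_URL").expect("Missing SUPABASE_URL");
  let anon_key = std::env::var("SUPABASE_ANON_KEY").expect("Missing SUPABASE_ANON_KEY");
  println!("connecting to {url} with anon key of length {}", anon_key.len());
}
```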

Binary file not shown (image added, 61 KiB)

View File

@ -0,0 +1,78 @@
@startuml
title "Application"
left to right direction
package "AppFlowy Application" {
[User]
}
cloud "Supabase Server" {
[RESTful Component]
[Realtime Component]
[Postgres DB]
}
database "LocalServer" {
[Local Server Component]
}
cloud "AppFlowy Cloud Server" {
[RESTful Component] as [AppFlowy RESTful Component]
[Realtime Component] as [AppFlowy Realtime Component]
[Postgres DB] as [AppFlowy Postgres DB]
}
User --> [AppFlowy Application]
[AppFlowy Application] --> [Local Server Component] : Connect
[AppFlowy Application] --> [RESTful Component] : RESTful API Communication
[AppFlowy Application] <..> [Realtime Component] : WebSocket Communication
[AppFlowy Application] --> [AppFlowy RESTful Component] : RESTful API Communication
[AppFlowy Application] <..> [AppFlowy Realtime Component] : WebSocket Communication
@enduml
@startuml
left to right direction
interface AppFlowyServer {
+ enable_sync(_enable: bool)
+ user_service(): Arc<dyn UserService>
+ folder_service(): Arc<dyn FolderCloudService>
+ database_service(): Arc<dyn DatabaseCloudService>
+ document_service(): Arc<dyn DocumentCloudService>
+ collab_storage(): Option<Arc<dyn RemoteCollabStorage>>
}
class SupabaseServer {
+ enable_sync(_enable: bool)
+ user_service(): Arc<dyn UserService>
+ folder_service(): Arc<dyn FolderCloudService>
+ database_service(): Arc<dyn DatabaseCloudService>
+ document_service(): Arc<dyn DocumentCloudService>
+ collab_storage(): Option<Arc<dyn RemoteCollabStorage>>
}
class SelfHostServer {
+ user_service(): Arc<dyn UserService>
+ folder_service(): Arc<dyn FolderCloudService>
+ database_service(): Arc<dyn DatabaseCloudService>
+ document_service(): Arc<dyn DocumentCloudService>
+ collab_storage(): Option<Arc<dyn RemoteCollabStorage>>
}
class LocalServer {
+ user_service(): Arc<dyn UserService>
+ folder_service(): Arc<dyn FolderCloudService>
+ database_service(): Arc<dyn DatabaseCloudService>
+ document_service(): Arc<dyn DocumentCloudService>
+ collab_storage(): Option<Arc<dyn RemoteCollabStorage>>
}
SupabaseServer -u-|> AppFlowyServer
SelfHostServer -u-|> AppFlowyServer
LocalServer -u-|> AppFlowyServer
@enduml

Binary file not shown (image added, 59 KiB)

Binary file not shown (image added, 75 KiB)

View File

@ -0,0 +1,203 @@
@startuml
left to right direction
entity "af_roles" as roles {
id : SERIAL (PK)
name : TEXT
}
entity "af_permissions" as permissions {
id : SERIAL (PK)
name : VARCHAR(255)
access_level : INTEGER
description : TEXT
}
entity "af_role_permissions" as role_permissions {
role_id : INT (FK af_roles.id)
permission_id : INT (FK af_permissions.id)
--
(role_id, permission_id) : PK
}
entity "af_user" as user {
uuid : UUID (PK)
email : TEXT
uid : BIGSERIAL
name : TEXT
created_at : TIMESTAMP WITH TIME ZONE
}
entity "af_workspace" as workspace {
workspace_id : UUID (PK)
database_storage_id : UUID
owner_uid : BIGINT (FK af_user.uid)
created_at : TIMESTAMP WITH TIME ZONE
workspace_type : INTEGER
workspace_name : TEXT
}
entity "af_workspace_member" as workspace_member {
uid : BIGINT
role_id : INT (FK af_roles.id)
workspace_id : UUID (FK af_workspace.workspace_id)
created_at : TIMESTAMP WITH TIME ZONE
updated_at : TIMESTAMP WITH TIME ZONE
--
(uid, workspace_id) : PK
}
entity "af_collab" as collab {
oid : TEXT (PK)
owner_uid : BIGINT
workspace_id : UUID (FK af_workspace.workspace_id)
access_level : INTEGER
created_at : TIMESTAMP WITH TIME ZONE
}
entity "af_collab_update" as collab_update {
oid : TEXT (FK af_collab.oid)
key : BIGSERIAL
value : BYTEA
value_size : INTEGER
partition_key : INTEGER
uid : BIGINT
md5 : TEXT
created_at : TIMESTAMP WITH TIME ZONE
workspace_id : UUID (FK af_workspace.workspace_id)
--
(oid, key, partition_key) : PK
}
entity "af_collab_update_document" as af_collab_update_document {
Inherits af_collab_update (partition_key = 0)
}
entity "af_collab_update_database" as af_collab_update_database {
Inherits af_collab_update (partition_key = 1)
}
entity "af_collab_update_w_database" as af_collab_update_w_database {
Inherits af_collab_update (partition_key = 2)
}
entity "af_collab_update_folder" as af_collab_update_folder {
Inherits af_collab_update (partition_key = 3)
}
af_collab_update_document -u-|> collab_update
af_collab_update_database -u-|> collab_update
af_collab_update_w_database -u-|> collab_update
af_collab_update_folder -u-|> collab_update
entity "af_database_row_update" as database_row_update {
oid : TEXT
key : BIGSERIAL
value : BYTEA
value_size : INTEGER
partition_key : INTEGER
uid : BIGINT
md5 : TEXT
workspace_id : UUID (FK af_workspace.workspace_id)
--
(oid, key) : PK
}
entity "af_collab_member" as collab_member {
uid : BIGINT (FK af_user.uid)
oid : TEXT (FK af_collab.oid)
role_id : INTEGER (FK af_roles.id)
--
(uid, oid) : PK
}
entity "af_collab_statistics" as collab_statistics {
oid : TEXT (PK)
edit_count : BIGINT
}
entity "af_collab_snapshot" as collab_snapshot {
sid : BIGSERIAL (PK)
oid : TEXT (FK af_collab.oid)
name : TEXT
blob : BYTEA
blob_size : INTEGER
edit_count : BIGINT
created_at : TIMESTAMP WITH TIME ZONE
}
roles <-- role_permissions : FK
permissions <-u- role_permissions : FK
user <-- collab : FK
user <-- workspace : FK
user <-- collab_member : FK
roles <-- workspace_member : FK
workspace <-- workspace_member : FK
workspace <-- collab : FK
workspace <-- database_row_update : FK
collab <-- collab_update : FK
collab <-- collab_snapshot: FK
collab <-u- collab_member : FK
collab <-- collab_statistics : PK
roles <-- collab_member : FK
@enduml
@startuml
title Triggers in Database Schema
participant "af_user" as A
participant "af_workspace" as B
participant "af_workspace_member" as C
participant "af_collab" as D
participant "af_collab_update" as E
participant "af_collab_member" as F
participant "af_collab_statistics" as G
participant "af_collab_snapshot" as H
A -> B: create_af_workspace_trigger
note right
This trigger fires after an insert on af_user. It automatically creates a workspace
with the uid of the new user as the owner_uid.
end note
B -> C: manage_af_workspace_member_role_trigger
note right
This trigger fires after an insert on af_workspace. It automatically
creates a workspace member in the af_workspace_member table with the
role 'Owner'.
end note
E -> D: insert_into_af_collab_trigger
note right
This trigger fires before an insert on af_collab_update.
It checks if a corresponding collab exists in the af_collab table.
If not, it creates one with the oid, uid, and current timestamp.
end note
D -> F: insert_into_af_collab_member_trigger
note right
This trigger fires after an insert on af_collab.
It automatically adds the collab's owner to the af_collab_member
table with the role 'Owner'.
end note
E -> G: af_collab_update_edit_count_trigger
note right
This trigger fires after an insert on af_collab_update.
It increments the edit_count of the corresponding collab in
the af_collab_statistics table.
end note
H -> G: af_collab_snapshot_update_edit_count_trigger
note right
This trigger fires after an insert on af_collab_snapshot.
It sets the edit_count of the new snapshot to the current
edit_count of the collab in the af_collab_statistics table.
end note
@enduml
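Taken together, the triggers mean a writer only ever inserts into af_collab_update; the corresponding af_collab, af_collab_member and af_collab_statistics rows are maintained for it. A rough sketch of that flow using the tokio_postgres crate (the connection string, the empty md5 value and the uuid cast are assumptions made for the example):

use tokio_postgres::NoTls;

// Illustrative only: demonstrates the trigger chain described above.
async fn insert_update_and_check(
  oid: &str,
  uid: i64,
  workspace_id: &str,
  payload: &[u8],
) -> Result<(), tokio_postgres::Error> {
  let (client, connection) =
    tokio_postgres::connect("host=localhost user=postgres dbname=appflowy", NoTls).await?;
  tokio::spawn(async move {
    if let Err(e) = connection.await {
      eprintln!("connection error: {e}");
    }
  });

  // Write only to af_collab_update; insert_into_af_collab_trigger creates the matching
  // af_collab row if it is missing, and insert_into_af_collab_member_trigger then adds
  // the owner membership.
  client
    .execute(
      "INSERT INTO af_collab_update (oid, value, value_size, partition_key, uid, md5, workspace_id) \
       VALUES ($1, $2, $3, $4, $5, $6, $7::uuid)",
      &[&oid, &payload, &(payload.len() as i32), &0i32, &uid, &"", &workspace_id],
    )
    .await?;

  // af_collab_update_edit_count_trigger should have bumped the edit count for this oid.
  if let Some(row) = client
    .query_opt("SELECT edit_count FROM af_collab_statistics WHERE oid = $1", &[&oid])
    .await?
  {
    let edit_count: i64 = row.get(0);
    println!("edit_count for {oid}: {edit_count}");
  }
  Ok(())
}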


View File

@@ -1,11 +1,11 @@
 use std::sync::Arc;
-use appflowy_integrate::RemoteCollabStorage;
-use flowy_database2::deps::DatabaseCloudService;
-use flowy_document2::deps::DocumentCloudService;
-use flowy_folder2::deps::FolderCloudService;
-use flowy_user::event_map::UserAuthService;
+use collab_plugins::cloud_storage::RemoteCollabStorage;
+use flowy_database_deps::cloud::DatabaseCloudService;
+use flowy_document_deps::cloud::DocumentCloudService;
+use flowy_folder_deps::cloud::FolderCloudService;
+use flowy_user_deps::cloud::UserService;
 pub mod local_server;
 mod request;
@@ -14,23 +14,9 @@ pub mod self_host;
 pub mod supabase;
 pub mod util;
-/// In order to run this the supabase test, you need to create a .env file in the root directory of this project
-/// and add the following environment variables:
-/// - SUPABASE_URL
-/// - SUPABASE_ANON_KEY
-/// - SUPABASE_KEY
-/// - SUPABASE_JWT_SECRET
-///
-/// the .env file should look like this:
-/// SUPABASE_URL=https://<your-supabase-url>.supabase.co
-/// SUPABASE_ANON_KEY=<your-supabase-anon-key>
-/// SUPABASE_KEY=<your-supabase-key>
-/// SUPABASE_JWT_SECRET=<your-supabase-jwt-secret>
-///
 pub trait AppFlowyServer: Send + Sync + 'static {
   fn enable_sync(&self, _enable: bool) {}
-  fn user_service(&self) -> Arc<dyn UserAuthService>;
+  fn user_service(&self) -> Arc<dyn UserService>;
   fn folder_service(&self) -> Arc<dyn FolderCloudService>;
   fn database_service(&self) -> Arc<dyn DatabaseCloudService>;
   fn document_service(&self) -> Arc<dyn DocumentCloudService>;
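The AppFlowyServer trait is the seam this migration introduces: the local server, the self-hosted server and the Supabase backend all hand the application the same four service objects. A sketch of how a caller is expected to consume it (the wiring function is hypothetical; only the trait methods come from the code above):

use std::sync::Arc;

// Hypothetical wiring function: which concrete server sits behind the Arc does not matter here.
fn wire_services(server: Arc<dyn AppFlowyServer>) {
  let user_service = server.user_service();
  let folder_service = server.folder_service();
  let database_service = server.database_service();
  let document_service = server.document_service();
  // Hand these services to the user, folder, database and document managers.
  let _ = (user_service, folder_service, database_service, document_service);
}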

View File

@@ -1,27 +1,35 @@
-use flowy_database2::deps::{
+use anyhow::Error;
+use collab_plugins::cloud_storage::CollabType;
+use flowy_database_deps::cloud::{
   CollabObjectUpdate, CollabObjectUpdateByOid, DatabaseCloudService, DatabaseSnapshot,
 };
-use flowy_error::FlowyError;
 use lib_infra::future::FutureResult;
 pub(crate) struct LocalServerDatabaseCloudServiceImpl();
 impl DatabaseCloudService for LocalServerDatabaseCloudServiceImpl {
-  fn get_collab_update(&self, _object_id: &str) -> FutureResult<CollabObjectUpdate, FlowyError> {
+  fn get_collab_update(
+    &self,
+    _object_id: &str,
+    _object_ty: CollabType,
+  ) -> FutureResult<CollabObjectUpdate, Error> {
     FutureResult::new(async move { Ok(vec![]) })
   }
   fn batch_get_collab_updates(
     &self,
     _object_ids: Vec<String>,
-  ) -> FutureResult<CollabObjectUpdateByOid, FlowyError> {
+    _object_ty: CollabType,
+  ) -> FutureResult<CollabObjectUpdateByOid, Error> {
     FutureResult::new(async move { Ok(CollabObjectUpdateByOid::default()) })
   }
   fn get_collab_latest_snapshot(
     &self,
     _object_id: &str,
-  ) -> FutureResult<Option<DatabaseSnapshot>, FlowyError> {
+  ) -> FutureResult<Option<DatabaseSnapshot>, Error> {
     FutureResult::new(async move { Ok(None) })
   }
 }

View File

@@ -1,25 +1,23 @@
-use flowy_document2::deps::{DocumentCloudService, DocumentData, DocumentSnapshot};
-use flowy_error::FlowyError;
+use anyhow::Error;
+use flowy_document_deps::cloud::*;
 use lib_infra::future::FutureResult;
 pub(crate) struct LocalServerDocumentCloudServiceImpl();
 impl DocumentCloudService for LocalServerDocumentCloudServiceImpl {
-  fn get_document_updates(&self, _document_id: &str) -> FutureResult<Vec<Vec<u8>>, FlowyError> {
+  fn get_document_updates(&self, _document_id: &str) -> FutureResult<Vec<Vec<u8>>, Error> {
     FutureResult::new(async move { Ok(vec![]) })
   }
   fn get_document_latest_snapshot(
     &self,
     _document_id: &str,
-  ) -> FutureResult<Option<DocumentSnapshot>, FlowyError> {
+  ) -> FutureResult<Option<DocumentSnapshot>, Error> {
     FutureResult::new(async move { Ok(None) })
   }
-  fn get_document_data(
-    &self,
-    _document_id: &str,
-  ) -> FutureResult<Option<DocumentData>, FlowyError> {
+  fn get_document_data(&self, _document_id: &str) -> FutureResult<Option<DocumentData>, Error> {
     FutureResult::new(async move { Ok(None) })
   }
 }

View File

@@ -1,8 +1,9 @@
+use anyhow::Error;
 use std::sync::Arc;
-use flowy_error::FlowyError;
-use flowy_folder2::deps::{FolderCloudService, FolderData, FolderSnapshot, Workspace};
-use flowy_folder2::gen_workspace_id;
+use flowy_folder_deps::cloud::{
+  gen_workspace_id, FolderCloudService, FolderData, FolderSnapshot, Workspace,
+};
 use lib_infra::future::FutureResult;
 use lib_infra::util::timestamp;
@@ -13,11 +14,11 @@ pub(crate) struct LocalServerFolderCloudServiceImpl {
 }
 impl FolderCloudService for LocalServerFolderCloudServiceImpl {
-  fn create_workspace(&self, _uid: i64, name: &str) -> FutureResult<Workspace, FlowyError> {
+  fn create_workspace(&self, _uid: i64, name: &str) -> FutureResult<Workspace, Error> {
     let name = name.to_string();
     FutureResult::new(async move {
       Ok(Workspace {
-        id: gen_workspace_id(),
+        id: gen_workspace_id().to_string(),
         name: name.to_string(),
         child_views: Default::default(),
         created_at: timestamp(),
@@ -25,22 +26,18 @@ impl FolderCloudService for LocalServerFolderCloudServiceImpl {
     })
   }
-  fn get_folder_data(&self, _workspace_id: &str) -> FutureResult<Option<FolderData>, FlowyError> {
+  fn get_folder_data(&self, _workspace_id: &str) -> FutureResult<Option<FolderData>, Error> {
     FutureResult::new(async move { Ok(None) })
   }
   fn get_folder_latest_snapshot(
     &self,
     _workspace_id: &str,
-  ) -> FutureResult<Option<FolderSnapshot>, FlowyError> {
+  ) -> FutureResult<Option<FolderSnapshot>, Error> {
     FutureResult::new(async move { Ok(None) })
   }
-  fn get_folder_updates(
-    &self,
-    workspace_id: &str,
-    uid: i64,
-  ) -> FutureResult<Vec<Vec<u8>>, FlowyError> {
+  fn get_folder_updates(&self, workspace_id: &str, uid: i64) -> FutureResult<Vec<Vec<u8>>, Error> {
     let weak_db = Arc::downgrade(&self.db);
     let workspace_id = workspace_id.to_string();
     FutureResult::new(async move {

View File

@@ -1,13 +1,11 @@
+use anyhow::Error;
 use std::sync::Arc;
 use lazy_static::lazy_static;
 use parking_lot::Mutex;
-use flowy_error::FlowyError;
-use flowy_user::entities::{
-  SignInParams, SignInResponse, SignUpParams, SignUpResponse, UpdateUserProfileParams, UserProfile,
-};
-use flowy_user::event_map::{UserAuthService, UserCredentials};
+use flowy_user_deps::cloud::UserService;
+use flowy_user_deps::entities::*;
 use lib_infra::box_any::BoxAny;
 use lib_infra::future::FutureResult;
@@ -19,19 +17,22 @@ lazy_static! {
 }
 pub(crate) struct LocalServerUserAuthServiceImpl {
+  #[allow(dead_code)]
   pub db: Arc<dyn LocalServerDB>,
 }
-impl UserAuthService for LocalServerUserAuthServiceImpl {
-  fn sign_up(&self, params: BoxAny) -> FutureResult<SignUpResponse, FlowyError> {
+impl UserService for LocalServerUserAuthServiceImpl {
+  fn sign_up(&self, params: BoxAny) -> FutureResult<SignUpResponse, Error> {
     FutureResult::new(async move {
       let params = params.unbox_or_error::<SignUpParams>()?;
       let uid = ID_GEN.lock().next_id();
       let workspace_id = uuid::Uuid::new_v4().to_string();
+      let user_workspace = UserWorkspace::new(&workspace_id, uid);
       Ok(SignUpResponse {
         user_id: uid,
         name: params.name,
-        workspace_id,
+        latest_workspace: user_workspace.clone(),
+        user_workspaces: vec![user_workspace],
         is_new: true,
         email: Some(params.email),
         token: None,
@@ -39,8 +40,8 @@ impl UserAuthService for LocalServerUserAuthServiceImpl {
     })
   }
-  fn sign_in(&self, params: BoxAny) -> FutureResult<SignInResponse, FlowyError> {
-    let weak_db = Arc::downgrade(&self.db);
+  fn sign_in(&self, params: BoxAny) -> FutureResult<SignInResponse, Error> {
+    let db = self.db.clone();
     FutureResult::new(async move {
       let params: SignInParams = params.unbox_or_error::<SignInParams>()?;
       let uid = match params.uid {
@@ -48,24 +49,21 @@
         Some(uid) => uid,
       };
-      // Get the workspace id from the database if it exists, otherwise generate a new one.
-      let workspace_id = weak_db
-        .upgrade()
-        .and_then(|db| db.get_user_profile(uid).ok())
-        .and_then(|user_profile| user_profile.map(|user_profile| user_profile.workspace_id))
-        .unwrap_or(uuid::Uuid::new_v4().to_string());
+      let user_workspace = db
+        .get_user_workspace(uid)?
+        .unwrap_or_else(make_user_workspace);
       Ok(SignInResponse {
         user_id: uid,
         name: params.name,
-        workspace_id,
+        latest_workspace: user_workspace.clone(),
+        user_workspaces: vec![user_workspace],
         email: Some(params.email),
         token: None,
       })
     })
   }
-  fn sign_out(&self, _token: Option<String>) -> FutureResult<(), FlowyError> {
+  fn sign_out(&self, _token: Option<String>) -> FutureResult<(), Error> {
     FutureResult::new(async { Ok(()) })
   }
@@ -73,18 +71,47 @@ impl UserAuthService for LocalServerUserAuthServiceImpl {
     &self,
     _credential: UserCredentials,
     _params: UpdateUserProfileParams,
-  ) -> FutureResult<(), FlowyError> {
+  ) -> FutureResult<(), Error> {
     FutureResult::new(async { Ok(()) })
   }
   fn get_user_profile(
     &self,
     _credential: UserCredentials,
-  ) -> FutureResult<Option<UserProfile>, FlowyError> {
+  ) -> FutureResult<Option<UserProfile>, Error> {
     FutureResult::new(async { Ok(None) })
   }
-  fn check_user(&self, _credential: UserCredentials) -> FutureResult<(), FlowyError> {
+  fn get_user_workspaces(&self, _uid: i64) -> FutureResult<Vec<UserWorkspace>, Error> {
+    FutureResult::new(async { Ok(vec![]) })
+  }
+  fn check_user(&self, _credential: UserCredentials) -> FutureResult<(), Error> {
+    FutureResult::new(async { Ok(()) })
+  }
+  fn add_workspace_member(
+    &self,
+    _user_email: String,
+    _workspace_id: String,
+  ) -> FutureResult<(), Error> {
+    FutureResult::new(async { Ok(()) })
+  }
+  fn remove_workspace_member(
+    &self,
+    _user_email: String,
+    _workspace_id: String,
+  ) -> FutureResult<(), Error> {
     FutureResult::new(async { Ok(()) })
   }
 }
+fn make_user_workspace() -> UserWorkspace {
+  UserWorkspace {
+    id: uuid::Uuid::new_v4().to_string(),
+    name: "My Workspace".to_string(),
+    created_at: Default::default(),
+    database_storage_id: uuid::Uuid::new_v4().to_string(),
+  }
+}
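With this change sign_up and sign_in no longer return a bare workspace_id; they return the workspace to open first plus the full list the user belongs to. A caller-side sketch of the new response shape (the handler function is hypothetical; the field names come from the diff above):

// Illustrative consumer of the new SignUpResponse shape.
fn on_sign_up(resp: SignUpResponse) {
  // Open the workspace the server marked as current.
  let current = resp.latest_workspace.clone();
  println!("opening workspace {} ({})", current.name, current.id);
  // Keep the full list around for a workspace switcher.
  for workspace in &resp.user_workspaces {
    println!("available workspace: {} ({})", workspace.name, workspace.id);
  }
}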

View File

@@ -1,17 +1,18 @@
 use std::sync::Arc;
-use appflowy_integrate::RemoteCollabStorage;
-use collab_document::YrsDocAction;
+use collab_plugins::cloud_storage::RemoteCollabStorage;
 use parking_lot::RwLock;
 use tokio::sync::mpsc;
-use flowy_database2::deps::DatabaseCloudService;
-use flowy_document2::deps::DocumentCloudService;
+use flowy_database_deps::cloud::DatabaseCloudService;
+use flowy_document_deps::cloud::DocumentCloudService;
 use flowy_error::FlowyError;
-use flowy_folder2::deps::FolderCloudService;
-use flowy_user::entities::UserProfile;
-use flowy_user::event_map::UserAuthService;
-use flowy_user::services::database::{get_user_profile, open_collab_db, open_user_db};
+use flowy_folder_deps::cloud::FolderCloudService;
+// use flowy_user::services::database::{
+//   get_user_profile, get_user_workspace, open_collab_db, open_user_db,
+// };
+use flowy_user_deps::cloud::UserService;
+use flowy_user_deps::entities::*;
 use crate::local_server::impls::{
   LocalServerDatabaseCloudServiceImpl, LocalServerDocumentCloudServiceImpl,
@@ -21,18 +22,19 @@ use crate::AppFlowyServer;
 pub trait LocalServerDB: Send + Sync + 'static {
   fn get_user_profile(&self, uid: i64) -> Result<Option<UserProfile>, FlowyError>;
+  fn get_user_workspace(&self, uid: i64) -> Result<Option<UserWorkspace>, FlowyError>;
   fn get_collab_updates(&self, uid: i64, object_id: &str) -> Result<Vec<Vec<u8>>, FlowyError>;
 }
 pub struct LocalServer {
-  storage_path: String,
+  local_db: Arc<dyn LocalServerDB>,
   stop_tx: RwLock<Option<mpsc::Sender<()>>>,
 }
 impl LocalServer {
-  pub fn new(storage_path: &str) -> Self {
+  pub fn new(local_db: Arc<dyn LocalServerDB>) -> Self {
     Self {
-      storage_path: storage_path.to_string(),
+      local_db,
      stop_tx: Default::default(),
     }
   }
@@ -46,18 +48,16 @@ impl LocalServer {
 }
 impl AppFlowyServer for LocalServer {
-  fn user_service(&self) -> Arc<dyn UserAuthService> {
-    let db = LocalServerDBImpl {
-      storage_path: self.storage_path.clone(),
-    };
-    Arc::new(LocalServerUserAuthServiceImpl { db: Arc::new(db) })
+  fn user_service(&self) -> Arc<dyn UserService> {
+    Arc::new(LocalServerUserAuthServiceImpl {
+      db: self.local_db.clone(),
+    })
   }
   fn folder_service(&self) -> Arc<dyn FolderCloudService> {
-    let db = LocalServerDBImpl {
-      storage_path: self.storage_path.clone(),
-    };
-    Arc::new(LocalServerFolderCloudServiceImpl { db: Arc::new(db) })
+    Arc::new(LocalServerFolderCloudServiceImpl {
+      db: self.local_db.clone(),
+    })
   }
   fn database_service(&self) -> Arc<dyn DatabaseCloudService> {
@@ -72,25 +72,3 @@ impl AppFlowyServer for LocalServer {
     None
   }
 }
-struct LocalServerDBImpl {
-  storage_path: String,
-}
-impl LocalServerDB for LocalServerDBImpl {
-  fn get_user_profile(&self, uid: i64) -> Result<Option<UserProfile>, FlowyError> {
-    let sqlite_db = open_user_db(&self.storage_path, uid)?;
-    let user_profile = get_user_profile(&sqlite_db, uid).ok();
-    Ok(user_profile)
-  }
-  fn get_collab_updates(&self, uid: i64, object_id: &str) -> Result<Vec<Vec<u8>>, FlowyError> {
-    let collab_db = open_collab_db(&self.storage_path, uid)?;
-    let read_txn = collab_db.read_txn();
-    let updates = read_txn
-      .get_all_updates(uid, object_id)
-      .map_err(|e| FlowyError::internal().context(format!("Failed to open collab db: {:?}", e)))?;
-    Ok(updates)
-  }
-}
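LocalServer no longer opens the SQLite and collab databases itself; it is handed a LocalServerDB implementation, and the commented-out flowy_user imports above hint at where the real implementation is expected to live. A minimal sketch of wiring a stub implementation through the new constructor (the stub type and builder function are illustrative only):

use std::sync::Arc;

// Illustrative stub: answers every query with "nothing stored".
struct InMemoryLocalServerDB;

impl LocalServerDB for InMemoryLocalServerDB {
  fn get_user_profile(&self, _uid: i64) -> Result<Option<UserProfile>, FlowyError> {
    Ok(None)
  }
  fn get_user_workspace(&self, _uid: i64) -> Result<Option<UserWorkspace>, FlowyError> {
    Ok(None)
  }
  fn get_collab_updates(&self, _uid: i64, _object_id: &str) -> Result<Vec<Vec<u8>>, FlowyError> {
    Ok(vec![])
  }
}

fn build_local_server() -> LocalServer {
  LocalServer::new(Arc::new(InMemoryLocalServerDB))
}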

Some files were not shown because too many files have changed in this diff.