feat: encrypt collab update (#3215)

* feat: implement encrypt and decrypt

* feat: encrypt and decrypt

* feat: update user profile with encrypt

* chore: store encryption sign

* fix: login in setting menu

* chore: show encryption account name

* chore: fix test

* ci: fix warnings

* test: enable supabase test

* chore: fix test and rename column

* fix: update user profile after set the secret

* fix: encryption with wrong secret

* fix: don't save user data if the return value of did_sign_up is err

* chore: encrypt snapshot data

* chore: refactor snapshots interface

* ci: add tests

* chore: update collab rev
Nathan.fooo 2023-08-17 23:46:39 +08:00 committed by GitHub
parent 103f56922f
commit 649b0a135a
103 changed files with 2825 additions and 905 deletions

View File

@ -25,6 +25,7 @@ env:
jobs:
test-on-ubuntu:
environment: SUPABASE_CI
runs-on: ubuntu-latest
steps:
- name: Checkout source code
@ -49,7 +50,23 @@ jobs:
with:
prefix-key: 'ubuntu-latest'
workspaces: |
frontend/rust-lib
frontend/rust-lib
- name: Create .env file in flowy-server
working-directory: frontend/rust-lib/flowy-server
run: |
touch .env.ci
echo SUPABASE_URL=${{ secrets.SUPABASE_URL }} >> .env.ci
echo SUPABASE_ANON_KEY=${{ secrets.SUPABASE_ANON_KEY }} >> .env.ci
echo SUPABASE_JWT_SECRET=${{ secrets.SUPABASE_JWT_SECRET }} >> .env.ci
- name: Create .env file in flowy-test
working-directory: frontend/rust-lib/flowy-test
run: |
touch .env.ci
echo SUPABASE_URL=${{ secrets.SUPABASE_URL }} >> .env.ci
echo SUPABASE_ANON_KEY=${{ secrets.SUPABASE_ANON_KEY }} >> .env.ci
echo SUPABASE_JWT_SECRET=${{ secrets.SUPABASE_JWT_SECRET }} >> .env.ci
- name: Run rust-lib tests
working-directory: frontend/rust-lib
@ -60,5 +77,5 @@ jobs:
working-directory: frontend/rust-lib/
- name: clippy rust-lib
run: cargo clippy --features="rev-sqlite"
run: cargo clippy --all
working-directory: frontend/rust-lib
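
Note: the .env.ci files written above are presumably read by the Rust test binaries through the dotenv crate, which Cargo.lock lists for flowy-server and flowy-test. A minimal sketch of that setup, with an assumed helper name and fallback behaviour (not the repo's actual test code):

// Hypothetical test helper: load the Supabase credentials written by the CI step.
fn load_ci_env() {
  // Prefer the CI-only file; fall back to a local .env when running outside CI.
  if dotenv::from_filename(".env.ci").is_err() {
    let _ = dotenv::dotenv();
  }
  let url = std::env::var("SUPABASE_URL").expect("SUPABASE_URL is not set");
  let key = std::env::var("SUPABASE_ANON_KEY").expect("SUPABASE_ANON_KEY is not set");
  assert!(!url.is_empty() && !key.is_empty());
}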

View File

@ -24,7 +24,7 @@
"program": "./lib/main.dart",
"type": "dart",
"env": {
"RUST_LOG": "debug",
"RUST_LOG": "trace",
},
"cwd": "${workspaceRoot}/appflowy_flutter"
},

View File

@ -1,13 +1,8 @@
import 'package:appflowy/startup/entry_point.dart';
import 'package:flutter/material.dart';
import 'startup/startup.dart';
Future<void> main() async {
WidgetsFlutterBinding.ensureInitialized();
await FlowyRunner.run(
FlowyApp(),
integrationEnv(),
);
await runAppFlowy();
}

View File

@ -7,6 +7,7 @@ import 'package:flutter/material.dart';
import 'package:get_it/get_it.dart';
import 'deps_resolver.dart';
import 'entry_point.dart';
import 'launch_configuration.dart';
import 'plugin/plugin.dart';
import 'tasks/prelude.dart';
@ -23,6 +24,13 @@ class FlowyRunnerContext {
FlowyRunnerContext({required this.applicationDataDirectory});
}
Future<void> runAppFlowy() async {
await FlowyRunner.run(
FlowyApp(),
integrationEnv(),
);
}
class FlowyRunner {
static Future<FlowyRunnerContext> run(
EntryPoint f,

View File

@ -0,0 +1,106 @@
import 'package:appflowy/plugins/database_view/application/defines.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:dartz/dartz.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
import 'auth/auth_service.dart';
part 'encrypt_secret_bloc.freezed.dart';
class EncryptSecretBloc extends Bloc<EncryptSecretEvent, EncryptSecretState> {
final UserProfilePB user;
EncryptSecretBloc({required this.user})
: super(EncryptSecretState.initial()) {
on<EncryptSecretEvent>((event, emit) async {
await event.when(
setEncryptSecret: (secret) async {
if (isLoading()) {
return;
}
final payload = UserSecretPB.create()
..encryptionSecret = secret
..encryptionSign = user.encryptionSign
..encryptionType = user.encryptionType
..userId = user.id;
UserEventSetEncryptionSecret(payload).send().then((result) {
if (!isClosed) {
add(EncryptSecretEvent.didFinishCheck(result));
}
});
emit(
state.copyWith(
loadingState: const LoadingState.loading(),
successOrFail: none(),
),
);
},
cancelInputSecret: () async {
await getIt<AuthService>().signOut();
emit(
state.copyWith(
successOrFail: none(),
isSignOut: true,
),
);
},
didFinishCheck: (Either<Unit, FlowyError> result) {
result.fold(
(unit) {
emit(
state.copyWith(
loadingState: const LoadingState.loading(),
successOrFail: Some(result),
),
);
},
(err) {
emit(
state.copyWith(
loadingState: LoadingState.finish(right(err)),
successOrFail: Some(result),
),
);
},
);
},
);
});
}
bool isLoading() {
final loadingState = state.loadingState;
if (loadingState != null) {
return loadingState.when(loading: () => true, finish: (_) => false);
}
return false;
}
}
@freezed
class EncryptSecretEvent with _$EncryptSecretEvent {
const factory EncryptSecretEvent.setEncryptSecret(String secret) =
_SetEncryptSecret;
const factory EncryptSecretEvent.didFinishCheck(
Either<Unit, FlowyError> result,
) = _DidFinishCheck;
const factory EncryptSecretEvent.cancelInputSecret() = _CancelInputSecret;
}
@freezed
class EncryptSecretState with _$EncryptSecretState {
const factory EncryptSecretState({
required Option<Either<Unit, FlowyError>> successOrFail,
required bool isSignOut,
LoadingState? loadingState,
}) = _EncryptSecretState;
factory EncryptSecretState.initial() => EncryptSecretState(
successOrFail: none(),
isSignOut: false,
);
}

View File

@ -62,7 +62,7 @@ class UserBackendService {
throw UnimplementedError();
}
Future<Either<Unit, FlowyError>> signOut() {
static Future<Either<Unit, FlowyError>> signOut() {
return UserEventSignOut().send();
}

View File

@ -0,0 +1,10 @@
import 'package:flutter/material.dart';
class EmptyWorkspaceScreen extends StatelessWidget {
const EmptyWorkspaceScreen({super.key});
@override
Widget build(BuildContext context) {
return const Placeholder();
}
}

View File

@ -0,0 +1,124 @@
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/workspace/presentation/home/toast.dart';
import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flowy_infra_ui/flowy_infra_ui.dart';
import 'package:flowy_infra_ui/widget/buttons/secondary_button.dart';
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import '../application/encrypt_secret_bloc.dart';
class EncryptSecretScreen extends StatefulWidget {
final UserProfilePB user;
const EncryptSecretScreen({required this.user, super.key});
@override
State<EncryptSecretScreen> createState() => _EncryptSecretScreenState();
}
class _EncryptSecretScreenState extends State<EncryptSecretScreen> {
final TextEditingController _textEditingController = TextEditingController();
@override
Widget build(BuildContext context) {
return Scaffold(
body: BlocProvider(
create: (context) => EncryptSecretBloc(user: widget.user),
child: MultiBlocListener(
listeners: [
BlocListener<EncryptSecretBloc, EncryptSecretState>(
listenWhen: (previous, current) =>
previous.isSignOut != current.isSignOut,
listener: (context, state) async {
if (state.isSignOut) {
await runAppFlowy();
}
},
),
BlocListener<EncryptSecretBloc, EncryptSecretState>(
listenWhen: (previous, current) =>
previous.successOrFail != current.successOrFail,
listener: (context, state) async {
state.successOrFail.fold(
() {},
(result) {
result.fold(
(unit) async {
await runAppFlowy();
},
(err) {
Log.error(err);
showSnackBarMessage(context, err.msg);
},
);
},
);
},
),
],
child: BlocBuilder<EncryptSecretBloc, EncryptSecretState>(
builder: (context, state) {
final indicator = state.loadingState?.when(
loading: () => const Center(
child: CircularProgressIndicator.adaptive(),
),
finish: (result) => const SizedBox.shrink(),
) ??
const SizedBox.shrink();
return Center(
child: SizedBox(
width: 300,
height: 160,
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Opacity(
opacity: 0.6,
child: FlowyText.medium(
"${LocaleKeys.settings_menu_inputEncryptPrompt.tr()} ${widget.user.email}",
fontSize: 14,
maxLines: 10,
),
),
const VSpace(6),
SizedBox(
width: 300,
child: FlowyTextField(
controller: _textEditingController,
hintText:
LocaleKeys.settings_menu_inputTextFieldHint.tr(),
onChanged: (p0) {},
),
),
OkCancelButton(
alignment: MainAxisAlignment.end,
onOkPressed: () {
context.read<EncryptSecretBloc>().add(
EncryptSecretEvent.setEncryptSecret(
_textEditingController.text,
),
);
},
onCancelPressed: () {
context.read<EncryptSecretBloc>().add(
const EncryptSecretEvent.cancelInputSecret(),
);
},
mode: TextButtonMode.normal,
),
const VSpace(6),
indicator,
],
),
),
);
},
),
),
),
);
}
}

View File

@ -216,7 +216,7 @@ Widget _buildTextButton(
) {
return SecondaryTextButton(
title,
mode: SecondaryTextButtonMode.small,
mode: TextButtonMode.small,
onPressed: onPressed,
);
}

View File

@ -13,6 +13,8 @@ import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart'
import 'package:appflowy_backend/protobuf/flowy-folder2/protobuf.dart';
import 'package:flutter/material.dart';
import 'encrypt_secret_screen.dart';
const routerNameRoot = '/';
const routerNameSignUp = '/signUp';
const routerNameSignIn = '/signIn';
@ -69,6 +71,23 @@ class AuthRouter {
(r) => pushWelcomeScreen(context, userProfile),
);
}
Future<void> pushEncryptionScreen(
BuildContext context,
UserProfilePB userProfile,
) async {
Navigator.push(
context,
PageRoutes.fade(
() => EncryptSecretScreen(
user: userProfile,
key: ValueKey(userProfile.id),
),
const RouteSettings(name: routerNameWelcome),
RouteDurations.slow.inMilliseconds * .001,
),
);
}
}
class SplashRoute {

View File

@ -1,13 +1,13 @@
import 'package:appflowy/core/config/kv.dart';
import 'package:appflowy/core/config/kv_keys.dart';
import 'package:appflowy/core/frameless_window.dart';
import 'package:appflowy/startup/entry_point.dart';
import 'package:appflowy/generated/flowy_svgs.g.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/user/application/historical_user_bloc.dart';
import 'package:appflowy/user/application/sign_in_bloc.dart';
import 'package:appflowy/user/presentation/router.dart';
import 'package:appflowy/user/presentation/widgets/background.dart';
import 'package:appflowy_backend/protobuf/flowy-user/user_profile.pb.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flowy_infra/size.dart';
import 'package:flowy_infra_ui/flowy_infra_ui.dart';
@ -15,8 +15,6 @@ import 'package:flowy_infra_ui/widget/rounded_button.dart';
import 'package:flowy_infra_ui/widget/rounded_input_field.dart';
import 'package:flowy_infra_ui/style_widget/snap_bar.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart'
show UserProfilePB;
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:dartz/dartz.dart';
@ -57,7 +55,13 @@ class SignInScreen extends StatelessWidget {
BuildContext context,
) {
result.fold(
(user) => router.pushHomeScreen(context, user),
(user) {
if (user.encryptionType == EncryptionTypePB.Symmetric) {
router.pushEncryptionScreen(context, user);
} else {
router.pushHomeScreen(context, user);
}
},
(error) => showSnapBar(context, error.msg),
);
}
@ -205,10 +209,7 @@ class SignInAsGuestButton extends StatelessWidget {
listenWhen: (previous, current) =>
previous.openedHistoricalUser != current.openedHistoricalUser,
listener: (context, state) async {
await FlowyRunner.run(
FlowyApp(),
integrationEnv(),
);
await runAppFlowy();
},
child: BlocBuilder<HistoricalUserBloc, HistoricalUserState>(
builder: (context, state) {

View File

@ -10,16 +10,6 @@ import '../application/splash_bloc.dart';
import '../domain/auth_state.dart';
import 'router.dart';
// [[diagram: splash screen]]
// Participants: SplashScreen, SplashBloc, ISplashUser, HomeScreen, BlocListener, RustSDK
// 1. get user  2. send UserEventCheckUser  3. return AuthState  4. Show HomeScreen or SignIn
class SplashScreen extends StatelessWidget {
const SplashScreen({
Key? key,
@ -65,23 +55,40 @@ class SplashScreen extends StatelessWidget {
);
}
/// Handles the authentication flow once a user is authenticated.
Future<void> _handleAuthenticated(
BuildContext context,
Authenticated authenticated,
) async {
final userProfile = authenticated.userProfile;
final result = await FolderEventGetCurrentWorkspace().send();
/// After a user is authenticated, this function checks if encryption is required.
final result = await UserEventCheckEncryptionSign().send();
result.fold(
(workspaceSetting) {
getIt<SplashRoute>().pushHomeScreen(
context,
userProfile,
workspaceSetting,
);
(check) async {
/// If encryption is needed, the user is navigated to the encryption screen.
/// Otherwise, it fetches the current workspace for the user and navigates them to the home screen.
if (check.isNeedSecret) {
getIt<AuthRouter>().pushEncryptionScreen(context, userProfile);
} else {
final result = await FolderEventGetCurrentWorkspace().send();
result.fold(
(workspaceSetting) {
getIt<SplashRoute>().pushHomeScreen(
context,
userProfile,
workspaceSetting,
);
},
(error) async {
Log.error(error);
getIt<SplashRoute>().pushWelcomeScreen(context, userProfile);
},
);
}
},
(error) async {
Log.error(error);
getIt<SplashRoute>().pushWelcomeScreen(context, userProfile);
(err) {
Log.error(err);
},
);
}
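
The routing above hinges on the isNeedSecret flag returned by UserEventCheckEncryptionSign. As a rough illustration of what that check could compute on the Rust side (names and logic are assumptions, not the actual flowy-user handler), a secret is only requested when the account uses symmetric encryption and nothing is cached locally:

// Illustrative sketch only; type and function names are hypothetical.
struct EncryptionCheck {
  is_need_secret: bool,
}

fn check_encryption_sign(uses_symmetric_encryption: bool, local_secret: Option<&str>) -> EncryptionCheck {
  EncryptionCheck {
    // Ask for the secret when encryption is enabled but no secret is stored on this device.
    is_need_secret: uses_symmetric_encryption && local_secret.is_none(),
  }
}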

View File

@ -0,0 +1,57 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-notification/subject.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:appflowy_backend/rust_stream.dart';
import 'package:dartz/dartz.dart';
import '../../../core/notification/user_notification.dart';
class UserCloudConfigListener {
final String userId;
StreamSubscription<SubscribeObject>? _subscription;
void Function(Either<UserCloudConfigPB, FlowyError>)? _onSettingChanged;
UserNotificationParser? _userParser;
UserCloudConfigListener({
required this.userId,
});
void start({
void Function(Either<UserCloudConfigPB, FlowyError>)? onSettingChanged,
}) {
_onSettingChanged = onSettingChanged;
_userParser = UserNotificationParser(
id: userId,
callback: _userNotificationCallback,
);
_subscription = RustStreamReceiver.listen((observable) {
_userParser?.parse(observable);
});
}
Future<void> stop() async {
_userParser = null;
await _subscription?.cancel();
_onSettingChanged = null;
}
void _userNotificationCallback(
UserNotification ty,
Either<Uint8List, FlowyError> result,
) {
switch (ty) {
case UserNotification.DidUpdateCloudConfig:
result.fold(
(payload) => _onSettingChanged
?.call(left(UserCloudConfigPB.fromBuffer(payload))),
(error) => _onSettingChanged?.call(right(error)),
);
break;
default:
break;
}
}
}

View File

@ -1,72 +1,87 @@
import 'package:appflowy/plugins/database_view/application/defines.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
import 'package:dartz/dartz.dart';
import 'package:protobuf/protobuf.dart';
import 'cloud_setting_listener.dart';
part 'setting_supabase_bloc.freezed.dart';
class SyncSettingBloc extends Bloc<SyncSettingEvent, SyncSettingState> {
SyncSettingBloc() : super(SyncSettingState.initial()) {
on<SyncSettingEvent>((event, emit) async {
class CloudSettingBloc extends Bloc<CloudSettingEvent, CloudSettingState> {
final UserCloudConfigListener _listener;
CloudSettingBloc({
required String userId,
required UserCloudConfigPB config,
}) : _listener = UserCloudConfigListener(userId: userId),
super(CloudSettingState.initial(config)) {
on<CloudSettingEvent>((event, emit) async {
await event.when(
initial: () async {
await getSupabaseConfig();
_listener.start(
onSettingChanged: (result) {
if (isClosed) {
return;
}
result.fold(
(config) => add(CloudSettingEvent.didReceiveConfig(config)),
(error) => Log.error(error),
);
},
);
},
enableSync: (bool enable) async {
final oldConfig = state.config;
if (oldConfig != null) {
oldConfig.freeze();
final newConfig = oldConfig.rebuild((config) {
config.enableSync = enable;
});
updateSupabaseConfig(newConfig);
emit(state.copyWith(config: newConfig));
}
final update = UpdateCloudConfigPB.create()..enableSync = enable;
updateCloudConfig(update);
},
didReceiveSyncConfig: (SupabaseConfigPB config) {
emit(state.copyWith(config: config));
didReceiveConfig: (UserCloudConfigPB config) {
emit(
state.copyWith(
config: config,
loadingState: LoadingState.finish(left(unit)),
),
);
},
enableEncrypt: (bool enable) {
final update = UpdateCloudConfigPB.create()..enableEncrypt = enable;
updateCloudConfig(update);
emit(state.copyWith(loadingState: const LoadingState.loading()));
},
);
});
}
Future<void> updateSupabaseConfig(SupabaseConfigPB config) async {
await UserEventSetSupabaseConfig(config).send();
}
Future<void> getSupabaseConfig() async {
final result = await UserEventGetSupabaseConfig().send();
result.fold(
(config) {
if (!isClosed) {
add(SyncSettingEvent.didReceiveSyncConfig(config));
}
},
(r) => Log.error(r),
);
Future<void> updateCloudConfig(UpdateCloudConfigPB config) async {
await UserEventSetCloudConfig(config).send();
}
}
@freezed
class SyncSettingEvent with _$SyncSettingEvent {
const factory SyncSettingEvent.initial() = _Initial;
const factory SyncSettingEvent.didReceiveSyncConfig(
SupabaseConfigPB config,
class CloudSettingEvent with _$CloudSettingEvent {
const factory CloudSettingEvent.initial() = _Initial;
const factory CloudSettingEvent.didReceiveConfig(
UserCloudConfigPB config,
) = _DidSyncSupabaseConfig;
const factory SyncSettingEvent.enableSync(bool enable) = _EnableSync;
const factory CloudSettingEvent.enableSync(bool enable) = _EnableSync;
const factory CloudSettingEvent.enableEncrypt(bool enable) = _EnableEncrypt;
}
@freezed
class SyncSettingState with _$SyncSettingState {
const factory SyncSettingState({
SupabaseConfigPB? config,
class CloudSettingState with _$CloudSettingState {
const factory CloudSettingState({
required UserCloudConfigPB config,
required Either<Unit, String> successOrFailure,
}) = _SyncSettingState;
required LoadingState loadingState,
}) = _CloudSettingState;
factory SyncSettingState.initial() => SyncSettingState(
factory CloudSettingState.initial(UserCloudConfigPB config) =>
CloudSettingState(
config: config,
successOrFailure: left(unit),
loadingState: LoadingState.finish(left(unit)),
);
}

View File

@ -1,6 +1,5 @@
import 'package:appflowy/generated/flowy_svgs.g.dart';
import 'package:appflowy/plugins/document/presentation/more/cubit/document_appearance_cubit.dart';
import 'package:appflowy/startup/entry_point.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/util/color_generator/color_generator.dart';
import 'package:appflowy/workspace/application/menu/menu_user_bloc.dart';
@ -120,21 +119,13 @@ class SidebarUser extends StatelessWidget {
didLogout: () async {
// Pop the dialog using the dialog context
Navigator.of(dialogContext).pop();
await FlowyRunner.run(
FlowyApp(),
integrationEnv(),
);
await runAppFlowy();
},
dismissDialog: () => Navigator.of(context).pop(),
didOpenUser: () async {
// Pop the dialog using the dialog context
Navigator.of(dialogContext).pop();
await FlowyRunner.run(
FlowyApp(),
integrationEnv(),
);
await runAppFlowy();
},
),
);

View File

@ -102,7 +102,7 @@ class SettingsDialog extends StatelessWidget {
didOpenUser: didOpenUser,
);
case SettingsPage.syncSetting:
return const SyncSettingView();
return SyncSettingView(userId: user.id.toString());
case SettingsPage.shortcuts:
return const SettingsCustomizeShortcutsWrapper();
default:

View File

@ -1,8 +1,7 @@
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/startup/entry_point.dart';
import 'package:appflowy/startup/launch_configuration.dart';
import 'package:appflowy/startup/startup.dart';
import 'package:appflowy/user/application/sign_in_bloc.dart';
import 'package:appflowy/user/presentation/router.dart';
import 'package:appflowy/user/presentation/sign_in_screen.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/user_profile.pb.dart';
@ -10,7 +9,7 @@ import 'package:dartz/dartz.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flowy_infra_ui/flowy_infra_ui.dart';
import 'package:flowy_infra_ui/style_widget/snap_bar.dart';
import 'package:flutter/widgets.dart';
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
class SettingThirdPartyLogin extends StatelessWidget {
@ -28,19 +27,30 @@ class SettingThirdPartyLogin extends StatelessWidget {
(result) => _handleSuccessOrFail(result, context),
);
},
builder: (_, __) => Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
FlowyText.medium(
LocaleKeys.signIn_signInWith.tr(),
fontSize: 16,
),
const VSpace(6),
const ThirdPartySignInButtons(
mainAxisAlignment: MainAxisAlignment.start,
),
],
),
builder: (_, state) {
final indicator = state.isSubmitting
? const CircularProgressIndicator.adaptive()
: const SizedBox.shrink();
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
children: [
FlowyText.medium(
LocaleKeys.signIn_signInWith.tr(),
fontSize: 16,
),
const HSpace(6),
indicator
],
),
const VSpace(6),
const ThirdPartySignInButtons(
mainAxisAlignment: MainAxisAlignment.start,
),
],
);
},
),
);
}
@ -51,14 +61,12 @@ class SettingThirdPartyLogin extends StatelessWidget {
) async {
result.fold(
(user) async {
didLogin();
await FlowyRunner.run(
FlowyApp(),
integrationEnv(),
config: const LaunchConfiguration(
autoRegistrationSupported: true,
),
);
if (user.encryptionType == EncryptionTypePB.Symmetric) {
getIt<AuthRouter>().pushEncryptionScreen(context, user);
} else {
didLogin();
await runAppFlowy();
}
},
(error) => showSnapBar(context, error.msg),
);

View File

@ -167,7 +167,7 @@ class _ChangeStoragePathButtonState extends State<_ChangeStoragePathButton> {
message: LocaleKeys.settings_files_changeLocationTooltips.tr(),
child: SecondaryTextButton(
LocaleKeys.settings_files_change.tr(),
mode: SecondaryTextButtonMode.small,
mode: TextButtonMode.small,
onPressed: () async {
// pick the new directory and reload app
final path = await getIt<FilePickerService>().getDirectoryPath();

View File

@ -108,26 +108,38 @@ class SettingsUserView extends StatelessWidget {
}
Widget _renderLogoutButton(BuildContext context) {
return Tooltip(
message: LocaleKeys.settings_user_clickToLogout.tr(),
child: FlowyButton(
margin: const EdgeInsets.symmetric(vertical: 8.0, horizontal: 2.0),
text: FlowyText.medium(
LocaleKeys.settings_menu_logout.tr(),
fontSize: 13,
return Center(
child: SizedBox(
width: 160,
child: FlowyButton(
margin: const EdgeInsets.symmetric(vertical: 8.0, horizontal: 2.0),
text: FlowyText.medium(
LocaleKeys.settings_menu_logout.tr(),
fontSize: 13,
textAlign: TextAlign.center,
),
onTap: () async {
NavigatorAlertDialog(
title: logoutPromptMessage(),
confirm: () async {
await getIt<AuthService>().signOut();
didLogout();
},
).show(context);
},
),
onTap: () async {
NavigatorAlertDialog(
title: LocaleKeys.settings_menu_logoutPrompt.tr(),
confirm: () async {
await getIt<AuthService>().signOut();
didLogout();
},
).show(context);
},
),
);
}
String logoutPromptMessage() {
switch (user.encryptionType) {
case EncryptionTypePB.Symmetric:
return LocaleKeys.settings_menu_selfEncryptionLogoutPrompt.tr();
default:
return LocaleKeys.settings_menu_logoutPrompt.tr();
}
}
}
@visibleForTesting

View File

@ -1,36 +1,162 @@
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/workspace/application/settings/setting_supabase_bloc.dart';
import 'package:appflowy/workspace/presentation/home/toast.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/user_setting.pb.dart';
import 'package:dartz/dartz.dart' show Either;
import 'package:easy_localization/easy_localization.dart';
import 'package:flowy_infra/size.dart';
import 'package:flowy_infra_ui/flowy_infra_ui.dart';
import 'package:flowy_infra_ui/widget/error_page.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
class SyncSettingView extends StatelessWidget {
const SyncSettingView({super.key});
final String userId;
const SyncSettingView({required this.userId, super.key});
@override
Widget build(BuildContext context) {
return BlocProvider(
create: (context) =>
SyncSettingBloc()..add(const SyncSettingEvent.initial()),
child: BlocBuilder<SyncSettingBloc, SyncSettingState>(
builder: (context, state) {
return Row(
children: [
FlowyText.medium(LocaleKeys.settings_menu_enableSync.tr()),
const Spacer(),
Switch(
onChanged: (bool value) {
context.read<SyncSettingBloc>().add(
SyncSettingEvent.enableSync(value),
);
},
value: state.config?.enableSync ?? false,
)
],
return FutureBuilder<Either<UserCloudConfigPB, FlowyError>>(
future: UserEventGetCloudConfig().send(),
builder: (context, snapshot) {
if (snapshot.data != null &&
snapshot.connectionState == ConnectionState.done) {
return snapshot.data!.fold(
(config) {
return BlocProvider(
create: (context) => CloudSettingBloc(
userId: userId,
config: config,
)..add(const CloudSettingEvent.initial()),
child: BlocBuilder<CloudSettingBloc, CloudSettingState>(
builder: (context, state) {
return const Column(
children: [
EnableSync(),
EnableEncrypt(),
],
);
},
),
);
},
(err) {
return FlowyErrorPage.message(err.toString(), howToFix: "");
},
);
},
),
} else {
return const Center(
child: CircularProgressIndicator(),
);
}
},
);
}
}
class EnableEncrypt extends StatelessWidget {
const EnableEncrypt({super.key});
@override
Widget build(BuildContext context) {
return BlocBuilder<CloudSettingBloc, CloudSettingState>(
builder: (context, state) {
final indicator = state.loadingState.when(
loading: () => const CircularProgressIndicator.adaptive(),
finish: (successOrFail) => const SizedBox.shrink(),
);
return Column(
children: [
Row(
children: [
FlowyText.medium(LocaleKeys.settings_menu_enableEncrypt.tr()),
const Spacer(),
indicator,
const HSpace(3),
Switch(
onChanged: state.config.enableEncrypt
? null
: (bool value) {
context
.read<CloudSettingBloc>()
.add(CloudSettingEvent.enableEncrypt(value));
},
value: state.config.enableEncrypt,
)
],
),
Column(
crossAxisAlignment: CrossAxisAlignment.start,
mainAxisAlignment: MainAxisAlignment.start,
children: [
IntrinsicHeight(
child: Opacity(
opacity: 0.6,
child: FlowyText.medium(
LocaleKeys.settings_menu_enableEncryptPrompt.tr(),
maxLines: 13,
),
),
),
const VSpace(6),
SizedBox(
height: 40,
child: Tooltip(
message: LocaleKeys.settings_menu_clickToCopySecret.tr(),
child: FlowyButton(
disable: !(state.config.enableEncrypt),
decoration: BoxDecoration(
borderRadius: Corners.s5Border,
border: Border.all(
color: Theme.of(context).colorScheme.secondary,
),
),
text: FlowyText.medium(state.config.encryptSecret),
onTap: () async {
await Clipboard.setData(
ClipboardData(text: state.config.encryptSecret),
);
// TODO(Lucas): bring the toast to the top of the dialog.
showMessageToast(LocaleKeys.message_copy_success.tr());
},
),
),
),
],
)
],
);
},
);
}
}
class EnableSync extends StatelessWidget {
const EnableSync({super.key});
@override
Widget build(BuildContext context) {
return BlocBuilder<CloudSettingBloc, CloudSettingState>(
builder: (context, state) {
return Row(
children: [
FlowyText.medium(LocaleKeys.settings_menu_enableSync.tr()),
const Spacer(),
Switch(
onChanged: (bool value) {
context.read<CloudSettingBloc>().add(
CloudSettingEvent.enableSync(value),
);
},
value: state.config.enableSync,
)
],
);
},
);
}
}

View File

@ -125,10 +125,17 @@ class _CreateFlowyAlertDialog extends State<NavigatorAlertDialog> {
mainAxisAlignment: MainAxisAlignment.center,
children: <Widget>[
...[
FlowyText.medium(
widget.title,
fontSize: FontSizes.s16,
color: Theme.of(context).colorScheme.tertiary,
ConstrainedBox(
constraints: const BoxConstraints(
maxWidth: 300,
maxHeight: 100,
),
child: FlowyText.medium(
widget.title,
fontSize: FontSizes.s16,
color: Theme.of(context).colorScheme.tertiary,
maxLines: null,
),
),
],
if (widget.confirm != null) ...[
@ -216,6 +223,7 @@ class OkCancelButton extends StatelessWidget {
final String? cancelTitle;
final double? minHeight;
final MainAxisAlignment alignment;
final TextButtonMode mode;
const OkCancelButton({
Key? key,
@ -225,6 +233,7 @@ class OkCancelButton extends StatelessWidget {
this.cancelTitle,
this.minHeight,
this.alignment = MainAxisAlignment.spaceAround,
this.mode = TextButtonMode.big,
}) : super(key: key);
@override
@ -238,14 +247,14 @@ class OkCancelButton extends StatelessWidget {
SecondaryTextButton(
cancelTitle ?? LocaleKeys.button_Cancel.tr(),
onPressed: onCancelPressed,
mode: SecondaryTextButtonMode.big,
mode: mode,
),
HSpace(Insets.m),
if (onOkPressed != null)
PrimaryTextButton(
okTitle ?? LocaleKeys.button_OK.tr(),
onPressed: onOkPressed,
bigMode: true,
mode: mode,
),
],
),

View File

@ -1,21 +1,21 @@
import 'package:flowy_infra_ui/style_widget/text.dart';
import 'package:flutter/material.dart';
import 'package:flowy_infra/size.dart';
import 'base_styled_button.dart';
import 'secondary_button.dart';
class PrimaryTextButton extends StatelessWidget {
final String label;
final VoidCallback? onPressed;
final bool bigMode;
final TextButtonMode mode;
const PrimaryTextButton(this.label,
{Key? key, this.onPressed, this.bigMode = false})
{Key? key, this.onPressed, this.mode = TextButtonMode.big})
: super(key: key);
@override
Widget build(BuildContext context) {
return PrimaryButton(
bigMode: bigMode,
mode: mode,
onPressed: onPressed,
child: FlowyText.regular(
label,
@ -28,21 +28,24 @@ class PrimaryTextButton extends StatelessWidget {
class PrimaryButton extends StatelessWidget {
final Widget child;
final VoidCallback? onPressed;
final bool bigMode;
final TextButtonMode mode;
const PrimaryButton(
{Key? key, required this.child, this.onPressed, this.bigMode = false})
{Key? key,
required this.child,
this.onPressed,
this.mode = TextButtonMode.big})
: super(key: key);
@override
Widget build(BuildContext context) {
return BaseStyledButton(
minWidth: bigMode ? 100 : 80,
minHeight: bigMode ? 40 : 38,
minWidth: mode.size.width,
minHeight: mode.size.height,
contentPadding: EdgeInsets.zero,
bgColor: Theme.of(context).colorScheme.primary,
hoverColor: Theme.of(context).colorScheme.primaryContainer,
borderRadius: bigMode ? Corners.s12Border : Corners.s8Border,
borderRadius: mode.borderRadius,
onPressed: onPressed,
child: child,
);

View File

@ -4,29 +4,29 @@ import 'package:flowy_infra/size.dart';
import 'base_styled_button.dart';
enum SecondaryTextButtonMode {
enum TextButtonMode {
normal,
big,
small;
Size get size {
switch (this) {
case SecondaryTextButtonMode.normal:
return const Size(80, 38);
case SecondaryTextButtonMode.big:
case TextButtonMode.normal:
return const Size(80, 32);
case TextButtonMode.big:
return const Size(100, 40);
case SecondaryTextButtonMode.small:
case TextButtonMode.small:
return const Size(100, 30);
}
}
BorderRadius get borderRadius {
switch (this) {
case SecondaryTextButtonMode.normal:
case TextButtonMode.normal:
return Corners.s8Border;
case SecondaryTextButtonMode.big:
case TextButtonMode.big:
return Corners.s12Border;
case SecondaryTextButtonMode.small:
case TextButtonMode.small:
return Corners.s6Border;
}
}
@ -37,12 +37,12 @@ class SecondaryTextButton extends StatelessWidget {
this.label, {
super.key,
this.onPressed,
this.mode = SecondaryTextButtonMode.normal,
this.mode = TextButtonMode.normal,
});
final String label;
final VoidCallback? onPressed;
final SecondaryTextButtonMode mode;
final TextButtonMode mode;
@override
Widget build(BuildContext context) {
@ -62,12 +62,12 @@ class SecondaryButton extends StatelessWidget {
super.key,
required this.child,
this.onPressed,
this.mode = SecondaryTextButtonMode.normal,
this.mode = TextButtonMode.normal,
});
final Widget child;
final VoidCallback? onPressed;
final SecondaryTextButtonMode mode;
final TextButtonMode mode;
@override
Widget build(BuildContext context) {

View File

@ -34,14 +34,14 @@ default = ["custom-protocol"]
custom-protocol = ["tauri/custom-protocol"]
[patch.crates-io]
collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-persistence = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-persistence = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
#collab = { path = "../../../../AppFlowy-Collab/collab" }
#collab-folder = { path = "../../../../AppFlowy-Collab/collab-folder" }

View File

@ -226,8 +226,14 @@
"open": "Open Settings",
"logout": "Logout",
"logoutPrompt": "Are you sure to logout?",
"selfEncryptionLogoutPrompt": "Are you sure you want to log out? Please ensure you have copied the encryption secret",
"syncSetting": "Sync Setting",
"enableSync": "Enable sync",
"enableEncrypt": "Encrypt data",
"enableEncryptPrompt": "Activate encryption to secure your data with this secret. Store it safely; once enabled, it can't be turned off. If lost, your data becomes irretrievable. Click to copy",
"inputEncryptPrompt": "Please enter your encryption secret for",
"clickToCopySecret": "Click to copy secret",
"inputTextFieldHint": "Your secret",
"historicalUserList": "User login history",
"historicalUserListTooltip": "This list displays your anonymous accounts. You can click on an account to view its details. Anonymous accounts are created by clicking the 'Get Started' button",
"openHistoricalUser": "Click to open the anonymous account"

View File

@ -17,6 +17,16 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "aead"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0"
dependencies = [
"crypto-common",
"generic-array",
]
[[package]]
name = "aes"
version = "0.8.3"
@ -28,6 +38,20 @@ dependencies = [
"cpufeatures",
]
[[package]]
name = "aes-gcm"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "209b47e8954a928e1d72e86eca7000ebb6655fe1436d33eefc2201cad027e237"
dependencies = [
"aead",
"aes",
"cipher",
"ctr",
"ghash",
"subtle",
]
[[package]]
name = "ahash"
version = "0.7.6"
@ -89,14 +113,14 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.71"
version = "1.0.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854"
[[package]]
name = "appflowy-integrate"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"anyhow",
"collab",
@ -255,9 +279,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
version = "0.21.0"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a"
checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"
[[package]]
name = "base64ct"
@ -587,7 +611,7 @@ dependencies = [
[[package]]
name = "collab"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"anyhow",
"bytes",
@ -605,7 +629,7 @@ dependencies = [
[[package]]
name = "collab-client-ws"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"bytes",
"collab-sync",
@ -623,11 +647,11 @@ dependencies = [
[[package]]
name = "collab-database"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"anyhow",
"async-trait",
"base64 0.21.0",
"base64 0.21.2",
"chrono",
"collab",
"collab-derive",
@ -650,7 +674,7 @@ dependencies = [
[[package]]
name = "collab-derive"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"proc-macro2",
"quote",
@ -662,7 +686,7 @@ dependencies = [
[[package]]
name = "collab-document"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"anyhow",
"collab",
@ -681,7 +705,7 @@ dependencies = [
[[package]]
name = "collab-folder"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"anyhow",
"chrono",
@ -701,7 +725,7 @@ dependencies = [
[[package]]
name = "collab-persistence"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"bincode",
"chrono",
@ -721,7 +745,7 @@ dependencies = [
[[package]]
name = "collab-plugins"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"anyhow",
"async-trait",
@ -749,7 +773,7 @@ dependencies = [
[[package]]
name = "collab-sync"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"bytes",
"collab",
@ -771,7 +795,7 @@ dependencies = [
[[package]]
name = "collab-user"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=7f26d5#7f26d568b87fb0a14242bfa018f8f1df0d03665c"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=c3c22d#c3c22d9addda6cf9943e28c4294b4180d3454299"
dependencies = [
"anyhow",
"collab",
@ -936,6 +960,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
dependencies = [
"generic-array",
"rand_core 0.6.4",
"typenum",
]
@ -960,6 +985,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "ctr"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835"
dependencies = [
"cipher",
]
[[package]]
name = "cxx"
version = "1.0.94"
@ -1096,9 +1130,9 @@ dependencies = [
[[package]]
name = "digest"
version = "0.10.6"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f"
checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
dependencies = [
"block-buffer",
"crypto-common",
@ -1458,6 +1492,19 @@ dependencies = [
"uuid",
]
[[package]]
name = "flowy-encrypt"
version = "0.1.0"
dependencies = [
"aes-gcm",
"anyhow",
"base64 0.21.2",
"hmac",
"pbkdf2 0.12.2",
"rand 0.8.5",
"sha2",
]
[[package]]
name = "flowy-error"
version = "0.1.0"
@ -1563,6 +1610,7 @@ dependencies = [
"dotenv",
"flowy-database-deps",
"flowy-document-deps",
"flowy-encrypt",
"flowy-error",
"flowy-folder-deps",
"flowy-server-config",
@ -1645,8 +1693,12 @@ dependencies = [
"collab-plugins",
"dotenv",
"flowy-core",
"flowy-database-deps",
"flowy-database2",
"flowy-document-deps",
"flowy-document2",
"flowy-encrypt",
"flowy-folder-deps",
"flowy-folder2",
"flowy-net",
"flowy-notification",
@ -1676,7 +1728,7 @@ name = "flowy-user"
version = "0.1.0"
dependencies = [
"appflowy-integrate",
"base64 0.21.0",
"base64 0.21.2",
"bytes",
"chrono",
"collab",
@ -1689,6 +1741,7 @@ dependencies = [
"fancy-regex 0.11.0",
"flowy-codegen",
"flowy-derive",
"flowy-encrypt",
"flowy-error",
"flowy-notification",
"flowy-server-config",
@ -1919,6 +1972,16 @@ dependencies = [
"wasi 0.11.0+wasi-snapshot-preview1",
]
[[package]]
name = "ghash"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d930750de5717d2dd0b8c0d42c076c0e884c81a73e6cab859bbd2339c71e3e40"
dependencies = [
"opaque-debug",
"polyval",
]
[[package]]
name = "gimli"
version = "0.27.2"
@ -2722,6 +2785,12 @@ version = "1.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
[[package]]
name = "opaque-debug"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "openssl"
version = "0.10.50"
@ -2872,6 +2941,16 @@ dependencies = [
"sha2",
]
[[package]]
name = "pbkdf2"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
dependencies = [
"digest",
"hmac",
]
[[package]]
name = "peeking_take_while"
version = "0.1.2"
@ -3087,13 +3166,25 @@ version = "0.3.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160"
[[package]]
name = "polyval"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d52cff9d1d4dee5fe6d03729099f4a310a41179e0a10dbf542039873f2e826fb"
dependencies = [
"cfg-if",
"cpufeatures",
"opaque-debug",
"universal-hash",
]
[[package]]
name = "postgres-protocol"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78b7fa9f396f51dffd61546fd8573ee20592287996568e6175ceb0f8699ad75d"
dependencies = [
"base64 0.21.0",
"base64 0.21.2",
"byteorder",
"bytes",
"fallible-iterator",
@ -3595,7 +3686,7 @@ version = "0.11.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27b71749df584b7f4cac2c426c127a7c785a5106cc98f7a8feb044115f0fa254"
dependencies = [
"base64 0.21.0",
"base64 0.21.2",
"bytes",
"encoding_rs",
"futures-core",
@ -3771,7 +3862,7 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b"
dependencies = [
"base64 0.21.0",
"base64 0.21.2",
]
[[package]]
@ -3958,9 +4049,9 @@ checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012"
[[package]]
name = "sha2"
version = "0.10.6"
version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0"
checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8"
dependencies = [
"cfg-if",
"cpufeatures",
@ -4775,6 +4866,16 @@ version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
[[package]]
name = "universal-hash"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea"
dependencies = [
"crypto-common",
"subtle",
]
[[package]]
name = "untrusted"
version = "0.7.1"
@ -5236,7 +5337,7 @@ dependencies = [
"crossbeam-utils",
"flate2",
"hmac",
"pbkdf2",
"pbkdf2 0.11.0",
"sha1",
"time 0.3.21",
"zstd",

View File

@ -21,6 +21,7 @@ members = [
"flowy-server",
"flowy-server-config",
"flowy-config",
"flowy-encrypt",
]
[profile.dev]
@ -38,13 +39,13 @@ opt-level = 3
incremental = false
[patch.crates-io]
collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "7f26d5" }
collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-plugins = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c3c22d" }
#collab = { path = "../AppFlowy-Collab/collab" }
#collab-folder = { path = "../AppFlowy-Collab/collab-folder" }
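
The flowy-encrypt crate added to the workspace above pulls in aes-gcm, pbkdf2, hmac, rand and sha2 (see its Cargo.lock entry earlier in this diff). Its API is not shown here; the following is a minimal sketch, under assumed parameters, of how those dependencies are commonly combined: derive an AES-256 key from the user's secret with PBKDF2, then seal each payload with AES-GCM under a random nonce. Function names, salt handling and the iteration count are illustrative, not the crate's actual interface.

use aes_gcm::aead::{Aead, AeadCore, KeyInit, OsRng};
use aes_gcm::{Aes256Gcm, Key, Nonce};
use pbkdf2::pbkdf2_hmac;
use sha2::Sha256;

const NONCE_LEN: usize = 12;

// Derive a 256-bit key from the user's secret (salt and iteration count are illustrative).
fn derive_key(secret: &str, salt: &[u8]) -> [u8; 32] {
  let mut key = [0u8; 32];
  pbkdf2_hmac::<Sha256>(secret.as_bytes(), salt, 10_000, &mut key);
  key
}

// Encrypt with AES-256-GCM; the random nonce is prepended to the ciphertext.
fn encrypt(plaintext: &[u8], secret: &str, salt: &[u8]) -> Vec<u8> {
  let key = derive_key(secret, salt);
  let cipher = Aes256Gcm::new(Key::<Aes256Gcm>::from_slice(&key));
  let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
  let ciphertext = cipher.encrypt(&nonce, plaintext).expect("encryption failed");
  let mut out = nonce.to_vec();
  out.extend(ciphertext);
  out
}

// Decrypt; returns None when the data is malformed or the secret is wrong.
fn decrypt(data: &[u8], secret: &str, salt: &[u8]) -> Option<Vec<u8>> {
  if data.len() < NONCE_LEN {
    return None;
  }
  let (nonce, ciphertext) = data.split_at(NONCE_LEN);
  let key = derive_key(secret, salt);
  let cipher = Aes256Gcm::new(Key::<Aes256Gcm>::from_slice(&key));
  cipher.decrypt(Nonce::from_slice(nonce), ciphertext).ok()
}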

View File

@ -8,6 +8,8 @@ pub struct AppFlowyEnv {
}
impl AppFlowyEnv {
/// Parse the environment variable from the frontend application. The frontend will
/// pass the environment variable as a json string after launching.
pub fn parser(env_str: &str) {
if let Ok(env) = serde_json::from_str::<AppFlowyEnv>(env_str) {
env.supabase_config.write_env();

View File

@ -94,6 +94,7 @@ struct DocumentFolderOperation(Arc<DocumentManager>);
impl FolderOperationHandler for DocumentFolderOperation {
fn create_workspace_view(
&self,
uid: i64,
workspace_view_builder: Arc<RwLock<WorkspaceViewBuilder>>,
) -> FutureResult<(), FlowyError> {
let manager = self.0.clone();
@ -109,7 +110,7 @@ impl FolderOperationHandler for DocumentFolderOperation {
let json_str = include_str!("../../assets/read_me.json");
let document_pb = JsonToDocumentParser::json_str_to_document(json_str).unwrap();
manager
.create_document(&view.parent_view.id, Some(document_pb.into()))
.create_document(uid, &view.parent_view.id, Some(document_pb.into()))
.unwrap();
view
})
@ -152,7 +153,7 @@ impl FolderOperationHandler for DocumentFolderOperation {
fn create_view_with_view_data(
&self,
_user_id: i64,
user_id: i64,
view_id: &str,
_name: &str,
data: Vec<u8>,
@ -164,7 +165,7 @@ impl FolderOperationHandler for DocumentFolderOperation {
let manager = self.0.clone();
FutureResult::new(async move {
let data = DocumentDataPB::try_from(Bytes::from(data))?;
manager.create_document(&view_id, Some(data.into()))?;
manager.create_document(user_id, &view_id, Some(data.into()))?;
Ok(())
})
}
@ -172,7 +173,7 @@ impl FolderOperationHandler for DocumentFolderOperation {
/// Create a view with built-in data.
fn create_built_in_view(
&self,
_user_id: i64,
user_id: i64,
view_id: &str,
_name: &str,
layout: ViewLayout,
@ -181,13 +182,14 @@ impl FolderOperationHandler for DocumentFolderOperation {
let view_id = view_id.to_string();
let manager = self.0.clone();
FutureResult::new(async move {
manager.create_document(&view_id, None)?;
manager.create_document(user_id, &view_id, None)?;
Ok(())
})
}
fn import_from_bytes(
&self,
uid: i64,
view_id: &str,
_name: &str,
_import_type: ImportType,
@ -197,7 +199,7 @@ impl FolderOperationHandler for DocumentFolderOperation {
let manager = self.0.clone();
FutureResult::new(async move {
let data = DocumentDataPB::try_from(Bytes::from(bytes))?;
manager.create_document(&view_id, Some(data.into()))?;
manager.create_document(uid, &view_id, Some(data.into()))?;
Ok(())
})
}
@ -315,6 +317,7 @@ impl FolderOperationHandler for DatabaseFolderOperation {
fn import_from_bytes(
&self,
_uid: i64,
view_id: &str,
_name: &str,
import_type: ImportType,

View File

@ -17,14 +17,14 @@ use flowy_server::local_server::{LocalServer, LocalServerDB};
use flowy_server::self_host::configuration::self_host_server_configuration;
use flowy_server::self_host::SelfHostServer;
use flowy_server::supabase::SupabaseServer;
use flowy_server::AppFlowyServer;
use flowy_server::{AppFlowyEncryption, AppFlowyServer, EncryptionImpl};
use flowy_server_config::supabase_config::SupabaseConfiguration;
use flowy_sqlite::kv::StorePreferences;
use flowy_user::event_map::UserCloudServiceProvider;
use flowy_user::services::database::{
get_user_profile, get_user_workspace, open_collab_db, open_user_db,
};
use flowy_user_deps::cloud::UserService;
use flowy_user_deps::cloud::{UserCloudConfig, UserService};
use flowy_user_deps::entities::*;
use lib_infra::future::FutureResult;
@ -66,7 +66,8 @@ pub struct AppFlowyServerProvider {
provider_type: RwLock<ServerProviderType>,
device_id: Mutex<String>,
providers: RwLock<HashMap<ServerProviderType, Arc<dyn AppFlowyServer>>>,
supabase_config: RwLock<Option<SupabaseConfiguration>>,
enable_sync: RwLock<bool>,
encryption: RwLock<Arc<dyn AppFlowyEncryption>>,
store_preferences: Weak<StorePreferences>,
}
@ -74,15 +75,26 @@ impl AppFlowyServerProvider {
pub fn new(
config: AppFlowyCoreConfig,
provider_type: ServerProviderType,
supabase_config: Option<SupabaseConfiguration>,
cloud_config: Option<UserCloudConfig>,
store_preferences: Weak<StorePreferences>,
) -> Self {
let enable_sync = cloud_config
.as_ref()
.map(|config| config.enable_sync)
.unwrap_or(true);
let encryption = EncryptionImpl::new(
cloud_config
.as_ref()
.map(|config| config.encrypt_secret.clone()),
);
Self {
config,
provider_type: RwLock::new(provider_type),
device_id: Default::default(),
providers: RwLock::new(HashMap::new()),
supabase_config: RwLock::new(supabase_config),
enable_sync: RwLock::new(enable_sync),
encryption: RwLock::new(Arc::new(encryption)),
store_preferences,
}
}
@ -127,11 +139,13 @@ impl AppFlowyServerProvider {
Ok::<Arc<dyn AppFlowyServer>, FlowyError>(server)
},
ServerProviderType::Supabase => {
let config = self.supabase_config.read().clone().ok_or(FlowyError::new(
ErrorCode::InvalidAuthConfig,
"Missing supabase config".to_string(),
))?;
Ok::<Arc<dyn AppFlowyServer>, FlowyError>(Arc::new(SupabaseServer::new(config)))
let config = SupabaseConfiguration::from_env()?;
let encryption = Arc::downgrade(&*self.encryption.read());
Ok::<Arc<dyn AppFlowyServer>, FlowyError>(Arc::new(SupabaseServer::new(
config,
*self.enable_sync.read(),
encryption,
)))
},
}?;
server.set_sync_device_id(&self.device_id.lock());
@ -152,18 +166,20 @@ impl AppFlowyServerProvider {
}
impl UserCloudServiceProvider for AppFlowyServerProvider {
fn set_supabase_config(&self, supabase_config: &SupabaseConfiguration) {
self
.supabase_config
.write()
.replace(supabase_config.clone());
supabase_config.write_env();
if let Ok(provider) = self.get_provider(&self.provider_type.read()) {
provider.enable_sync(supabase_config.enable_sync);
fn set_enable_sync(&self, enable_sync: bool) {
match self.get_provider(&self.provider_type.read()) {
Ok(server) => {
server.set_enable_sync(enable_sync);
*self.enable_sync.write() = enable_sync;
},
Err(e) => tracing::error!("🔴Failed to enable sync: {:?}", e),
}
}
fn set_encrypt_secret(&self, secret: String) {
self.encryption.write().set_secret(secret);
}
/// When the user logs in, the provider type is set by the [AuthType] and saved to disk for next use.
///
/// Each [AuthType] has a corresponding [ServerProviderType]. The [ServerProviderType] is used
@ -224,16 +240,17 @@ impl FolderCloudService for AppFlowyServerProvider {
})
}
fn get_folder_latest_snapshot(
fn get_folder_snapshots(
&self,
workspace_id: &str,
) -> FutureResult<Option<FolderSnapshot>, Error> {
limit: usize,
) -> FutureResult<Vec<FolderSnapshot>, Error> {
let workspace_id = workspace_id.to_string();
let server = self.get_provider(&self.provider_type.read());
FutureResult::new(async move {
server?
.folder_service()
.get_folder_latest_snapshot(&workspace_id)
.get_folder_snapshots(&workspace_id, limit)
.await
})
}
@ -287,16 +304,17 @@ impl DatabaseCloudService for AppFlowyServerProvider {
})
}
fn get_collab_latest_snapshot(
fn get_collab_snapshots(
&self,
object_id: &str,
) -> FutureResult<Option<DatabaseSnapshot>, Error> {
limit: usize,
) -> FutureResult<Vec<DatabaseSnapshot>, Error> {
let server = self.get_provider(&self.provider_type.read());
let database_id = object_id.to_string();
FutureResult::new(async move {
server?
.database_service()
.get_collab_latest_snapshot(&database_id)
.get_collab_snapshots(&database_id, limit)
.await
})
}
@ -314,16 +332,17 @@ impl DocumentCloudService for AppFlowyServerProvider {
})
}
fn get_document_latest_snapshot(
fn get_document_snapshots(
&self,
document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, Error> {
limit: usize,
) -> FutureResult<Vec<DocumentSnapshot>, Error> {
let server = self.get_provider(&self.provider_type.read());
let document_id = document_id.to_string();
FutureResult::new(async move {
server?
.document_service()
.get_document_latest_snapshot(&document_id)
.get_document_snapshots(&document_id, limit)
.await
})
}
@ -361,12 +380,7 @@ impl CollabStorageProvider for AppFlowyServerProvider {
}
fn is_sync_enabled(&self) -> bool {
self
.supabase_config
.read()
.as_ref()
.map(|config| config.enable_sync)
.unwrap_or(false)
*self.enable_sync.read()
}
}
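
One design note on the wiring above: AppFlowyServerProvider keeps the encryption state behind RwLock<Arc<dyn AppFlowyEncryption>> and hands the Supabase server only a weak reference (Arc::downgrade), so a secret set later through set_encrypt_secret is visible to the running server without rebuilding it. A compressed sketch of that shape, assuming parking_lot-style locks as in the surrounding code; get_secret is an assumed accessor that does not appear in this diff:

use std::sync::{Arc, Weak};
use parking_lot::RwLock;

// Shape implied by the diff; not the actual flowy-server definitions.
trait AppFlowyEncryption: Send + Sync {
  fn set_secret(&self, secret: String); // called from set_encrypt_secret()
  fn get_secret(&self) -> Option<String>; // assumed accessor
}

struct ServerProvider {
  encryption: RwLock<Arc<dyn AppFlowyEncryption>>,
}

impl ServerProvider {
  // Servers hold a Weak handle, so the secret can change at runtime without
  // the server itself keeping the encryption object alive.
  fn encryption_handle(&self) -> Weak<dyn AppFlowyEncryption> {
    Arc::downgrade(&*self.encryption.read())
  }
}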

View File

@ -21,7 +21,8 @@ use flowy_folder2::manager::{FolderInitializeData, FolderManager};
use flowy_sqlite::kv::StorePreferences;
use flowy_task::{TaskDispatcher, TaskRunner};
use flowy_user::event_map::{SignUpContext, UserCloudServiceProvider, UserStatusCallback};
use flowy_user::manager::{get_supabase_config, UserManager, UserSessionConfig};
use flowy_user::manager::{UserManager, UserSessionConfig};
use flowy_user::services::cloud_config::get_cloud_config;
use flowy_user_deps::entities::{AuthType, UserProfile, UserWorkspace};
use lib_dispatch::prelude::*;
use lib_dispatch::runtime::tokio_default_runtime;
@ -149,7 +150,7 @@ impl AppFlowyCore {
let server_provider = Arc::new(AppFlowyServerProvider::new(
config.clone(),
provider_type,
get_supabase_config(&store_preference),
get_cloud_config(&store_preference),
Arc::downgrade(&store_preference),
));
@ -367,7 +368,7 @@ impl UserStatusCallback for UserStatusCallbackImpl {
to_fut(async move {
folder_manager
.initialize_with_new_user(
user_profile.id,
user_profile.uid,
&user_profile.token,
context.is_new,
context.local_folder,
@ -376,14 +377,14 @@ impl UserStatusCallback for UserStatusCallbackImpl {
.await?;
database_manager
.initialize_with_new_user(
user_profile.id,
user_profile.uid,
user_workspace.id.clone(),
user_workspace.database_storage_id,
)
.await?;
document_manager
.initialize_with_new_user(user_profile.id, user_workspace.id)
.initialize_with_new_user(user_profile.uid, user_workspace.id)
.await?;
Ok(())
})

View File

@ -1,6 +1,6 @@
use anyhow::Error;
use std::collections::HashMap;
use anyhow::Error;
use collab_plugins::cloud_storage::CollabType;
use lib_infra::future::FutureResult;
@ -24,10 +24,11 @@ pub trait DatabaseCloudService: Send + Sync {
object_ty: CollabType,
) -> FutureResult<CollabObjectUpdateByOid, Error>;
fn get_collab_latest_snapshot(
fn get_collab_snapshots(
&self,
object_id: &str,
) -> FutureResult<Option<DatabaseSnapshot>, Error>;
limit: usize,
) -> FutureResult<Vec<DatabaseSnapshot>, Error>;
}
pub struct DatabaseSnapshot {

View File

@ -887,6 +887,6 @@ pub(crate) async fn get_snapshots_handler(
) -> DataResult<RepeatedDatabaseSnapshotPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let view_id = data.into_inner().value;
let snapshots = manager.get_database_snapshots(&view_id).await?;
let snapshots = manager.get_database_snapshots(&view_id, 10).await?;
data_result_ok(RepeatedDatabaseSnapshotPB { items: snapshots })
}

View File

@ -73,7 +73,7 @@ impl DatabaseManager {
&self,
uid: i64,
_workspace_id: String,
workspace_database_id: String,
database_storage_id: String,
) -> FlowyResult<()> {
let collab_db = self.user.collab_db(uid)?;
let collab_builder = UserDatabaseCollabServiceImpl {
@ -84,28 +84,30 @@ impl DatabaseManager {
let mut collab_raw_data = CollabRawData::default();
// If the workspace database does not exist on disk, try to fetch it from the remote.
if !self.is_collab_exist(uid, &collab_db, &workspace_database_id) {
if !self.is_collab_exist(uid, &collab_db, &database_storage_id) {
tracing::trace!("workspace database not exist, try to fetch from remote");
match self
.cloud_service
.get_collab_update(&workspace_database_id, CollabType::WorkspaceDatabase)
.get_collab_update(&database_storage_id, CollabType::WorkspaceDatabase)
.await
{
Ok(updates) => collab_raw_data = updates,
Ok(updates) => {
collab_raw_data = updates;
},
Err(err) => {
return Err(FlowyError::record_not_found().context(format!(
"get workspace database :{} failed: {}",
workspace_database_id, err,
database_storage_id, err,
)));
},
}
}
// Construct the workspace database.
tracing::trace!("open workspace database: {}", &workspace_database_id);
tracing::trace!("open workspace database: {}", &database_storage_id);
let collab = collab_builder.build_collab_with_config(
uid,
&workspace_database_id,
&database_storage_id,
CollabType::WorkspaceDatabase,
collab_db.clone(),
collab_raw_data,
@ -307,22 +309,21 @@ impl DatabaseManager {
pub async fn get_database_snapshots(
&self,
view_id: &str,
limit: usize,
) -> FlowyResult<Vec<DatabaseSnapshotPB>> {
let database_id = self.get_database_id_with_view_id(view_id).await?;
let mut snapshots = vec![];
if let Some(snapshot) = self
let snapshots = self
.cloud_service
.get_collab_latest_snapshot(&database_id)
.get_collab_snapshots(&database_id, limit)
.await?
.into_iter()
.map(|snapshot| DatabaseSnapshotPB {
snapshot_id: snapshot.snapshot_id,
snapshot_desc: "".to_string(),
created_at: snapshot.created_at,
data: snapshot.data,
})
{
snapshots.push(snapshot);
}
.collect::<Vec<_>>();
Ok(snapshots)
}

View File

@ -1,3 +1,19 @@
use std::format;
use std::str::FromStr;
use std::sync::Arc;
use chrono::{
DateTime, Datelike, Days, Duration, Local, NaiveDate, NaiveDateTime, Offset, TimeZone,
};
use chrono_tz::Tz;
use collab_database::database::timestamp;
use collab_database::fields::Field;
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail};
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
use flowy_error::FlowyResult;
use crate::entities::{
DateCellDataPB, FieldType, GroupPB, GroupRowsNotificationPB, InsertedGroupPB, InsertedRowPB,
RowMetaPB,
@ -12,19 +28,6 @@ use crate::services::group::controller::{
use crate::services::group::{
make_no_status_group, move_group_row, GeneratedGroupConfig, GeneratedGroups, Group,
};
use chrono::{
DateTime, Datelike, Days, Duration, Local, NaiveDate, NaiveDateTime, Offset, TimeZone,
};
use chrono_tz::Tz;
use collab_database::database::timestamp;
use collab_database::fields::Field;
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail};
use flowy_error::FlowyResult;
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
use std::format;
use std::str::FromStr;
use std::sync::Arc;
pub trait GroupConfigurationContentSerde: Sized + Send + Sync {
fn from_json(s: &str) -> Result<Self, serde_json::Error>;
@ -46,9 +49,10 @@ impl GroupConfigurationContentSerde for DateGroupConfiguration {
}
}
#[derive(Serialize_repr, Deserialize_repr)]
#[derive(Default, Serialize_repr, Deserialize_repr)]
#[repr(u8)]
pub enum DateCondition {
#[default]
Relative = 0,
Day = 1,
Week = 2,
@ -56,12 +60,6 @@ pub enum DateCondition {
Year = 4,
}
impl std::default::Default for DateCondition {
fn default() -> Self {
DateCondition::Relative
}
}
pub type DateGroupController = BaseGroupController<
DateGroupConfiguration,
DateTypeOption,
@ -245,7 +243,7 @@ impl GroupController for DateGroupController {
match self.context.get_group(group_id) {
None => tracing::warn!("Can not find the group: {}", group_id),
Some((_, _)) => {
let date = DateTime::parse_from_str(&group_id, GROUP_ID_DATE_FORMAT).unwrap();
let date = DateTime::parse_from_str(group_id, GROUP_ID_DATE_FORMAT).unwrap();
let cell = insert_date_cell(date.timestamp(), None, field);
cells.insert(field.id.clone(), cell);
},
@ -299,7 +297,7 @@ impl GroupsBuilder for DateGroupGenerator {
fn make_group_from_date_cell(
cell_data: &DateCellData,
type_option: Option<&DateTypeOption>,
setting_content: &String,
setting_content: &str,
) -> Group {
let group_id = group_id(cell_data, type_option, setting_content);
Group::new(
@ -308,12 +306,12 @@ fn make_group_from_date_cell(
)
}
const GROUP_ID_DATE_FORMAT: &'static str = "%Y/%m/%d";
const GROUP_ID_DATE_FORMAT: &str = "%Y/%m/%d";
fn group_id(
cell_data: &DateCellData,
type_option: Option<&DateTypeOption>,
setting_content: &String,
setting_content: &str,
) -> String {
let binding = DateTypeOption::default();
let type_option = type_option.unwrap_or(&binding);
@ -343,11 +341,11 @@ fn group_id(
now.checked_add_signed(Duration::days(-1))
} else if diff == 1 {
now.checked_add_signed(Duration::days(1))
} else if diff >= -7 && diff < -1 {
} else if (-7..-1).contains(&diff) {
now.checked_add_signed(Duration::days(-7))
} else if diff > 1 && diff <= 7 {
now.checked_add_signed(Duration::days(2))
} else if diff >= -30 && diff < -7 {
} else if (-30..-7).contains(&diff) {
now.checked_add_signed(Duration::days(-30))
} else if diff > 7 && diff <= 30 {
now.checked_add_signed(Duration::days(8))
@ -374,9 +372,9 @@ fn group_id(
}
fn group_name_from_id(
group_id: &String,
group_id: &str,
type_option: Option<&DateTypeOption>,
setting_content: &String,
setting_content: &str,
) -> String {
let binding = DateTypeOption::default();
let type_option = type_option.unwrap_or(&binding);
@ -386,12 +384,7 @@ fn group_name_from_id(
let tmp;
match config.condition {
DateCondition::Day => {
tmp = format!(
"{} {}, {}",
date.format("%b").to_string(),
date.day(),
date.year(),
);
tmp = format!("{} {}, {}", date.format("%b"), date.day(), date.year(),);
tmp
},
DateCondition::Week => {
@ -406,15 +399,15 @@ fn group_name_from_id(
tmp = format!(
"Week of {} {}-{} {}",
date.format("%b").to_string(),
begin_of_week.to_string(),
end_of_week.to_string(),
date.format("%b"),
begin_of_week,
end_of_week,
date.year()
);
tmp
},
DateCondition::Month => {
tmp = format!("{} {}", date.format("%b").to_string(), date.year(),);
tmp = format!("{} {}", date.format("%b"), date.year(),);
tmp
},
DateCondition::Year => date.year().to_string(),
@ -431,7 +424,7 @@ fn group_name_from_id(
-30 => "Last 30 days",
8 => "Next 30 days",
_ => {
tmp = format!("{} {}", date.format("%b").to_string(), date.year(),);
tmp = format!("{} {}", date.format("%b"), date.year(),);
&tmp
},
};
@ -441,7 +434,7 @@ fn group_name_from_id(
}
}
fn date_time_from_timestamp(timestamp: Option<i64>, timezone_id: &String) -> DateTime<Local> {
fn date_time_from_timestamp(timestamp: Option<i64>, timezone_id: &str) -> DateTime<Local> {
match timestamp {
Some(timestamp) => {
let naive = NaiveDateTime::from_timestamp_opt(timestamp, 0).unwrap();
@ -458,14 +451,17 @@ fn date_time_from_timestamp(timestamp: Option<i64>, timezone_id: &String) -> Dat
#[cfg(test)]
mod tests {
use std::vec;
use chrono::{offset, Days, Duration, NaiveDateTime};
use crate::entities::FieldType;
use crate::services::{
field::{date_type_option::DateTypeOption, DateCellData},
group::controller_impls::date_controller::{
group_id, group_name_from_id, GROUP_ID_DATE_FORMAT,
},
};
use chrono::{offset, Days, Duration, NaiveDateTime};
use std::vec;
#[test]
fn group_id_name_test() {
@ -485,9 +481,9 @@ mod tests {
let today = offset::Local::now();
let three_days_before = today.checked_add_signed(Duration::days(-3)).unwrap();
let mut local_date_type_option = DateTypeOption::default();
let mut local_date_type_option = DateTypeOption::new(FieldType::DateTime);
local_date_type_option.timezone_id = today.offset().to_string();
let mut default_date_type_option = DateTypeOption::default();
let mut default_date_type_option = DateTypeOption::new(FieldType::DateTime);
default_date_type_option.timezone_id = "".to_string();
let tests = vec![
@ -552,7 +548,7 @@ mod tests {
exp_group_name: "Mar 2022".to_string(),
},
GroupIDTest {
cell_data: mar_14_2022_cd.clone(),
cell_data: mar_14_2022_cd,
type_option: &local_date_type_option,
setting_content: r#"{"condition": 4, "hide_empty": false}"#.to_string(),
exp_group_id: "2022/01/01".to_string(),
@ -588,7 +584,7 @@ mod tests {
);
assert_eq!(test.exp_group_id, group_id, "test {}", i);
if test.exp_group_name != "" {
if !test.exp_group_name.is_empty() {
let group_name =
group_name_from_id(&group_id, Some(test.type_option), &test.setting_content);
assert_eq!(test.exp_group_name, group_name, "test {}", i);

View File

@ -174,11 +174,9 @@ pub fn make_inserted_cell(group_id: &str, field: &Field) -> Option<Cell> {
Some(cell)
},
FieldType::DateTime => {
let date = NaiveDateTime::parse_from_str(
&format!("{} 00:00:00", group_id).to_string(),
"%Y/%m/%d %H:%M:%S",
)
.unwrap();
let date =
NaiveDateTime::parse_from_str(&format!("{} 00:00:00", group_id), "%Y/%m/%d %H:%M:%S")
.unwrap();
let cell = insert_date_cell(date.timestamp(), None, field);
Some(cell)
},

View File

@ -1,14 +1,17 @@
use crate::database::group_test::script::DatabaseGroupTest;
use crate::database::group_test::script::GroupScript::*;
use std::collections::HashMap;
use std::vec;
use chrono::NaiveDateTime;
use chrono::{offset, Duration};
use collab_database::database::gen_row_id;
use collab_database::rows::CreateRowParams;
use flowy_database2::entities::FieldType;
use flowy_database2::services::cell::CellBuilder;
use flowy_database2::services::field::DateCellData;
use std::collections::HashMap;
use std::vec;
use crate::database::group_test::script::DatabaseGroupTest;
use crate::database::group_test::script::GroupScript::*;
#[tokio::test]
async fn group_by_date_test() {
@ -197,7 +200,7 @@ async fn change_date_on_moving_row_to_another_group() {
.get_rows()
.await
.into_iter()
.find(|r| r.row.id.to_string() == row_id.to_string())
.find(|r| r.row.id.to_string() == *row_id)
.unwrap();
let cell = row_detail.row.cells.get(&date_field.id.clone()).unwrap();
let date_cell = DateCellData::from(cell);

View File

@ -9,10 +9,11 @@ use lib_infra::future::FutureResult;
pub trait DocumentCloudService: Send + Sync + 'static {
fn get_document_updates(&self, document_id: &str) -> FutureResult<Vec<Vec<u8>>, Error>;
fn get_document_latest_snapshot(
fn get_document_snapshots(
&self,
document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, Error>;
limit: usize,
) -> FutureResult<Vec<DocumentSnapshot>, Error>;
fn get_document_data(&self, document_id: &str) -> FutureResult<Option<DocumentData>, Error>;
}

View File

@ -33,7 +33,8 @@ pub(crate) async fn create_document_handler(
) -> FlowyResult<()> {
let manager = upgrade_document(manager)?;
let params: CreateDocumentParams = data.into_inner().try_into()?;
manager.create_document(&params.document_id, params.initial_data)?;
let uid = manager.user.user_id()?;
manager.create_document(uid, &params.document_id, params.initial_data)?;
Ok(())
}
@ -175,7 +176,7 @@ pub(crate) async fn get_snapshot_handler(
let manager = upgrade_document(manager)?;
let params: OpenDocumentParams = data.into_inner().try_into()?;
let doc_id = params.document_id;
let snapshots = manager.get_document_snapshots(&doc_id).await?;
let snapshots = manager.get_document_snapshots(&doc_id, 10).await?;
data_result_ok(RepeatedDocumentSnapshotPB { items: snapshots })
}

View File

@ -11,7 +11,7 @@ use collab_document::YrsDocAction;
use parking_lot::RwLock;
use flowy_document_deps::cloud::DocumentCloudService;
use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult};
use crate::document::MutexDocument;
use crate::entities::DocumentSnapshotPB;
@ -23,7 +23,7 @@ pub trait DocumentUser: Send + Sync {
}
pub struct DocumentManager {
user: Arc<dyn DocumentUser>,
pub user: Arc<dyn DocumentUser>,
collab_builder: Arc<AppFlowyCollabBuilder>,
documents: Arc<RwLock<HashMap<String, Arc<MutexDocument>>>>,
#[allow(dead_code)]
@ -59,11 +59,12 @@ impl DocumentManager {
/// If the data is None, a document with default data will be created.
pub fn create_document(
&self,
uid: i64,
doc_id: &str,
data: Option<DocumentData>,
) -> FlowyResult<Arc<MutexDocument>> {
tracing::trace!("create a document: {:?}", doc_id);
let collab = self.collab_for_document(doc_id, vec![])?;
let collab = self.collab_for_document(uid, doc_id, vec![])?;
let data = data.unwrap_or_else(default_document_data);
let document = Arc::new(MutexDocument::create_with_data(collab, data)?);
Ok(document)
@ -107,6 +108,12 @@ impl DocumentManager {
let mut updates = vec![];
if !self.is_doc_exist(doc_id)? {
if let Ok(document_updates) = self.cloud_service.get_document_updates(doc_id).await {
if document_updates.is_empty() {
return Err(FlowyError::new(
ErrorCode::UnexpectedEmptyCollabUpdates,
"Can't not read the document data",
));
}
updates = document_updates;
} else {
return Err(
@ -114,8 +121,8 @@ impl DocumentManager {
);
}
}
let collab = self.collab_for_document(doc_id, updates)?;
let uid = self.user.user_id()?;
let collab = self.collab_for_document(uid, doc_id, updates)?;
Document::open(collab)?
.get_document_data()
.map_err(internal_error)
@ -142,31 +149,30 @@ impl DocumentManager {
pub async fn get_document_snapshots(
&self,
document_id: &str,
limit: usize,
) -> FlowyResult<Vec<DocumentSnapshotPB>> {
let mut snapshots = vec![];
if let Some(snapshot) = self
let snapshots = self
.cloud_service
.get_document_latest_snapshot(document_id)
.get_document_snapshots(document_id, limit)
.await?
.into_iter()
.map(|snapshot| DocumentSnapshotPB {
snapshot_id: snapshot.snapshot_id,
snapshot_desc: "".to_string(),
created_at: snapshot.created_at,
data: snapshot.data,
})
{
snapshots.push(snapshot);
}
.collect::<Vec<_>>();
Ok(snapshots)
}
fn collab_for_document(
&self,
uid: i64,
doc_id: &str,
updates: Vec<Vec<u8>>,
) -> FlowyResult<Arc<MutexCollab>> {
let uid = self.user.user_id()?;
let db = self.user.collab_db(uid)?;
let collab = self
.collab_builder

View File

@ -13,7 +13,7 @@ async fn undo_redo_test() {
let data = default_document_data();
// create a document
_ = test.create_document(&doc_id, Some(data.clone()));
_ = test.create_document(test.user.user_id().unwrap(), &doc_id, Some(data.clone()));
// open a document
let document = test.get_document(&doc_id).await.unwrap();

View File

@ -13,7 +13,10 @@ async fn restore_document() {
// create a document
let doc_id: String = gen_document_id();
let data = default_document_data();
let document_a = test.create_document(&doc_id, Some(data.clone())).unwrap();
let uid = test.user.user_id().unwrap();
let document_a = test
.create_document(uid, &doc_id, Some(data.clone()))
.unwrap();
let data_a = document_a.lock().get_document_data().unwrap();
assert_eq!(data_a, data);
@ -30,7 +33,7 @@ async fn restore_document() {
assert_eq!(data_b, data);
// restore
_ = test.create_document(&doc_id, Some(data.clone()));
_ = test.create_document(uid, &doc_id, Some(data.clone()));
// open a document
let data_b = test
.get_document(&doc_id)
@ -48,11 +51,12 @@ async fn restore_document() {
#[tokio::test]
async fn document_apply_insert_action() {
let test = DocumentTest::new();
let uid = test.user.user_id().unwrap();
let doc_id: String = gen_document_id();
let data = default_document_data();
// create a document
_ = test.create_document(&doc_id, Some(data.clone()));
_ = test.create_document(uid, &doc_id, Some(data.clone()));
// open a document
let document = test.get_document(&doc_id).await.unwrap();
@ -99,10 +103,11 @@ async fn document_apply_insert_action() {
async fn document_apply_update_page_action() {
let test = DocumentTest::new();
let doc_id: String = gen_document_id();
let uid = test.user.user_id().unwrap();
let data = default_document_data();
// create a document
_ = test.create_document(&doc_id, Some(data.clone()));
_ = test.create_document(uid, &doc_id, Some(data.clone()));
// open a document
let document = test.get_document(&doc_id).await.unwrap();
@ -138,11 +143,12 @@ async fn document_apply_update_page_action() {
#[tokio::test]
async fn document_apply_update_action() {
let test = DocumentTest::new();
let uid = test.user.user_id().unwrap();
let doc_id: String = gen_document_id();
let data = default_document_data();
// create a document
_ = test.create_document(&doc_id, Some(data.clone()));
_ = test.create_document(uid, &doc_id, Some(data.clone()));
// open a document
let document = test.get_document(&doc_id).await.unwrap();

View File

@ -90,9 +90,11 @@ pub async fn create_and_open_empty_document() -> (DocumentTest, Arc<MutexDocumen
let test = DocumentTest::new();
let doc_id: String = gen_document_id();
let data = default_document_data();
let uid = test.user.user_id().unwrap();
// create a document
_ = test.create_document(&doc_id, Some(data.clone())).unwrap();
_ = test
.create_document(uid, &doc_id, Some(data.clone()))
.unwrap();
let document = test.get_document(&doc_id).await.unwrap();
@ -114,11 +116,12 @@ impl DocumentCloudService for LocalTestDocumentCloudServiceImpl {
FutureResult::new(async move { Ok(vec![]) })
}
fn get_document_latest_snapshot(
fn get_document_snapshots(
&self,
_document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, Error> {
FutureResult::new(async move { Ok(None) })
_limit: usize,
) -> FutureResult<Vec<DocumentSnapshot>, Error> {
FutureResult::new(async move { Ok(vec![]) })
}
fn get_document_data(&self, _document_id: &str) -> FutureResult<Option<DocumentData>, Error> {

View File

@ -0,0 +1,15 @@
[package]
name = "flowy-encrypt"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
aes-gcm = "0.10.2"
rand = "0.8"
pbkdf2 = "0.12.2"
hmac = "0.12.1"
sha2 = "0.10.7"
anyhow = "1.0.72"
base64 = "0.21.2"

View File

@ -0,0 +1,118 @@
use aes_gcm::aead::generic_array::GenericArray;
use aes_gcm::aead::Aead;
use aes_gcm::{Aes256Gcm, KeyInit};
use anyhow::Result;
use base64::engine::general_purpose::STANDARD;
use base64::Engine;
use pbkdf2::hmac::Hmac;
use pbkdf2::pbkdf2;
use rand::distributions::Alphanumeric;
use rand::Rng;
use sha2::Sha256;
const SALT_LENGTH: usize = 16;
const KEY_LENGTH: usize = 32;
const ITERATIONS: u32 = 1000;
const NONCE_LENGTH: usize = 12;
const CONCATENATED_DELIMITER: &str = "$";
pub fn generate_encrypt_secret() -> String {
let passphrase = generate_passphrase();
let salt = generate_salt();
concatenate_passphrase_and_salt(&passphrase, &salt)
}
pub fn encrypt_bytes<T: AsRef<[u8]>>(data: T, combined_passphrase_salt: &str) -> Result<Vec<u8>> {
let (passphrase, salt) = split_passphrase_and_salt(combined_passphrase_salt)?;
let key = derive_key(passphrase, &salt)?;
let cipher = Aes256Gcm::new(GenericArray::from_slice(&key));
let nonce: [u8; NONCE_LENGTH] = rand::thread_rng().gen();
let ciphertext = cipher
.encrypt(GenericArray::from_slice(&nonce), data.as_ref())
.unwrap();
Ok(nonce.into_iter().chain(ciphertext).collect())
}
pub fn decrypt_bytes<T: AsRef<[u8]>>(data: T, combined_passphrase_salt: &str) -> Result<Vec<u8>> {
if data.as_ref().len() <= NONCE_LENGTH {
return Err(anyhow::anyhow!("Ciphertext too short to include nonce."));
}
let (passphrase, salt) = split_passphrase_and_salt(combined_passphrase_salt)?;
let key = derive_key(passphrase, &salt)?;
let cipher = Aes256Gcm::new(GenericArray::from_slice(&key));
let (nonce, cipher_data) = data.as_ref().split_at(NONCE_LENGTH);
cipher
.decrypt(GenericArray::from_slice(nonce), cipher_data)
.map_err(|e| anyhow::anyhow!("Decryption error: {:?}", e))
}
pub fn encrypt_string<T: AsRef<[u8]>>(data: T, combined_passphrase_salt: &str) -> Result<String> {
let encrypted = encrypt_bytes(data.as_ref(), combined_passphrase_salt)?;
Ok(STANDARD.encode(encrypted))
}
pub fn decrypt_string<T: AsRef<[u8]>>(data: T, combined_passphrase_salt: &str) -> Result<String> {
let encrypted = STANDARD.decode(data)?;
let decrypted = decrypt_bytes(encrypted, combined_passphrase_salt)?;
Ok(String::from_utf8(decrypted)?)
}
fn generate_passphrase() -> String {
rand::thread_rng()
.sample_iter(&Alphanumeric)
.take(30) // 30-character alphanumeric passphrase
.map(char::from)
.collect()
}
fn generate_salt() -> [u8; SALT_LENGTH] {
let mut rng = rand::thread_rng();
let salt: [u8; SALT_LENGTH] = rng.gen();
salt
}
fn concatenate_passphrase_and_salt(passphrase: &str, salt: &[u8; SALT_LENGTH]) -> String {
let salt_base64 = STANDARD.encode(salt);
format!("{}{}{}", passphrase, CONCATENATED_DELIMITER, salt_base64)
}
fn split_passphrase_and_salt(combined: &str) -> Result<(&str, [u8; SALT_LENGTH]), anyhow::Error> {
let parts: Vec<&str> = combined.split(CONCATENATED_DELIMITER).collect();
if parts.len() != 2 {
return Err(anyhow::anyhow!("Invalid combined format"));
}
let passphrase = parts[0];
let salt = STANDARD.decode(parts[1])?;
if salt.len() != SALT_LENGTH {
return Err(anyhow::anyhow!("Incorrect salt length"));
}
let mut salt_array = [0u8; SALT_LENGTH];
salt_array.copy_from_slice(&salt);
Ok((passphrase, salt_array))
}
fn derive_key(passphrase: &str, salt: &[u8; SALT_LENGTH]) -> Result<[u8; KEY_LENGTH]> {
let mut key = [0u8; KEY_LENGTH];
pbkdf2::<Hmac<Sha256>>(passphrase.as_bytes(), salt, ITERATIONS, &mut key)?;
Ok(key)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_encrypt_decrypt() {
let secret = generate_encrypt_secret();
let data = b"hello world";
let encrypted = encrypt_bytes(data, &secret).unwrap();
let decrypted = decrypt_bytes(encrypted, &secret).unwrap();
assert_eq!(data, decrypted.as_slice());
let s = "123".to_string();
let encrypted = encrypt_string(&s, &secret).unwrap();
let decrypted_str = decrypt_string(encrypted, &secret).unwrap();
assert_eq!(s, decrypted_str);
}
}
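
A minimal usage sketch of the new crate (hypothetical caller; assumes the crate is consumed as `flowy_encrypt`, per the manifest above, and illustrates the "encryption with wrong secret" behavior this commit fixes):

use flowy_encrypt::{decrypt_bytes, encrypt_bytes, generate_encrypt_secret};

fn round_trip_example() -> anyhow::Result<()> {
  // The combined secret has the shape "<passphrase>$<base64 salt>".
  let secret = generate_encrypt_secret();
  let encrypted = encrypt_bytes(b"collab update", &secret)?;

  // Decrypting with the original secret restores the plaintext.
  assert_eq!(decrypt_bytes(&encrypted, &secret)?, b"collab update".to_vec());

  // A different secret derives a different AES-256-GCM key, so decryption fails
  // instead of returning garbage.
  let other_secret = generate_encrypt_secret();
  assert!(decrypt_bytes(&encrypted, &other_secret).is_err());
  Ok(())
}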

View File

@ -0,0 +1,3 @@
pub use encrypt::*;
mod encrypt;

View File

@ -220,6 +220,12 @@ pub enum ErrorCode {
#[error("Conflict")]
Conflict = 73,
#[error("Invalid decryption secret")]
InvalidEncryptSecret = 74,
#[error("Unexpected empty collab updates")]
UnexpectedEmptyCollabUpdates = 75,
}
impl ErrorCode {

View File

@ -1,9 +1,8 @@
pub use collab_folder::core::{Folder, FolderData, Workspace};
pub use anyhow::Error;
pub use collab_folder::core::{Folder, FolderData, Workspace};
use uuid::Uuid;
use lib_infra::future::FutureResult;
use uuid::Uuid;
/// [FolderCloudService] represents the cloud service for folder.
pub trait FolderCloudService: Send + Sync + 'static {
@ -11,10 +10,11 @@ pub trait FolderCloudService: Send + Sync + 'static {
fn get_folder_data(&self, workspace_id: &str) -> FutureResult<Option<FolderData>, Error>;
fn get_folder_latest_snapshot(
fn get_folder_snapshots(
&self,
workspace_id: &str,
) -> FutureResult<Option<FolderSnapshot>, Error>;
limit: usize,
) -> FutureResult<Vec<FolderSnapshot>, Error>;
fn get_folder_updates(&self, workspace_id: &str, uid: i64) -> FutureResult<Vec<Vec<u8>>, Error>;

View File

@ -1,10 +1,11 @@
use std::sync::{Arc, Weak};
use flowy_error::{FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
use crate::entities::*;
use crate::manager::FolderManager;
use crate::share::ImportParams;
use flowy_error::{FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
fn upgrade_folder(
folder_manager: AFPluginState<Weak<FolderManager>>,
@ -319,7 +320,7 @@ pub(crate) async fn get_folder_snapshots_handler(
) -> DataResult<RepeatedFolderSnapshotPB, FlowyError> {
let folder = upgrade_folder(folder)?;
if let Some(workspace_id) = &data.value {
let snapshots = folder.get_folder_snapshots(workspace_id).await?;
let snapshots = folder.get_folder_snapshots(workspace_id, 10).await?;
data_result_ok(RepeatedFolderSnapshotPB { items: snapshots })
} else {
data_result_ok(RepeatedFolderSnapshotPB { items: vec![] })

View File

@ -3,7 +3,7 @@ use std::ops::Deref;
use std::sync::{Arc, Weak};
use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
use appflowy_integrate::{CollabPersistenceConfig, CollabType, RocksCollabDB};
use appflowy_integrate::{CollabPersistenceConfig, CollabType, RocksCollabDB, YrsDocAction};
use collab::core::collab::{CollabRawData, MutexCollab};
use collab::core::collab_state::SyncState;
use collab_folder::core::{
@ -145,10 +145,23 @@ impl FolderManager {
let folder = match initial_data {
FolderInitializeData::Empty => {
let is_exist = is_exist_in_local_disk(&self.user, &workspace_id).unwrap_or(false);
if !is_exist {
return Err(FlowyError::new(
ErrorCode::RecordNotFound,
"Can't find any workspace data",
));
}
let collab = self.collab_for_folder(uid, &workspace_id, collab_db, vec![])?;
Folder::open(collab, Some(folder_notifier))
},
FolderInitializeData::Raw(raw_data) => {
if raw_data.is_empty() {
return Err(FlowyError::new(
ErrorCode::UnexpectedEmptyCollabUpdates,
"Can't fetch the workspace data from server",
));
}
let collab = self.collab_for_folder(uid, &workspace_id, collab_db, raw_data)?;
Folder::open(collab, Some(folder_notifier))
},
@ -207,7 +220,7 @@ impl FolderManager {
.get_folder_updates(workspace_id, user_id)
.await?;
tracing::trace!(
tracing::info!(
"Get folder updates via {}, number of updates: {}",
self.cloud_service.service_name(),
folder_updates.len()
@ -238,12 +251,8 @@ impl FolderManager {
if is_new {
let folder_data = match folder_data {
None => {
DefaultFolderBuilder::build(
self.user.user_id()?,
workspace_id.to_string(),
&self.operation_handlers,
)
.await
DefaultFolderBuilder::build(user_id, workspace_id.to_string(), &self.operation_handlers)
.await
},
Some(folder_data) => folder_data,
};
@ -256,19 +265,18 @@ impl FolderManager {
)
.await?;
} else {
// The folder data is loaded through the [FolderCloudService]. If the cloud service in use is
// [LocalServerFolderCloudServiceImpl], the folder data will be None because the Folder will load
// the data directly from the disk. If any other cloud service is in use, the folder data will be loaded remotely.
// The folder updates should not be empty, as the folder data is stored
// when the user signs up for the first time.
let folder_updates = self
.cloud_service
.get_folder_updates(workspace_id, user_id)
.await?;
if !folder_updates.is_empty() {
tracing::trace!(
"Get folder updates via {}",
self.cloud_service.service_name()
);
}
tracing::info!(
"Get folder updates via {}, number of updates: {}",
self.cloud_service.service_name(),
folder_updates.len()
);
self
.initialize(
user_id,
@ -779,9 +787,16 @@ impl FolderManager {
let handler = self.get_handler(&import_data.view_layout)?;
let view_id = gen_view_id();
let uid = self.user.user_id()?;
if let Some(data) = import_data.data {
handler
.import_from_bytes(&view_id, &import_data.name, import_data.import_type, data)
.import_from_bytes(
uid,
&view_id,
&import_data.name,
import_data.import_type,
data,
)
.await?;
}
@ -889,21 +904,20 @@ impl FolderManager {
pub async fn get_folder_snapshots(
&self,
workspace_id: &str,
limit: usize,
) -> FlowyResult<Vec<FolderSnapshotPB>> {
let mut snapshots = vec![];
if let Some(snapshot) = self
let snapshots = self
.cloud_service
.get_folder_latest_snapshot(workspace_id)
.get_folder_snapshots(workspace_id, limit)
.await?
.into_iter()
.map(|snapshot| FolderSnapshotPB {
snapshot_id: snapshot.snapshot_id,
snapshot_desc: "".to_string(),
created_at: snapshot.created_at,
data: snapshot.data,
})
{
snapshots.push(snapshot);
}
.collect::<Vec<_>>();
Ok(snapshots)
}
@ -1159,7 +1173,20 @@ unsafe impl Sync for MutexFolder {}
unsafe impl Send for MutexFolder {}
pub enum FolderInitializeData {
/// It means using the data stored on local disk to initialize the folder
Empty,
/// If there is no data stored on local disk, we will use the data from the server to initialize the folder
Raw(CollabRawData),
/// If the user is new, we use the [DefaultFolderBuilder] to create the default folder.
Data(FolderData),
}
fn is_exist_in_local_disk(user: &Arc<dyn FolderUser>, doc_id: &str) -> FlowyResult<bool> {
let uid = user.user_id()?;
if let Some(collab_db) = user.collab_db(uid)?.upgrade() {
let read_txn = collab_db.read_txn();
Ok(read_txn.is_exist(uid, doc_id))
} else {
Ok(false)
}
}

View File

@ -13,7 +13,7 @@ use crate::view_operation::{
pub struct DefaultFolderBuilder();
impl DefaultFolderBuilder {
pub async fn build(
_uid: i64,
uid: i64,
workspace_id: String,
handlers: &FolderOperationHandlers,
) -> FolderData {
@ -21,7 +21,7 @@ impl DefaultFolderBuilder {
Arc::new(RwLock::new(WorkspaceViewBuilder::new(workspace_id.clone())));
for handler in handlers.values() {
let _ = handler
.create_workspace_view(workspace_view_builder.clone())
.create_workspace_view(uid, workspace_view_builder.clone())
.await;
}

View File

@ -157,6 +157,7 @@ pub trait FolderOperationHandler {
/// Only called once when the user is created.
fn create_workspace_view(
&self,
_uid: i64,
_workspace_view_builder: Arc<RwLock<WorkspaceViewBuilder>>,
) -> FutureResult<(), FlowyError> {
FutureResult::new(async { Ok(()) })
@ -209,6 +210,7 @@ pub trait FolderOperationHandler {
/// Create a view by importing data
fn import_from_bytes(
&self,
uid: i64,
view_id: &str,
name: &str,
import_type: ImportType,

View File

@ -22,17 +22,11 @@ pub struct SupabaseConfiguration {
pub anon_key: String,
/// The secret used to sign the JWT tokens.
pub jwt_secret: String,
/// Whether to enable the supabase sync.
/// User can disable it by injecting the environment variable ENABLE_SUPABASE_SYNC=false
pub enable_sync: bool,
}
impl SupabaseConfiguration {
pub fn from_env() -> Result<Self, FlowyError> {
Ok(Self {
enable_sync: std::env::var(ENABLE_SUPABASE_SYNC)
.map(|v| v == "true")
.unwrap_or(false),
url: std::env::var(SUPABASE_URL)
.map_err(|_| FlowyError::new(ErrorCode::InvalidAuthConfig, "Missing SUPABASE_URL"))?,
anon_key: std::env::var(SUPABASE_ANON_KEY)
@ -45,11 +39,6 @@ impl SupabaseConfiguration {
/// Write the configuration to the environment variables.
pub fn write_env(&self) {
if self.enable_sync {
std::env::set_var(ENABLE_SUPABASE_SYNC, "true");
} else {
std::env::set_var(ENABLE_SUPABASE_SYNC, "false");
}
std::env::set_var(SUPABASE_URL, &self.url);
std::env::set_var(SUPABASE_ANON_KEY, &self.anon_key);
std::env::set_var(SUPABASE_JWT_SECRET, &self.jwt_secret);

View File

@ -36,6 +36,7 @@ flowy-database-deps = { path = "../flowy-database-deps" }
flowy-document-deps = { path = "../flowy-document-deps" }
flowy-error = { path = "../flowy-error", features = ["impl_from_postgres", "impl_from_serde", "impl_from_reqwest"] }
flowy-server-config = { path = "../flowy-server-config" }
flowy-encrypt = { path = "../flowy-encrypt" }
[dev-dependencies]
uuid = { version = "1.3.3", features = ["v4"] }

View File

@ -1,6 +1,7 @@
use std::sync::Arc;
use collab_plugins::cloud_storage::{CollabObject, RemoteCollabStorage};
use parking_lot::RwLock;
use serde_json::Value;
use flowy_database_deps::cloud::DatabaseCloudService;
@ -15,8 +16,26 @@ pub mod self_host;
pub mod supabase;
pub mod util;
pub trait AppFlowyEncryption: Send + Sync + 'static {
fn get_secret(&self) -> Option<String>;
fn set_secret(&self, secret: String);
}
impl<T> AppFlowyEncryption for Arc<T>
where
T: AppFlowyEncryption,
{
fn get_secret(&self) -> Option<String> {
(**self).get_secret()
}
fn set_secret(&self, secret: String) {
(**self).set_secret(secret)
}
}
pub trait AppFlowyServer: Send + Sync + 'static {
fn enable_sync(&self, _enable: bool) {}
fn set_enable_sync(&self, _enable: bool) {}
fn set_sync_device_id(&self, _device_id: &str) {}
fn user_service(&self) -> Arc<dyn UserService>;
fn folder_service(&self) -> Arc<dyn FolderCloudService>;
@ -25,3 +44,25 @@ pub trait AppFlowyServer: Send + Sync + 'static {
fn collab_storage(&self, collab_object: &CollabObject) -> Option<Arc<dyn RemoteCollabStorage>>;
fn handle_realtime_event(&self, _json: Value) {}
}
pub struct EncryptionImpl {
secret: RwLock<Option<String>>,
}
impl EncryptionImpl {
pub fn new(secret: Option<String>) -> Self {
Self {
secret: RwLock::new(secret),
}
}
}
impl AppFlowyEncryption for EncryptionImpl {
fn get_secret(&self) -> Option<String> {
self.secret.read().clone()
}
fn set_secret(&self, secret: String) {
*self.secret.write() = Some(secret);
}
}
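
A rough wiring sketch (hypothetical helper; the real call sites are the server provider's set_encrypt_secret above and the Supabase services below, which hold only a Weak reference so access to the secret goes away with its owner):

use std::sync::{Arc, Weak};

fn wire_encryption(secret: Option<String>) -> (Arc<EncryptionImpl>, Weak<dyn AppFlowyEncryption>) {
  // The owner keeps the Arc; services receive a Weak and must upgrade() it
  // before reading the secret, mirroring SupabaseCollabStorageImpl::secret.
  let encryption = Arc::new(EncryptionImpl::new(secret));
  let weak: Weak<dyn AppFlowyEncryption> = Arc::downgrade(&encryption);
  (encryption, weak)
}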

View File

@ -4,7 +4,6 @@ use collab_plugins::cloud_storage::CollabType;
use flowy_database_deps::cloud::{
CollabObjectUpdate, CollabObjectUpdateByOid, DatabaseCloudService, DatabaseSnapshot,
};
use lib_infra::future::FutureResult;
pub(crate) struct LocalServerDatabaseCloudServiceImpl();
@ -26,10 +25,11 @@ impl DatabaseCloudService for LocalServerDatabaseCloudServiceImpl {
FutureResult::new(async move { Ok(CollabObjectUpdateByOid::default()) })
}
fn get_collab_latest_snapshot(
fn get_collab_snapshots(
&self,
_object_id: &str,
) -> FutureResult<Option<DatabaseSnapshot>, Error> {
FutureResult::new(async move { Ok(None) })
_limit: usize,
) -> FutureResult<Vec<DatabaseSnapshot>, Error> {
FutureResult::new(async move { Ok(vec![]) })
}
}

View File

@ -1,6 +1,6 @@
use anyhow::Error;
use flowy_document_deps::cloud::*;
use flowy_document_deps::cloud::*;
use lib_infra::future::FutureResult;
pub(crate) struct LocalServerDocumentCloudServiceImpl();
@ -10,11 +10,12 @@ impl DocumentCloudService for LocalServerDocumentCloudServiceImpl {
FutureResult::new(async move { Ok(vec![]) })
}
fn get_document_latest_snapshot(
fn get_document_snapshots(
&self,
_document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, Error> {
FutureResult::new(async move { Ok(None) })
_limit: usize,
) -> FutureResult<Vec<DocumentSnapshot>, Error> {
FutureResult::new(async move { Ok(vec![]) })
}
fn get_document_data(&self, _document_id: &str) -> FutureResult<Option<DocumentData>, Error> {

View File

@ -1,6 +1,7 @@
use anyhow::Error;
use std::sync::Arc;
use anyhow::Error;
use flowy_folder_deps::cloud::{
gen_workspace_id, FolderCloudService, FolderData, FolderSnapshot, Workspace,
};
@ -30,11 +31,12 @@ impl FolderCloudService for LocalServerFolderCloudServiceImpl {
FutureResult::new(async move { Ok(None) })
}
fn get_folder_latest_snapshot(
fn get_folder_snapshots(
&self,
_workspace_id: &str,
) -> FutureResult<Option<FolderSnapshot>, Error> {
FutureResult::new(async move { Ok(None) })
_limit: usize,
) -> FutureResult<Vec<FolderSnapshot>, Error> {
FutureResult::new(async move { Ok(vec![]) })
}
fn get_folder_updates(&self, workspace_id: &str, uid: i64) -> FutureResult<Vec<Vec<u8>>, Error> {

View File

@ -39,10 +39,11 @@ impl UserService for LocalServerUserAuthServiceImpl {
name: user_name,
latest_workspace: user_workspace.clone(),
user_workspaces: vec![user_workspace],
is_new: true,
is_new_user: true,
email: Some(params.email),
token: None,
device_id: params.device_id,
encryption_type: EncryptionType::NoEncryption,
})
})
}
@ -64,6 +65,7 @@ impl UserService for LocalServerUserAuthServiceImpl {
email: Some(params.email),
token: None,
device_id: params.device_id,
encryption_type: EncryptionType::NoEncryption,
})
})
}

View File

@ -4,7 +4,6 @@ use collab_plugins::cloud_storage::CollabType;
use flowy_database_deps::cloud::{
CollabObjectUpdate, CollabObjectUpdateByOid, DatabaseCloudService, DatabaseSnapshot,
};
use lib_infra::future::FutureResult;
pub(crate) struct SelfHostedDatabaseCloudServiceImpl();
@ -26,10 +25,11 @@ impl DatabaseCloudService for SelfHostedDatabaseCloudServiceImpl {
FutureResult::new(async move { Ok(CollabObjectUpdateByOid::default()) })
}
fn get_collab_latest_snapshot(
fn get_collab_snapshots(
&self,
_object_id: &str,
) -> FutureResult<Option<DatabaseSnapshot>, Error> {
FutureResult::new(async move { Ok(None) })
_limit: usize,
) -> FutureResult<Vec<DatabaseSnapshot>, Error> {
FutureResult::new(async move { Ok(vec![]) })
}
}

View File

@ -1,6 +1,6 @@
use anyhow::Error;
use flowy_document_deps::cloud::*;
use flowy_document_deps::cloud::*;
use lib_infra::future::FutureResult;
pub(crate) struct SelfHostedDocumentCloudServiceImpl();
@ -10,11 +10,12 @@ impl DocumentCloudService for SelfHostedDocumentCloudServiceImpl {
FutureResult::new(async move { Ok(vec![]) })
}
fn get_document_latest_snapshot(
fn get_document_snapshots(
&self,
_document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, Error> {
FutureResult::new(async move { Ok(None) })
_limit: usize,
) -> FutureResult<Vec<DocumentSnapshot>, Error> {
FutureResult::new(async move { Ok(vec![]) })
}
fn get_document_data(&self, _document_id: &str) -> FutureResult<Option<DocumentData>, Error> {

View File

@ -1,4 +1,5 @@
use anyhow::Error;
use flowy_folder_deps::cloud::{
gen_workspace_id, FolderCloudService, FolderData, FolderSnapshot, Workspace,
};
@ -24,11 +25,12 @@ impl FolderCloudService for SelfHostedServerFolderCloudServiceImpl {
FutureResult::new(async move { Ok(None) })
}
fn get_folder_latest_snapshot(
fn get_folder_snapshots(
&self,
_workspace_id: &str,
) -> FutureResult<Option<FolderSnapshot>, Error> {
FutureResult::new(async move { Ok(None) })
_limit: usize,
) -> FutureResult<Vec<FolderSnapshot>, Error> {
FutureResult::new(async move { Ok(vec![]) })
}
fn get_folder_updates(

View File

@ -1,5 +1,5 @@
use std::str::FromStr;
use std::sync::Arc;
use std::sync::{Arc, Weak};
use anyhow::Error;
use chrono::{DateTime, Utc};
@ -15,25 +15,39 @@ use lib_infra::async_trait::async_trait;
use lib_infra::util::md5;
use crate::supabase::api::request::{
create_snapshot, get_latest_snapshot_from_server, get_updates_from_server,
FetchObjectUpdateAction, UpdateItem,
create_snapshot, get_snapshots_from_server, get_updates_from_server, FetchObjectUpdateAction,
UpdateItem,
};
use crate::supabase::api::util::{
ExtendedResponse, InsertParamsBuilder, SupabaseBinaryColumnEncoder,
};
use crate::supabase::api::{PostgresWrapper, SupabaseServerService};
use crate::supabase::define::*;
use crate::AppFlowyEncryption;
pub struct SupabaseCollabStorageImpl<T> {
server: T,
rx: Mutex<Option<RemoteUpdateReceiver>>,
encryption: Weak<dyn AppFlowyEncryption>,
}
impl<T> SupabaseCollabStorageImpl<T> {
pub fn new(server: T, rx: Option<RemoteUpdateReceiver>) -> Self {
pub fn new(
server: T,
rx: Option<RemoteUpdateReceiver>,
encryption: Weak<dyn AppFlowyEncryption>,
) -> Self {
Self {
server,
rx: Mutex::new(rx),
encryption,
}
}
pub fn secret(&self) -> Option<String> {
match self.encryption.upgrade() {
None => None,
Some(encryption) => encryption.get_secret(),
}
}
}
@ -55,11 +69,25 @@ where
Ok(updates)
}
async fn get_latest_snapshot(&self, object_id: &str) -> Option<RemoteCollabSnapshot> {
let postgrest = self.server.try_get_postgrest().ok()?;
get_latest_snapshot_from_server(object_id, postgrest)
.await
.ok()?
async fn get_snapshots(&self, object_id: &str, limit: usize) -> Vec<RemoteCollabSnapshot> {
match self.server.try_get_postgrest() {
Ok(postgrest) => match get_snapshots_from_server(object_id, postgrest, limit).await {
Ok(snapshots) => snapshots,
Err(err) => {
tracing::error!(
"🔴fetch snapshots by oid:{} with limit: {} failed: {:?}",
object_id,
limit,
err
);
vec![]
},
},
Err(err) => {
tracing::error!("🔴get postgrest failed: {:?}", err);
vec![]
},
}
}
async fn get_collab_state(&self, object_id: &str) -> Result<Option<RemoteCollabState>, Error> {
@ -116,7 +144,7 @@ where
let workspace_id = object
.get_workspace_id()
.ok_or(anyhow::anyhow!("Invalid workspace id"))?;
send_update(workspace_id, object, update, &postgrest).await?;
send_update(workspace_id, object, update, &postgrest, &self.secret()).await?;
}
Ok(())
@ -138,7 +166,14 @@ where
// If the update_items is empty, we can send the init_update directly
if update_items.is_empty() {
send_update(workspace_id, object, init_update, &postgrest).await?;
send_update(
workspace_id,
object,
init_update,
&postgrest,
&self.secret(),
)
.await?;
} else {
// 2. Merge the updates into one and then delete the merged updates
let merge_result = spawn_blocking(move || merge_updates(update_items, init_update)).await??;
@ -146,10 +181,12 @@ where
let value_size = merge_result.new_update.len() as i32;
let md5 = md5(&merge_result.new_update);
let new_update = format!("\\x{}", hex::encode(merge_result.new_update));
let (new_update, encrypt) =
SupabaseBinaryColumnEncoder::encode(merge_result.new_update, &self.secret())?;
let params = InsertParamsBuilder::new()
.insert("oid", object.object_id.clone())
.insert("new_value", new_update)
.insert("encrypt", encrypt)
.insert("md5", md5)
.insert("value_size", value_size)
.insert("partition_key", partition_key(&object.ty))
@ -160,7 +197,7 @@ where
.build();
postgrest
.rpc("flush_collab_updates_v2", params)
.rpc("flush_collab_updates_v3", params)
.execute()
.await?
.success()
@ -183,14 +220,16 @@ async fn send_update(
object: &CollabObject,
update: Vec<u8>,
postgrest: &Arc<PostgresWrapper>,
encryption_secret: &Option<String>,
) -> Result<(), Error> {
let value_size = update.len() as i32;
let md5 = md5(&update);
let update = SupabaseBinaryColumnEncoder::encode(update);
let (update, encrypt) = SupabaseBinaryColumnEncoder::encode(update, encryption_secret)?;
let builder = InsertParamsBuilder::new()
.insert("oid", object.object_id.clone())
.insert("partition_key", partition_key(&object.ty))
.insert("value", update)
.insert("encrypt", encrypt)
.insert("uid", object.uid)
.insert("md5", md5)
.insert("workspace_id", workspace_id)

View File

@ -8,7 +8,7 @@ use flowy_database_deps::cloud::{
use lib_infra::future::FutureResult;
use crate::supabase::api::request::{
get_latest_snapshot_from_server, BatchFetchObjectUpdateAction, FetchObjectUpdateAction,
get_snapshots_from_server, BatchFetchObjectUpdateAction, FetchObjectUpdateAction,
};
use crate::supabase::api::SupabaseServerService;
@ -69,23 +69,27 @@ where
FutureResult::new(async { rx.await? })
}
fn get_collab_latest_snapshot(
fn get_collab_snapshots(
&self,
object_id: &str,
) -> FutureResult<Option<DatabaseSnapshot>, Error> {
limit: usize,
) -> FutureResult<Vec<DatabaseSnapshot>, Error> {
let try_get_postgrest = self.server.try_get_postgrest();
let object_id = object_id.to_string();
FutureResult::new(async move {
let postgrest = try_get_postgrest?;
let snapshot = get_latest_snapshot_from_server(&object_id, postgrest)
let snapshots = get_snapshots_from_server(&object_id, postgrest, limit)
.await?
.into_iter()
.map(|snapshot| DatabaseSnapshot {
snapshot_id: snapshot.sid,
database_id: snapshot.oid,
data: snapshot.blob,
created_at: snapshot.created_at,
});
Ok(snapshot)
})
.collect::<Vec<_>>();
Ok(snapshots)
})
}
}

View File

@ -8,13 +8,16 @@ use tokio::sync::oneshot::channel;
use flowy_document_deps::cloud::{DocumentCloudService, DocumentSnapshot};
use lib_infra::future::FutureResult;
use crate::supabase::api::request::{get_latest_snapshot_from_server, FetchObjectUpdateAction};
use crate::supabase::api::request::{get_snapshots_from_server, FetchObjectUpdateAction};
use crate::supabase::api::SupabaseServerService;
pub struct SupabaseDocumentServiceImpl<T>(T);
pub struct SupabaseDocumentServiceImpl<T> {
server: T,
}
impl<T> SupabaseDocumentServiceImpl<T> {
pub fn new(server: T) -> Self {
Self(server)
Self { server }
}
}
@ -23,7 +26,7 @@ where
T: SupabaseServerService,
{
fn get_document_updates(&self, document_id: &str) -> FutureResult<Vec<Vec<u8>>, Error> {
let try_get_postgrest = self.0.try_get_weak_postgrest();
let try_get_postgrest = self.server.try_get_weak_postgrest();
let document_id = document_id.to_string();
let (tx, rx) = channel();
tokio::spawn(async move {
@ -39,28 +42,31 @@ where
FutureResult::new(async { rx.await? })
}
fn get_document_latest_snapshot(
fn get_document_snapshots(
&self,
document_id: &str,
) -> FutureResult<Option<DocumentSnapshot>, Error> {
let try_get_postgrest = self.0.try_get_postgrest();
limit: usize,
) -> FutureResult<Vec<DocumentSnapshot>, Error> {
let try_get_postgrest = self.server.try_get_postgrest();
let document_id = document_id.to_string();
FutureResult::new(async move {
let postgrest = try_get_postgrest?;
let snapshot = get_latest_snapshot_from_server(&document_id, postgrest)
let snapshots = get_snapshots_from_server(&document_id, postgrest, limit)
.await?
.into_iter()
.map(|snapshot| DocumentSnapshot {
snapshot_id: snapshot.sid,
document_id: snapshot.oid,
data: snapshot.blob,
created_at: snapshot.created_at,
});
Ok(snapshot)
})
.collect::<Vec<_>>();
Ok(snapshots)
})
}
fn get_document_data(&self, document_id: &str) -> FutureResult<Option<DocumentData>, Error> {
let try_get_postgrest = self.0.try_get_weak_postgrest();
let try_get_postgrest = self.server.try_get_weak_postgrest();
let document_id = document_id.to_string();
let (tx, rx) = channel();
tokio::spawn(async move {

View File

@ -13,17 +13,19 @@ use flowy_folder_deps::cloud::{
use lib_infra::future::FutureResult;
use crate::supabase::api::request::{
get_latest_snapshot_from_server, get_updates_from_server, FetchObjectUpdateAction,
get_snapshots_from_server, get_updates_from_server, FetchObjectUpdateAction,
};
use crate::supabase::api::util::{ExtendedResponse, InsertParamsBuilder};
use crate::supabase::api::SupabaseServerService;
use crate::supabase::define::*;
pub struct SupabaseFolderServiceImpl<T>(T);
pub struct SupabaseFolderServiceImpl<T> {
server: T,
}
impl<T> SupabaseFolderServiceImpl<T> {
pub fn new(server: T) -> Self {
Self(server)
Self { server }
}
}
@ -32,7 +34,7 @@ where
T: SupabaseServerService,
{
fn create_workspace(&self, uid: i64, name: &str) -> FutureResult<Workspace, Error> {
let try_get_postgrest = self.0.try_get_postgrest();
let try_get_postgrest = self.server.try_get_postgrest();
let name = name.to_string();
let new_workspace_id = gen_workspace_id().to_string();
FutureResult::new(async move {
@ -66,44 +68,51 @@ where
}
fn get_folder_data(&self, workspace_id: &str) -> FutureResult<Option<FolderData>, Error> {
let try_get_postgrest = self.0.try_get_postgrest();
let try_get_postgrest = self.server.try_get_postgrest();
let workspace_id = workspace_id.to_string();
FutureResult::new(async move {
let postgrest = try_get_postgrest?;
get_updates_from_server(&workspace_id, &CollabType::Folder, postgrest)
.await
.map(|updates| {
let updates = updates.into_iter().map(|item| item.value).collect();
let folder =
Folder::from_collab_raw_data(CollabOrigin::Empty, updates, &workspace_id, vec![])
.ok()?;
folder.get_folder_data()
})
let updates = get_updates_from_server(&workspace_id, &CollabType::Folder, postgrest).await?;
let updates = updates
.into_iter()
.map(|item| item.value)
.collect::<Vec<_>>();
if updates.is_empty() {
return Ok(None);
}
let folder =
Folder::from_collab_raw_data(CollabOrigin::Empty, updates, &workspace_id, vec![])?;
Ok(folder.get_folder_data())
})
}
fn get_folder_latest_snapshot(
fn get_folder_snapshots(
&self,
workspace_id: &str,
) -> FutureResult<Option<FolderSnapshot>, Error> {
let try_get_postgrest = self.0.try_get_postgrest();
limit: usize,
) -> FutureResult<Vec<FolderSnapshot>, Error> {
let try_get_postgrest = self.server.try_get_postgrest();
let workspace_id = workspace_id.to_string();
FutureResult::new(async move {
let postgrest = try_get_postgrest?;
let snapshot = get_latest_snapshot_from_server(&workspace_id, postgrest)
let snapshots = get_snapshots_from_server(&workspace_id, postgrest, limit)
.await?
.into_iter()
.map(|snapshot| FolderSnapshot {
snapshot_id: snapshot.sid,
database_id: snapshot.oid,
data: snapshot.blob,
created_at: snapshot.created_at,
});
Ok(snapshot)
})
.collect::<Vec<_>>();
Ok(snapshots)
})
}
fn get_folder_updates(&self, workspace_id: &str, _uid: i64) -> FutureResult<Vec<Vec<u8>>, Error> {
let try_get_postgrest = self.0.try_get_weak_postgrest();
let try_get_postgrest = self.server.try_get_weak_postgrest();
let workspace_id = workspace_id.to_string();
let (tx, rx) = channel();
tokio::spawn(async move {

View File

@ -1,21 +1,35 @@
use anyhow::Error;
use parking_lot::RwLock;
use std::ops::Deref;
use std::sync::{Arc, Weak};
use flowy_error::{ErrorCode, FlowyError};
use anyhow::Error;
use parking_lot::RwLock;
use postgrest::Postgrest;
use flowy_error::{ErrorCode, FlowyError};
use flowy_server_config::supabase_config::SupabaseConfiguration;
use crate::AppFlowyEncryption;
/// Creates a wrapper for Postgrest, allowing us to extend its functionality.
pub struct PostgresWrapper(Postgrest);
pub struct PostgresWrapper {
inner: Postgrest,
pub encryption: Weak<dyn AppFlowyEncryption>,
}
impl PostgresWrapper {
pub fn secret(&self) -> Option<String> {
match self.encryption.upgrade() {
None => None,
Some(encryption) => encryption.get_secret(),
}
}
}
impl Deref for PostgresWrapper {
type Target = Postgrest;
fn deref(&self) -> &Self::Target {
&self.0
&self.inner
}
}
@ -24,14 +38,17 @@ pub struct RESTfulPostgresServer {
}
impl RESTfulPostgresServer {
pub fn new(config: SupabaseConfiguration) -> Self {
pub fn new(config: SupabaseConfiguration, encryption: Weak<dyn AppFlowyEncryption>) -> Self {
let url = format!("{}/rest/v1", config.url);
let auth = format!("Bearer {}", config.anon_key);
let postgrest = Postgrest::new(url)
.insert_header("apikey", config.anon_key)
.insert_header("Authorization", auth);
Self {
postgrest: Arc::new(PostgresWrapper(postgrest)),
postgrest: Arc::new(PostgresWrapper {
inner: postgrest,
encryption,
}),
}
}
}
@ -42,6 +59,23 @@ pub trait SupabaseServerService: Send + Sync + 'static {
fn try_get_weak_postgrest(&self) -> Result<Weak<PostgresWrapper>, Error>;
}
impl<T> SupabaseServerService for Arc<T>
where
T: SupabaseServerService,
{
fn get_postgrest(&self) -> Option<Arc<PostgresWrapper>> {
(**self).get_postgrest()
}
fn try_get_postgrest(&self) -> Result<Arc<PostgresWrapper>, Error> {
(**self).try_get_postgrest()
}
fn try_get_weak_postgrest(&self) -> Result<Weak<PostgresWrapper>, Error> {
(**self).try_get_weak_postgrest()
}
}
#[derive(Clone)]
pub struct SupabaseServerServiceImpl(pub Arc<RwLock<Option<Arc<RESTfulPostgresServer>>>>);

View File

@ -16,7 +16,7 @@ use flowy_database_deps::cloud::{CollabObjectUpdate, CollabObjectUpdateByOid};
use lib_infra::util::md5;
use crate::supabase::api::util::{
ExtendedResponse, InsertParamsBuilder, SupabaseBinaryColumnDecoder,
ExtendedResponse, InsertParamsBuilder, SupabaseBinaryColumnDecoder, SupabaseBinaryColumnEncoder,
};
use crate::supabase::api::PostgresWrapper;
use crate::supabase::define::*;
@ -124,69 +124,106 @@ pub async fn create_snapshot(
snapshot: Vec<u8>,
) -> Result<i64, Error> {
let value_size = snapshot.len() as i32;
let snapshot = format!("\\x{}", hex::encode(snapshot));
postgrest
let (snapshot, encrypt) = SupabaseBinaryColumnEncoder::encode(&snapshot, &postgrest.secret())?;
let ret: Value = postgrest
.from(AF_COLLAB_SNAPSHOT_TABLE)
.insert(
InsertParamsBuilder::new()
.insert(AF_COLLAB_SNAPSHOT_OID_COLUMN, object.object_id.clone())
.insert("name", object.ty.to_string())
.insert(AF_COLLAB_SNAPSHOT_ENCRYPT_COLUMN, encrypt)
.insert(AF_COLLAB_SNAPSHOT_BLOB_COLUMN, snapshot)
.insert(AF_COLLAB_SNAPSHOT_BLOB_SIZE_COLUMN, value_size)
.build(),
)
.execute()
.await?
.success()
.get_json()
.await?;
Ok(1)
let snapshot_id = ret
.as_array()
.and_then(|array| array.first())
.and_then(|value| value.get("sid"))
.and_then(|value| value.as_i64())
.unwrap_or(0);
Ok(snapshot_id)
}
pub async fn get_latest_snapshot_from_server(
pub async fn get_snapshots_from_server(
object_id: &str,
postgrest: Arc<PostgresWrapper>,
) -> Result<Option<RemoteCollabSnapshot>, Error> {
let json = postgrest
limit: usize,
) -> Result<Vec<RemoteCollabSnapshot>, Error> {
let json: Value = postgrest
.from(AF_COLLAB_SNAPSHOT_TABLE)
.select(format!(
"{},{},{}",
"{},{},{},{}",
AF_COLLAB_SNAPSHOT_ID_COLUMN,
AF_COLLAB_SNAPSHOT_BLOB_COLUMN,
AF_COLLAB_SNAPSHOT_CREATED_AT_COLUMN
AF_COLLAB_SNAPSHOT_CREATED_AT_COLUMN,
AF_COLLAB_SNAPSHOT_ENCRYPT_COLUMN
))
.order(format!("{}.desc", AF_COLLAB_SNAPSHOT_ID_COLUMN))
.limit(1)
.limit(limit)
.eq(AF_COLLAB_SNAPSHOT_OID_COLUMN, object_id)
.execute()
.await?
.get_json()
.await?;
let snapshot = json
.as_array()
.and_then(|array| array.first())
.and_then(|value| {
let blob = value
.get("blob")
.and_then(|blob| blob.as_str())
.and_then(SupabaseBinaryColumnDecoder::decode)?;
let sid = value.get("sid").and_then(|id| id.as_i64())?;
let created_at = value.get("created_at").and_then(|created_at| {
created_at
.as_str()
.map(|id| DateTime::<Utc>::from_str(id).ok())
.and_then(|date| date)
})?;
let mut snapshots = vec![];
let secret = postgrest.secret();
match json.as_array() {
None => {
if let Some(snapshot) = parser_snapshot(object_id, &json, &secret) {
snapshots.push(snapshot);
}
},
Some(snapshot_values) => {
for snapshot_value in snapshot_values {
if let Some(snapshot) = parser_snapshot(object_id, snapshot_value, &secret) {
snapshots.push(snapshot);
}
}
},
}
Ok(snapshots)
}
Some(RemoteCollabSnapshot {
sid,
oid: object_id.to_string(),
blob,
created_at: created_at.timestamp(),
})
});
Ok(snapshot)
fn parser_snapshot(
object_id: &str,
snapshot: &Value,
secret: &Option<String>,
) -> Option<RemoteCollabSnapshot> {
let blob = match (
snapshot
.get(AF_COLLAB_SNAPSHOT_ENCRYPT_COLUMN)
.and_then(|encrypt| encrypt.as_i64()),
snapshot
.get(AF_COLLAB_SNAPSHOT_BLOB_COLUMN)
.and_then(|value| value.as_str()),
) {
(Some(encrypt), Some(value)) => {
SupabaseBinaryColumnDecoder::decode(value, encrypt as i32, secret).ok()
},
_ => None,
}?;
let sid = snapshot.get("sid").and_then(|id| id.as_i64())?;
let created_at = snapshot.get("created_at").and_then(|created_at| {
created_at
.as_str()
.map(|id| DateTime::<Utc>::from_str(id).ok())
.and_then(|date| date)
})?;
Some(RemoteCollabSnapshot {
sid,
oid: object_id.to_string(),
blob,
created_at: created_at.timestamp(),
})
}
pub async fn batch_get_updates_from_server(
@ -196,7 +233,7 @@ pub async fn batch_get_updates_from_server(
) -> Result<CollabObjectUpdateByOid, Error> {
let json = postgrest
.from(table_name(object_ty))
.select("oid, key, value, md5")
.select("oid, key, value, encrypt, md5")
.order(format!("{}.asc", AF_COLLAB_KEY_COLUMN))
.in_("oid", object_ids)
.execute()
@ -207,15 +244,20 @@ pub async fn batch_get_updates_from_server(
let mut updates_by_oid = CollabObjectUpdateByOid::new();
if let Some(records) = json.as_array() {
for record in records {
tracing::debug!("get updates from server: {:?}", record);
if let Some(oid) = record.get("oid").and_then(|value| value.as_str()) {
if let Ok(updates) = parser_updates_form_json(record.clone()) {
let object_updates = updates_by_oid
.entry(oid.to_string())
.or_insert_with(Vec::new);
tracing::debug!("get updates from server: {:?}", record);
for update in updates {
object_updates.push(update.value);
}
match parser_updates_form_json(record.clone(), &postgrest.secret()) {
Ok(updates) => {
let object_updates = updates_by_oid
.entry(oid.to_string())
.or_insert_with(Vec::new);
for update in updates {
object_updates.push(update.value);
}
},
Err(e) => {
tracing::error!("parser_updates_form_json error: {:?}", e);
},
}
}
}
@ -230,14 +272,14 @@ pub async fn get_updates_from_server(
) -> Result<Vec<UpdateItem>, Error> {
let json = postgrest
.from(table_name(object_ty))
.select("key, value, md5")
.select("key, value, encrypt, md5")
.order(format!("{}.asc", AF_COLLAB_KEY_COLUMN))
.eq("oid", object_id)
.execute()
.await?
.get_json()
.await?;
parser_updates_form_json(json)
parser_updates_form_json(json, &postgrest.secret())
}
/// json format:
@ -245,24 +287,35 @@ pub async fn get_updates_from_server(
/// [
/// {
/// "value": "\\x...",
/// "encrypt": 1,
/// "md5": "..."
/// },
/// {
/// "value": "\\x...",
/// "encrypt": 1,
/// "md5": "..."
/// },
/// ...
/// ]
/// ```
fn parser_updates_form_json(json: Value) -> Result<Vec<UpdateItem>, Error> {
fn parser_updates_form_json(
json: Value,
encryption_secret: &Option<String>,
) -> Result<Vec<UpdateItem>, Error> {
let mut updates = vec![];
match json.as_array() {
None => {
updates.push(parser_update_from_json(&json)?);
updates.push(parser_update_from_json(&json, encryption_secret)?);
},
Some(values) => {
let expected_update_len = values.len();
for value in values {
updates.push(parser_update_from_json(value)?);
updates.push(parser_update_from_json(value, encryption_secret)?);
}
if updates.len() != expected_update_len {
return Err(anyhow::anyhow!(
"The length of the updates does not match the length of the expected updates, indicating that some updates failed to parse."
));
}
},
}
@ -270,11 +323,36 @@ fn parser_updates_form_json(json: Value) -> Result<Vec<UpdateItem>, Error> {
Ok(updates)
}
fn parser_update_from_json(json: &Value) -> Result<UpdateItem, Error> {
let some_record = json
.get("value")
.and_then(|value| value.as_str())
.and_then(SupabaseBinaryColumnDecoder::decode);
/// Parses update from a JSON representation.
///
/// This function attempts to decode an encrypted value from a JSON object
/// and verify its integrity against a provided MD5 hash.
///
/// # Parameters
/// - `json`: The JSON value representing the update information.
/// - `encryption_secret`: An optional encryption secret used for decrypting the value.
///
/// json format:
/// ```json
/// {
/// "value": "\\x...",
/// "encrypt": 1,
/// "md5": "..."
/// },
/// ```
fn parser_update_from_json(
json: &Value,
encryption_secret: &Option<String>,
) -> Result<UpdateItem, Error> {
let some_record = match (
json.get("encrypt").and_then(|encrypt| encrypt.as_i64()),
json.get("value").and_then(|value| value.as_str()),
) {
(Some(encrypt), Some(value)) => {
SupabaseBinaryColumnDecoder::decode(value, encrypt as i32, encryption_secret).ok()
},
_ => None,
};
let some_key = json.get("key").and_then(|value| value.as_i64());
if let (Some(value), Some(key)) = (some_record, some_key) {
@ -282,12 +360,14 @@ fn parser_update_from_json(json: &Value) -> Result<UpdateItem, Error> {
// that we calculated locally.
if let Some(expected_md5) = json.get("md5").and_then(|v| v.as_str()) {
let value_md5 = md5(&value);
debug_assert!(
value_md5 == expected_md5,
"md5 not match: {} != {}",
value_md5,
expected_md5
);
if value_md5 != expected_md5 {
let msg = format!(
"md5 not match: key:{} {} != {}",
key, value_md5, expected_md5
);
tracing::error!("{}", msg);
return Err(anyhow::anyhow!(msg));
}
}
Ok(UpdateItem { key, value })
} else {

View File

@ -88,10 +88,11 @@ where
name: user_name,
latest_workspace: latest_workspace.unwrap(),
user_workspaces,
is_new: is_new_user,
is_new_user,
email: Some(user_profile.email),
token: None,
device_id: params.device_id,
encryption_type: EncryptionType::from_sign(&user_profile.encryption_sign),
})
})
}
@ -102,23 +103,24 @@ where
let postgrest = try_get_postgrest?;
let params = third_party_params_from_box_any(params)?;
let uuid = params.uuid;
let user_profile = get_user_profile(postgrest.clone(), GetUserProfileParams::Uuid(uuid))
let response = get_user_profile(postgrest.clone(), GetUserProfileParams::Uuid(uuid))
.await?
.unwrap();
let user_workspaces = get_user_workspaces(postgrest.clone(), user_profile.uid).await?;
let user_workspaces = get_user_workspaces(postgrest.clone(), response.uid).await?;
let latest_workspace = user_workspaces
.iter()
.find(|user_workspace| user_workspace.id == user_profile.latest_workspace_id)
.find(|user_workspace| user_workspace.id == response.latest_workspace_id)
.cloned();
Ok(SignInResponse {
user_id: user_profile.uid,
user_id: response.uid,
name: DEFAULT_USER_NAME(),
latest_workspace: latest_workspace.unwrap(),
user_workspaces,
email: None,
token: None,
device_id: params.device_id,
encryption_type: EncryptionType::from_sign(&response.encryption_sign),
})
})
}
@ -154,15 +156,16 @@ where
let user_profile_resp = get_user_profile(postgrest, GetUserProfileParams::Uid(uid)).await?;
match user_profile_resp {
None => Ok(None),
Some(user_profile_resp) => Ok(Some(UserProfile {
id: user_profile_resp.uid,
email: user_profile_resp.email,
name: user_profile_resp.name,
Some(response) => Ok(Some(UserProfile {
uid: response.uid,
email: response.email,
name: response.name,
token: "".to_string(),
icon_url: "".to_string(),
openai_key: "".to_string(),
workspace_id: user_profile_resp.latest_workspace_id,
workspace_id: response.latest_workspace_id,
auth_type: AuthType::Supabase,
encryption_type: EncryptionType::from_sign(&response.encryption_sign),
})),
}
})
@ -214,7 +217,7 @@ where
let postgrest = try_get_postgrest?;
let action =
FetchObjectUpdateAction::new(awareness_id, CollabType::UserAwareness, postgrest);
action.run_with_fix_interval(5, 10).await
action.run_with_fix_interval(3, 3).await
}
.await,
)
@ -229,7 +232,7 @@ async fn get_user_profile(
) -> Result<Option<UserProfileResponse>, Error> {
let mut builder = postgrest
.from(USER_PROFILE_VIEW)
.select("uid, email, name, latest_workspace_id");
.select("uid, email, name, encryption_sign, latest_workspace_id");
match params {
GetUserProfileParams::Uid(uid) => builder = builder.eq("uid", uid.to_string()),
@ -245,7 +248,10 @@ async fn get_user_profile(
match profiles.len() {
0 => Ok(None),
1 => Ok(Some(profiles.swap_remove(0))),
_ => unreachable!(),
_ => {
tracing::error!("multiple user profile found");
Ok(None)
},
}
}
@ -276,7 +282,7 @@ async fn update_user_profile(
let exists = !postgrest
.from(USER_TABLE)
.select("uid")
.eq("uid", params.id.to_string())
.eq("uid", params.uid.to_string())
.execute()
.await?
.error_for_status()?
@ -284,9 +290,8 @@ async fn update_user_profile(
.await?
.is_empty();
if !exists {
anyhow::bail!("user uid {} does not exist", params.id);
anyhow::bail!("user uid {} does not exist", params.uid);
}
let mut update_params = serde_json::Map::new();
if let Some(name) = params.name {
update_params.insert("name".to_string(), serde_json::json!(name));
@ -294,18 +299,24 @@ async fn update_user_profile(
if let Some(email) = params.email {
update_params.insert("email".to_string(), serde_json::json!(email));
}
let update_payload = serde_json::to_string(&update_params).unwrap();
if let Some(encrypt_sign) = params.encryption_sign {
update_params.insert(
"encryption_sign".to_string(),
serde_json::json!(encrypt_sign),
);
}
let update_payload = serde_json::to_string(&update_params).unwrap();
let resp = postgrest
.from(USER_TABLE)
.update(update_payload)
.eq("uid", params.id.to_string())
.eq("uid", params.uid.to_string())
.execute()
.await?
.success_with_body()
.await?;
tracing::debug!("update user profile resp: {:?}", resp);
tracing::trace!("update user profile resp: {:?}", resp);
Ok(())
}

View File

@ -1,7 +1,9 @@
use anyhow::Error;
use anyhow::Result;
use reqwest::{Response, StatusCode};
use serde_json::Value;
use flowy_encrypt::{decrypt_bytes, encrypt_bytes};
use flowy_error::{ErrorCode, FlowyError};
use lib_infra::future::{to_fut, Fut};
@ -138,8 +140,20 @@ impl SupabaseBinaryColumnEncoder {
///
/// # Returns
/// Returns the encoded string in the format `\\xHEX_ENCODED_STRING`, plus a flag that is 1 when the value was encrypted and 0 otherwise.
pub fn encode<T: AsRef<[u8]>>(value: T) -> String {
format!("\\x{}", hex::encode(value))
pub fn encode<T: AsRef<[u8]>>(
value: T,
encryption_secret: &Option<String>,
) -> Result<(String, i32)> {
let encrypt = if encryption_secret.is_some() { 1 } else { 0 };
let value = match encryption_secret {
None => hex::encode(value),
Some(encryption_secret) => {
let encrypt_data = encrypt_bytes(value, encryption_secret)?;
hex::encode(encrypt_data)
},
};
Ok((format!("\\x{}", value), encrypt))
}
}
@ -157,9 +171,30 @@ impl SupabaseBinaryColumnDecoder {
/// # Returns
/// Returns a `Result` containing the decoded binary data if decoding succeeds.
/// Otherwise, returns an error.
pub fn decode<T: AsRef<str>>(value: T) -> Option<Vec<u8>> {
let s = value.as_ref().strip_prefix("\\x")?;
hex::decode(s).ok()
pub fn decode<T: AsRef<str>>(
value: T,
encrypt: i32,
encryption_secret: &Option<String>,
) -> Result<Vec<u8>> {
let s = value
.as_ref()
.strip_prefix("\\x")
.ok_or(anyhow::anyhow!("Value does not start with: \\x"))?;
if encrypt == 0 {
let bytes = hex::decode(s)?;
Ok(bytes)
} else {
match encryption_secret {
None => Err(anyhow::anyhow!(
"encryption_secret is None, but encrypt is 1"
)),
Some(encryption_secret) => {
let encrypt_data = hex::decode(s)?;
decrypt_bytes(encrypt_data, encryption_secret)
},
}
}
}
}
@ -178,7 +213,8 @@ impl SupabaseRealtimeEventBinaryColumnDecoder {
/// Returns an `Option` containing the decoded binary data if decoding is successful.
/// Otherwise, returns `None`.
pub fn decode<T: AsRef<str>>(value: T) -> Option<Vec<u8>> {
let bytes = SupabaseBinaryColumnDecoder::decode(value)?;
let s = value.as_ref().strip_prefix("\\x")?;
let bytes = hex::decode(s).ok()?;
hex::decode(bytes).ok()
}
}
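A minimal round-trip sketch (not part of this patch) showing how the encoder and decoder above compose for the plain hex path; the encrypted path follows the same shape with a secret from flowy_encrypt::generate_encrypt_secret.
#[cfg(test)]
mod binary_column_round_trip_sketch {
  use super::*;
  #[test]
  fn hex_round_trip_without_encryption() {
    // With no secret the encrypt flag is expected to be 0 and the value is plain hex.
    let (encoded, encrypt) = SupabaseBinaryColumnEncoder::encode([1u8, 2, 3], &None).unwrap();
    assert_eq!(encrypt, 0);
    assert!(encoded.starts_with("\\x"));
    // Decoding with the same flag and no secret yields the original bytes.
    let decoded = SupabaseBinaryColumnDecoder::decode(&encoded, encrypt, &None).unwrap();
    assert_eq!(decoded, vec![1, 2, 3]);
  }
}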

View File

@ -5,6 +5,7 @@ pub const AF_COLLAB_KEY_COLUMN: &str = "key";
pub const AF_COLLAB_SNAPSHOT_OID_COLUMN: &str = "oid";
pub const AF_COLLAB_SNAPSHOT_ID_COLUMN: &str = "sid";
pub const AF_COLLAB_SNAPSHOT_BLOB_COLUMN: &str = "blob";
pub const AF_COLLAB_SNAPSHOT_ENCRYPT_COLUMN: &str = "encrypt";
pub const AF_COLLAB_SNAPSHOT_BLOB_SIZE_COLUMN: &str = "blob_size";
pub const AF_COLLAB_SNAPSHOT_CREATED_AT_COLUMN: &str = "created_at";
pub const AF_COLLAB_SNAPSHOT_TABLE: &str = "af_collab_snapshot";
@ -16,6 +17,7 @@ pub const USER_EMAIL: &str = "email";
pub const USER_TABLE: &str = "af_user";
pub const WORKSPACE_TABLE: &str = "af_workspace";
pub const USER_PROFILE_VIEW: &str = "af_user_profile_view";
pub const USER_DEVICE_ID: &str = "device_id";
pub(crate) const WORKSPACE_ID: &str = "workspace_id";
pub(crate) const WORKSPACE_NAME: &str = "workspace_name";

View File

@ -25,6 +25,9 @@ pub(crate) struct UserProfileResponse {
#[serde(deserialize_with = "deserialize_null_or_default")]
pub latest_workspace_id: String,
#[serde(deserialize_with = "deserialize_null_or_default")]
pub encryption_sign: String,
}
#[derive(Debug, Deserialize)]
@ -64,6 +67,8 @@ pub struct RealtimeCollabUpdate {
pub did: String,
#[serde(deserialize_with = "deserialize_value")]
pub value: Vec<u8>,
#[serde(default)]
pub encrypt: i32,
}
pub fn deserialize_value<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>

View File

@ -1,5 +1,5 @@
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::{Arc, Weak};
use collab_plugins::cloud_storage::{CollabObject, RemoteCollabStorage, RemoteUpdateSender};
use parking_lot::{Mutex, RwLock};
@ -17,7 +17,7 @@ use crate::supabase::api::{
SupabaseUserServiceImpl,
};
use crate::supabase::entities::RealtimeCollabUpdateEvent;
use crate::AppFlowyServer;
use crate::{AppFlowyEncryption, AppFlowyServer};
/// https://www.pgbouncer.org/features.html
/// Only support session mode.
@ -60,13 +60,21 @@ pub struct SupabaseServer {
device_id: Mutex<String>,
update_tx: RwLock<HashMap<String, RemoteUpdateSender>>,
restful_postgres: Arc<RwLock<Option<Arc<RESTfulPostgresServer>>>>,
encryption: Weak<dyn AppFlowyEncryption>,
}
impl SupabaseServer {
pub fn new(config: SupabaseConfiguration) -> Self {
pub fn new(
config: SupabaseConfiguration,
enable_sync: bool,
encryption: Weak<dyn AppFlowyEncryption>,
) -> Self {
let update_tx = RwLock::new(HashMap::new());
let restful_postgres = if config.enable_sync {
Some(Arc::new(RESTfulPostgresServer::new(config.clone())))
let restful_postgres = if enable_sync {
Some(Arc::new(RESTfulPostgresServer::new(
config.clone(),
encryption.clone(),
)))
} else {
None
};
@ -75,6 +83,7 @@ impl SupabaseServer {
device_id: Default::default(),
update_tx,
restful_postgres: Arc::new(RwLock::new(restful_postgres)),
encryption,
}
}
@ -83,8 +92,8 @@ impl SupabaseServer {
if self.restful_postgres.read().is_some() {
return;
}
*self.restful_postgres.write() =
Some(Arc::new(RESTfulPostgresServer::new(self.config.clone())));
let postgres = RESTfulPostgresServer::new(self.config.clone(), self.encryption.clone());
*self.restful_postgres.write() = Some(Arc::new(postgres));
} else {
*self.restful_postgres.write() = None;
}
@ -92,7 +101,7 @@ impl SupabaseServer {
}
impl AppFlowyServer for SupabaseServer {
fn enable_sync(&self, enable: bool) {
fn set_enable_sync(&self, enable: bool) {
tracing::info!("supabase sync: {}", enable);
self.set_enable_sync(enable);
}
@ -134,6 +143,7 @@ impl AppFlowyServer for SupabaseServer {
Some(Arc::new(SupabaseCollabStorageImpl::new(
SupabaseServerServiceImpl(self.restful_postgres.clone()),
Some(rx),
self.encryption.clone(),
)))
}

View File

@ -5,18 +5,19 @@ use flowy_user_deps::entities::SignUpResponse;
use lib_infra::box_any::BoxAny;
use crate::supabase_test::util::{
collab_service, database_service, get_supabase_config, sign_up_param, user_auth_service,
collab_service, database_service, get_supabase_ci_config, third_party_sign_up_param,
user_auth_service,
};
#[tokio::test]
async fn supabase_create_workspace_test() {
if get_supabase_config().is_none() {
if get_supabase_ci_config().is_none() {
return;
}
let user_service = user_auth_service();
let uuid = Uuid::new_v4().to_string();
let params = sign_up_param(uuid);
let params = third_party_sign_up_param(uuid);
let user: SignUpResponse = user_service.sign_up(BoxAny::new(params)).await.unwrap();
let collab_service = collab_service();

View File

@ -1,8 +1,6 @@
use assert_json_diff::assert_json_eq;
use collab_plugins::cloud_storage::{CollabObject, CollabType};
use futures::future::join_all;
use serde_json::json;
use tokio::task;
use uuid::Uuid;
use yrs::types::ToJson;
use yrs::updates::decoder::Decode;
@ -12,12 +10,13 @@ use flowy_user_deps::entities::SignUpResponse;
use lib_infra::box_any::BoxAny;
use crate::supabase_test::util::{
collab_service, folder_service, get_supabase_config, sign_up_param, user_auth_service,
collab_service, folder_service, get_supabase_ci_config, third_party_sign_up_param,
user_auth_service,
};
#[tokio::test]
async fn supabase_create_workspace_test() {
if get_supabase_config().is_none() {
if get_supabase_ci_config().is_none() {
return;
}
@ -29,7 +28,7 @@ async fn supabase_create_workspace_test() {
#[tokio::test]
async fn supabase_get_folder_test() {
if get_supabase_config().is_none() {
if get_supabase_ci_config().is_none() {
return;
}
@ -37,7 +36,7 @@ async fn supabase_get_folder_test() {
let user_service = user_auth_service();
let collab_service = collab_service();
let uuid = Uuid::new_v4().to_string();
let params = sign_up_param(uuid);
let params = third_party_sign_up_param(uuid);
let user: SignUpResponse = user_service.sign_up(BoxAny::new(params)).await.unwrap();
let collab_object = CollabObject {
@ -75,26 +74,17 @@ async fn supabase_get_folder_test() {
.unwrap();
assert_eq!(updates.len(), 2);
// The init sync will try to merge the updates into one. Spawn 5 tasks to simulate
// multiple clients trying to init sync at the same time.
let mut handles = Vec::new();
for _ in 0..5 {
let cloned_collab_service = collab_service.clone();
let cloned_collab_object = collab_object.clone();
let handle = task::spawn(async move {
cloned_collab_service
.send_init_sync(&cloned_collab_object, 3, vec![])
.await
.unwrap();
});
handles.push(handle);
collab_service
.send_init_sync(&collab_object, 3, vec![])
.await
.unwrap();
}
let _results: Vec<_> = join_all(handles).await;
// after the init sync, the updates should be merged into one.
let updates: Vec<Vec<u8>> = folder_service
.get_folder_updates(&user.latest_workspace.id, user.user_id)
.await
.unwrap();
assert_eq!(updates.len(), 1);
// After the init sync, try to get the updates from the server.
let remote_update = updates.first().unwrap().clone();
@ -112,7 +102,7 @@ async fn supabase_get_folder_test() {
/// Finally, it asserts that the duplicated updates don't affect the overall data consistency in Supabase.
#[tokio::test]
async fn supabase_duplicate_updates_test() {
if get_supabase_config().is_none() {
if get_supabase_ci_config().is_none() {
return;
}
@ -120,7 +110,7 @@ async fn supabase_duplicate_updates_test() {
let user_service = user_auth_service();
let collab_service = collab_service();
let uuid = Uuid::new_v4().to_string();
let params = sign_up_param(uuid);
let params = third_party_sign_up_param(uuid);
let user: SignUpResponse = user_service.sign_up(BoxAny::new(params)).await.unwrap();
let collab_object = CollabObject {
@ -206,9 +196,20 @@ async fn supabase_duplicate_updates_test() {
}
}
/// The state vector of doc;
/// ```json
/// "map": {},
/// "array": []
/// ```
/// The old version of doc:
/// ```json
/// "map": {}
/// ```
///
/// Try to apply the updates from doc to old version doc and check the result.
#[tokio::test]
async fn supabase_diff_state_vec_test() {
if get_supabase_config().is_none() {
async fn supabase_diff_state_vector_test() {
if get_supabase_ci_config().is_none() {
return;
}
@ -216,7 +217,7 @@ async fn supabase_diff_state_vec_test() {
let user_service = user_auth_service();
let collab_service = collab_service();
let uuid = Uuid::new_v4().to_string();
let params = sign_up_param(uuid);
let params = third_party_sign_up_param(uuid);
let user: SignUpResponse = user_service.sign_up(BoxAny::new(params)).await.unwrap();
let collab_object = CollabObject {
@ -278,3 +279,22 @@ async fn supabase_diff_state_vec_test() {
})
);
}
// #[tokio::test]
// async fn print_folder_object_test() {
// if get_supabase_dev_config().is_none() {
// return;
// }
// let secret = Some("43bSxEPHeNkk5ZxxEYOfAjjd7sK2DJ$vVnxwuNc5ru0iKFvhs8wLg==".to_string());
// print_encryption_folder("f8b14b84-e8ec-4cf4-a318-c1e008ecfdfa", secret).await;
// }
//
// #[tokio::test]
// async fn print_folder_snapshot_object_test() {
// if get_supabase_dev_config().is_none() {
// return;
// }
// let secret = Some("NTXRXrDSybqFEm32jwMBDzbxvCtgjU$8np3TGywbBdJAzHtu1QIyQ==".to_string());
// // let secret = None;
// print_encryption_folder_snapshot("12533251-bdd4-41f4-995f-ff12fceeaa42", secret).await;
// }

View File

@ -1,19 +1,22 @@
use uuid::Uuid;
use flowy_encrypt::{encrypt_string, generate_encrypt_secret};
use flowy_user_deps::entities::*;
use lib_infra::box_any::BoxAny;
use crate::supabase_test::util::{get_supabase_config, sign_up_param, user_auth_service};
use crate::supabase_test::util::{
get_supabase_ci_config, third_party_sign_up_param, user_auth_service,
};
// ‼️‼️‼️ Warning: this test will create a table in the database
#[tokio::test]
async fn supabase_user_sign_up_test() {
if get_supabase_config().is_none() {
if get_supabase_ci_config().is_none() {
return;
}
let user_service = user_auth_service();
let uuid = Uuid::new_v4().to_string();
let params = sign_up_param(uuid);
let params = third_party_sign_up_param(uuid);
let user: SignUpResponse = user_service.sign_up(BoxAny::new(params)).await.unwrap();
assert!(!user.latest_workspace.id.is_empty());
assert!(!user.user_workspaces.is_empty());
@ -22,12 +25,12 @@ async fn supabase_user_sign_up_test() {
#[tokio::test]
async fn supabase_user_sign_up_with_existing_uuid_test() {
if get_supabase_config().is_none() {
if get_supabase_ci_config().is_none() {
return;
}
let user_service = user_auth_service();
let uuid = Uuid::new_v4().to_string();
let params = sign_up_param(uuid);
let params = third_party_sign_up_param(uuid);
let _user: SignUpResponse = user_service
.sign_up(BoxAny::new(params.clone()))
.await
@ -40,12 +43,12 @@ async fn supabase_user_sign_up_with_existing_uuid_test() {
#[tokio::test]
async fn supabase_update_user_profile_test() {
if get_supabase_config().is_none() {
if get_supabase_ci_config().is_none() {
return;
}
let user_service = user_auth_service();
let uuid = Uuid::new_v4().to_string();
let params = sign_up_param(uuid);
let params = third_party_sign_up_param(uuid);
let user: SignUpResponse = user_service
.sign_up(BoxAny::new(params.clone()))
.await
@ -55,13 +58,13 @@ async fn supabase_update_user_profile_test() {
.update_user(
UserCredentials::from_uid(user.user_id),
UpdateUserProfileParams {
id: user.user_id,
auth_type: Default::default(),
uid: user.user_id,
name: Some("123".to_string()),
email: Some(format!("{}@test.com", Uuid::new_v4())),
password: None,
icon_url: None,
openai_key: None,
encryption_sign: None,
},
)
.await
@ -78,12 +81,12 @@ async fn supabase_update_user_profile_test() {
#[tokio::test]
async fn supabase_get_user_profile_test() {
if get_supabase_config().is_none() {
if get_supabase_ci_config().is_none() {
return;
}
let user_service = user_auth_service();
let uuid = Uuid::new_v4().to_string();
let params = sign_up_param(uuid);
let params = third_party_sign_up_param(uuid);
let user: SignUpResponse = user_service
.sign_up(BoxAny::new(params.clone()))
.await
@ -99,7 +102,7 @@ async fn supabase_get_user_profile_test() {
#[tokio::test]
async fn supabase_get_not_exist_user_profile_test() {
if get_supabase_config().is_none() {
if get_supabase_ci_config().is_none() {
return;
}
@ -111,3 +114,37 @@ async fn supabase_get_not_exist_user_profile_test() {
// user not found
assert!(result.is_none());
}
#[tokio::test]
async fn user_encryption_sign_test() {
if get_supabase_ci_config().is_none() {
return;
}
let user_service = user_auth_service();
let uuid = Uuid::new_v4().to_string();
let params = third_party_sign_up_param(uuid);
let user: SignUpResponse = user_service.sign_up(BoxAny::new(params)).await.unwrap();
// generate encryption sign
let secret = generate_encrypt_secret();
let sign = encrypt_string(user.user_id.to_string(), &secret).unwrap();
user_service
.update_user(
UserCredentials::from_uid(user.user_id),
UpdateUserProfileParams::new(user.user_id)
.with_encryption_type(EncryptionType::SelfEncryption(sign.clone())),
)
.await
.unwrap();
let user_profile: UserProfile = user_service
.get_user_profile(UserCredentials::from_uid(user.user_id))
.await
.unwrap()
.unwrap();
assert_eq!(
user_profile.encryption_type,
EncryptionType::SelfEncryption(sign)
);
}
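A helper sketch (an assumption, not in this patch) of the check the test above exercises: decrypting the stored encryption sign with a candidate secret should give back the user's uid. It assumes decrypt_string keeps the argument order used by the tests in this diff.
#[allow(dead_code)]
fn verify_encryption_sign_sketch(uid: i64, sign: String, secret: &str) -> bool {
  // The sign is the uid encrypted with the user's secret, so decrypting it with a
  // candidate secret and comparing against the uid validates that secret.
  flowy_encrypt::decrypt_string(sign, secret)
    .map(|plain| plain == uid.to_string())
    .unwrap_or(false)
}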

View File

@ -1,66 +1,123 @@
use std::collections::HashMap;
use std::sync::Arc;
use collab::core::collab::MutexCollab;
use collab::core::origin::CollabOrigin;
use collab_plugins::cloud_storage::RemoteCollabStorage;
use uuid::Uuid;
use flowy_database_deps::cloud::DatabaseCloudService;
use flowy_folder_deps::cloud::FolderCloudService;
use flowy_folder_deps::cloud::{Folder, FolderCloudService};
use flowy_server::supabase::api::{
RESTfulPostgresServer, SupabaseCollabStorageImpl, SupabaseDatabaseServiceImpl,
SupabaseFolderServiceImpl, SupabaseServerServiceImpl, SupabaseUserServiceImpl,
};
use flowy_server::supabase::define::{USER_EMAIL, USER_UUID};
use flowy_server::supabase::define::{USER_DEVICE_ID, USER_EMAIL, USER_UUID};
use flowy_server::{AppFlowyEncryption, EncryptionImpl};
use flowy_server_config::supabase_config::SupabaseConfiguration;
use flowy_user_deps::cloud::UserService;
use crate::setup_log;
pub fn get_supabase_config() -> Option<SupabaseConfiguration> {
dotenv::from_filename("./.env.test").ok()?;
pub fn get_supabase_ci_config() -> Option<SupabaseConfiguration> {
dotenv::from_filename("./.env.ci").ok()?;
setup_log();
SupabaseConfiguration::from_env().ok()
}
#[allow(dead_code)]
pub fn get_supabase_dev_config() -> Option<SupabaseConfiguration> {
dotenv::from_filename("./.env.dev").ok()?;
setup_log();
SupabaseConfiguration::from_env().ok()
}
pub fn collab_service() -> Arc<dyn RemoteCollabStorage> {
let config = SupabaseConfiguration::from_env().unwrap();
let server = Arc::new(RESTfulPostgresServer::new(config));
let (server, encryption_impl) = appflowy_server(None);
Arc::new(SupabaseCollabStorageImpl::new(
SupabaseServerServiceImpl::new(server),
server,
None,
Arc::downgrade(&encryption_impl),
))
}
pub fn database_service() -> Arc<dyn DatabaseCloudService> {
let config = SupabaseConfiguration::from_env().unwrap();
let server = Arc::new(RESTfulPostgresServer::new(config));
Arc::new(SupabaseDatabaseServiceImpl::new(
SupabaseServerServiceImpl::new(server),
))
let (server, _encryption_impl) = appflowy_server(None);
Arc::new(SupabaseDatabaseServiceImpl::new(server))
}
pub fn user_auth_service() -> Arc<dyn UserService> {
let config = SupabaseConfiguration::from_env().unwrap();
let server = Arc::new(RESTfulPostgresServer::new(config));
Arc::new(SupabaseUserServiceImpl::new(
SupabaseServerServiceImpl::new(server),
))
let (server, _encryption_impl) = appflowy_server(None);
Arc::new(SupabaseUserServiceImpl::new(server))
}
pub fn folder_service() -> Arc<dyn FolderCloudService> {
let config = SupabaseConfiguration::from_env().unwrap();
let server = Arc::new(RESTfulPostgresServer::new(config));
Arc::new(SupabaseFolderServiceImpl::new(
SupabaseServerServiceImpl::new(server),
))
let (server, _encryption_impl) = appflowy_server(None);
Arc::new(SupabaseFolderServiceImpl::new(server))
}
pub fn sign_up_param(uuid: String) -> HashMap<String, String> {
#[allow(dead_code)]
pub fn encryption_folder_service(
secret: Option<String>,
) -> (Arc<dyn FolderCloudService>, Arc<dyn AppFlowyEncryption>) {
let (server, encryption_impl) = appflowy_server(secret);
let service = Arc::new(SupabaseFolderServiceImpl::new(server));
(service, encryption_impl)
}
pub fn encryption_collab_service(
secret: Option<String>,
) -> (Arc<dyn RemoteCollabStorage>, Arc<dyn AppFlowyEncryption>) {
let (server, encryption_impl) = appflowy_server(secret);
let service = Arc::new(SupabaseCollabStorageImpl::new(
server,
None,
Arc::downgrade(&encryption_impl),
));
(service, encryption_impl)
}
pub async fn print_encryption_folder(folder_id: &str, encryption_secret: Option<String>) {
let (cloud_service, _encryption) = encryption_folder_service(encryption_secret);
let folder_data = cloud_service.get_folder_data(folder_id).await.unwrap();
let json = serde_json::to_value(folder_data).unwrap();
println!("{}", serde_json::to_string_pretty(&json).unwrap());
}
pub async fn print_encryption_folder_snapshot(folder_id: &str, encryption_secret: Option<String>) {
let (cloud_service, _encryption) = encryption_collab_service(encryption_secret);
let snapshot = cloud_service
.get_snapshots(folder_id, 1)
.await
.pop()
.unwrap();
let collab = Arc::new(
MutexCollab::new_with_raw_data(CollabOrigin::Empty, folder_id, vec![snapshot.blob], vec![])
.unwrap(),
);
let folder_data = Folder::open(collab, None).get_folder_data().unwrap();
let json = serde_json::to_value(folder_data).unwrap();
println!("{}", serde_json::to_string_pretty(&json).unwrap());
}
pub fn appflowy_server(
encryption_secret: Option<String>,
) -> (SupabaseServerServiceImpl, Arc<dyn AppFlowyEncryption>) {
let config = SupabaseConfiguration::from_env().unwrap();
let encryption_impl: Arc<dyn AppFlowyEncryption> =
Arc::new(EncryptionImpl::new(encryption_secret));
let encryption = Arc::downgrade(&encryption_impl);
let server = Arc::new(RESTfulPostgresServer::new(config, encryption));
(SupabaseServerServiceImpl::new(server), encryption_impl)
}
pub fn third_party_sign_up_param(uuid: String) -> HashMap<String, String> {
let mut params = HashMap::new();
params.insert(USER_UUID.to_string(), uuid);
params.insert(
USER_EMAIL.to_string(),
format!("{}@test.com", Uuid::new_v4()),
);
params.insert(USER_DEVICE_ID.to_string(), Uuid::new_v4().to_string());
params
}

View File

@ -0,0 +1,3 @@
-- This file should undo anything in `up.sql`
ALTER TABLE user_table
DROP COLUMN encryption_type;

View File

@ -0,0 +1,3 @@
-- Your SQL goes here
ALTER TABLE user_table
ADD COLUMN encryption_type TEXT NOT NULL DEFAULT "";

View File

@ -30,6 +30,7 @@ diesel::table! {
token -> Text,
email -> Text,
auth_type -> Integer,
encryption_type -> Text,
}
}

View File

@ -11,8 +11,12 @@ flowy-user = { path = "../flowy-user"}
flowy-user-deps = { path = "../flowy-user-deps"}
flowy-net = { path = "../flowy-net"}
flowy-folder2 = { path = "../flowy-folder2", features = ["test_helper"] }
flowy-folder-deps = { path = "../flowy-folder-deps" }
flowy-database2 = { path = "../flowy-database2" }
flowy-database-deps = { path = "../flowy-database-deps" }
flowy-document2 = { path = "../flowy-document2" }
flowy-document-deps = { path = "../flowy-document-deps" }
flowy-encrypt = { path = "../flowy-encrypt" }
lib-dispatch = { path = "../lib-dispatch" }
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-server = { path = "../flowy-server" }

View File

@ -9,6 +9,7 @@ use nanoid::nanoid;
use parking_lot::RwLock;
use protobuf::ProtobufError;
use tokio::sync::broadcast::{channel, Sender};
use uuid::Uuid;
use flowy_core::{AppFlowyCore, AppFlowyCoreConfig};
use flowy_database2::entities::*;
@ -20,8 +21,10 @@ use flowy_folder2::entities::*;
use flowy_folder2::event_map::FolderEvent;
use flowy_notification::entities::SubscribeObject;
use flowy_notification::{register_notification_sender, NotificationSender};
use flowy_server::supabase::define::{USER_EMAIL, USER_UUID};
use flowy_user::entities::{AuthTypePB, ThirdPartyAuthPB, UserProfilePB};
use flowy_server::supabase::define::{USER_DEVICE_ID, USER_EMAIL, USER_UUID};
use flowy_user::entities::{
AuthTypePB, ThirdPartyAuthPB, UpdateCloudConfigPB, UserCloudConfigPB, UserProfilePB,
};
use flowy_user::errors::{FlowyError, FlowyResult};
use flowy_user::event_map::UserEvent::*;
@ -77,6 +80,34 @@ impl FlowyCoreTest {
}
}
pub async fn enable_encryption(&self) -> String {
let config = EventBuilder::new(self.clone())
.event(GetCloudConfig)
.async_send()
.await
.parse::<UserCloudConfigPB>();
let update = UpdateCloudConfigPB {
enable_sync: None,
enable_encrypt: Some(true),
};
let error = EventBuilder::new(self.clone())
.event(SetCloudConfig)
.payload(update)
.async_send()
.await
.error();
assert!(error.is_none());
config.encrypt_secret
}
pub async fn get_user_profile(&self) -> Result<UserProfilePB, FlowyError> {
EventBuilder::new(self.clone())
.event(GetUserProfile)
.async_send()
.await
.try_parse::<UserProfilePB>()
}
pub async fn new_with_guest_user() -> Self {
let test = Self::default();
test.sign_up_as_guest().await;
@ -87,9 +118,8 @@ impl FlowyCoreTest {
async_sign_up(self.inner.dispatcher(), AuthTypePB::Local).await
}
pub async fn supabase_party_sign_up(&self, uuid: &str) -> UserProfilePB {
let mut map = HashMap::new();
map.insert("uuid".to_string(), uuid.to_string());
pub async fn supabase_party_sign_up(&self) -> UserProfilePB {
let map = third_party_sign_up_param(Uuid::new_v4().to_string());
let payload = ThirdPartyAuthPB {
map,
auth_type: AuthTypePB::Supabase,
@ -125,6 +155,7 @@ impl FlowyCoreTest {
) -> FlowyResult<UserProfilePB> {
let mut map = HashMap::new();
map.insert(USER_UUID.to_string(), uuid.to_string());
map.insert(USER_DEVICE_ID.to_string(), uuid.to_string());
map.insert(
USER_EMAIL.to_string(),
email.unwrap_or_else(|| format!("{}@appflowy.io", nanoid!(10))),
@ -837,3 +868,14 @@ impl Drop for Cleaner {
Self::cleanup(&self.0)
}
}
pub fn third_party_sign_up_param(uuid: String) -> HashMap<String, String> {
let mut params = HashMap::new();
params.insert(USER_UUID.to_string(), uuid);
params.insert(
USER_EMAIL.to_string(),
format!("{}@test.com", Uuid::new_v4()),
);
params.insert(USER_DEVICE_ID.to_string(), Uuid::new_v4().to_string());
params
}

View File

@ -5,6 +5,7 @@ use collab::core::collab::MutexCollab;
use collab::core::origin::CollabOrigin;
use collab::preclude::updates::decoder::Decode;
use collab::preclude::{merge_updates_v1, JsonValue, Update};
use collab_folder::core::FolderData;
use flowy_folder2::entities::{FolderSnapshotPB, RepeatedFolderSnapshotPB, WorkspaceIdPB};
use flowy_folder2::event_map::FolderEvent::GetFolderSnapshots;
@ -29,6 +30,11 @@ impl FlowySupabaseFolderTest {
folder.as_ref().unwrap().to_json_value()
}
pub async fn get_local_folder_data(&self) -> FolderData {
let folder = self.folder_manager.get_mutex_folder().lock();
folder.as_ref().unwrap().get_folder_data().unwrap()
}
pub async fn get_folder_snapshots(&self, workspace_id: &str) -> Vec<FolderSnapshotPB> {
EventBuilder::new(self.inner.deref().clone())
.event(GetFolderSnapshots)

View File

@ -1,20 +1,84 @@
use std::time::Duration;
use assert_json_diff::assert_json_eq;
use serde_json::json;
use flowy_folder2::entities::{FolderSnapshotStatePB, FolderSyncStatePB};
use flowy_folder2::notification::FolderNotification::DidUpdateFolderSnapshotState;
use crate::folder::supabase_test::helper::{assert_folder_collab_content, FlowySupabaseFolderTest};
use crate::util::receive_with_timeout;
use crate::util::{get_folder_data_from_server, receive_with_timeout};
#[tokio::test]
async fn supabase_initial_folder_snapshot_test() {
async fn supabase_encrypt_folder_test() {
if let Some(test) = FlowySupabaseFolderTest::new().await {
let secret = test.enable_encryption().await;
let local_folder_data = test.get_local_folder_data().await;
let workspace_id = test.get_current_workspace().await.workspace.id;
let remote_folder_data = get_folder_data_from_server(&workspace_id, Some(secret))
.await
.unwrap()
.unwrap();
assert_json_eq!(json!(local_folder_data), json!(remote_folder_data));
}
}
#[tokio::test]
async fn supabase_decrypt_folder_data_test() {
if let Some(test) = FlowySupabaseFolderTest::new().await {
let secret = Some(test.enable_encryption().await);
let workspace_id = test.get_current_workspace().await.workspace.id;
test
.create_view(&workspace_id, "encrypt view".to_string())
.await;
let mut rx = test
.notification_sender
.subscribe_with_condition::<FolderSyncStatePB, _>(&workspace_id, |pb| pb.is_finish);
receive_with_timeout(&mut rx, Duration::from_secs(10))
.await
.unwrap();
let folder_data = get_folder_data_from_server(&workspace_id, secret)
.await
.unwrap()
.unwrap();
assert_eq!(folder_data.views.len(), 2);
assert_eq!(folder_data.views[1].name, "encrypt view");
}
}
#[tokio::test]
#[should_panic]
async fn supabase_decrypt_with_invalid_secret_folder_data_test() {
if let Some(test) = FlowySupabaseFolderTest::new().await {
let _ = Some(test.enable_encryption().await);
let workspace_id = test.get_current_workspace().await.workspace.id;
test
.create_view(&workspace_id, "encrypt view".to_string())
.await;
let mut rx = test
.notification_sender
.subscribe_with_condition::<FolderSyncStatePB, _>(&workspace_id, |pb| pb.is_finish);
receive_with_timeout(&mut rx, Duration::from_secs(10))
.await
.unwrap();
let _ = get_folder_data_from_server(&workspace_id, Some("invalid secret".to_string()))
.await
.unwrap();
}
}
#[tokio::test]
async fn supabase_folder_snapshot_test() {
if let Some(test) = FlowySupabaseFolderTest::new().await {
let workspace_id = test.get_current_workspace().await.workspace.id;
let mut rx = test
.notification_sender
.subscribe::<FolderSnapshotStatePB>(&workspace_id, DidUpdateFolderSnapshotState);
receive_with_timeout(&mut rx, Duration::from_secs(30))
receive_with_timeout(&mut rx, Duration::from_secs(10))
.await
.unwrap();
@ -44,7 +108,7 @@ async fn supabase_initial_folder_snapshot_test2() {
.notification_sender
.subscribe_with_condition::<FolderSyncStatePB, _>(&workspace_id, |pb| pb.is_finish);
receive_with_timeout(&mut rx, Duration::from_secs(30))
receive_with_timeout(&mut rx, Duration::from_secs(10))
.await
.unwrap();

View File

@ -2,6 +2,7 @@ use std::collections::HashMap;
use nanoid::nanoid;
use flowy_encrypt::decrypt_string;
use flowy_server::supabase::define::{USER_EMAIL, USER_UUID};
use flowy_test::event_builder::EventBuilder;
use flowy_test::FlowyCoreTest;
@ -38,6 +39,23 @@ async fn third_party_sign_up_test() {
}
}
#[tokio::test]
async fn third_party_sign_up_with_encrypt_test() {
if get_supabase_config().is_some() {
let test = FlowyCoreTest::new();
test.supabase_party_sign_up().await;
let user_profile = test.get_user_profile().await.unwrap();
assert!(user_profile.encryption_sign.is_empty());
let secret = test.enable_encryption().await;
let user_profile = test.get_user_profile().await.unwrap();
assert!(!user_profile.encryption_sign.is_empty());
let decryption_sign = decrypt_string(user_profile.encryption_sign, &secret).unwrap();
assert_eq!(decryption_sign, user_profile.id.to_string());
}
}
#[tokio::test]
async fn third_party_sign_up_with_duplicated_uuid() {
if get_supabase_config().is_some() {

View File

@ -1,22 +1,29 @@
use std::ops::Deref;
use std::sync::Arc;
use std::time::Duration;
use anyhow::Error;
use collab_folder::core::FolderData;
use collab_plugins::cloud_storage::RemoteCollabStorage;
use tokio::sync::mpsc::Receiver;
use tokio::time::timeout;
use flowy_database_deps::cloud::DatabaseCloudService;
use flowy_folder_deps::cloud::{FolderCloudService, FolderSnapshot};
use flowy_server::supabase::api::*;
use flowy_server::{AppFlowyEncryption, EncryptionImpl};
use flowy_server_config::supabase_config::SupabaseConfiguration;
use flowy_test::event_builder::EventBuilder;
use flowy_test::FlowyCoreTest;
use flowy_user::entities::{
AuthTypePB, UpdateUserProfilePayloadPB, UserCredentialsPB, UserProfilePB,
};
use flowy_user::entities::{AuthTypePB, UpdateUserProfilePayloadPB, UserCredentialsPB};
use flowy_user::errors::FlowyError;
use flowy_user::event_map::UserCloudServiceProvider;
use flowy_user::event_map::UserEvent::*;
use flowy_user_deps::cloud::UserService;
use flowy_user_deps::entities::AuthType;
pub fn get_supabase_config() -> Option<SupabaseConfiguration> {
dotenv::from_path(".env.test").ok()?;
dotenv::from_path(".env.ci").ok()?;
SupabaseConfiguration::from_env().ok()
}
@ -47,14 +54,6 @@ impl FlowySupabaseTest {
}
}
pub async fn get_user_profile(&self) -> Result<UserProfilePB, FlowyError> {
EventBuilder::new(self.inner.clone())
.event(GetUserProfile)
.async_send()
.await
.try_parse::<UserProfilePB>()
}
pub async fn update_user_profile(
&self,
payload: UpdateUserProfilePayloadPB,
@ -85,3 +84,89 @@ pub async fn receive_with_timeout<T>(
.ok_or(anyhow::anyhow!("recv timeout"))?;
Ok(res)
}
pub fn get_supabase_ci_config() -> Option<SupabaseConfiguration> {
dotenv::from_filename("./.env.ci").ok()?;
SupabaseConfiguration::from_env().ok()
}
#[allow(dead_code)]
pub fn get_supabase_dev_config() -> Option<SupabaseConfiguration> {
dotenv::from_filename("./.env.dev").ok()?;
SupabaseConfiguration::from_env().ok()
}
pub fn collab_service() -> Arc<dyn RemoteCollabStorage> {
let (server, encryption_impl) = appflowy_server(None);
Arc::new(SupabaseCollabStorageImpl::new(
server,
None,
Arc::downgrade(&encryption_impl),
))
}
pub fn database_service() -> Arc<dyn DatabaseCloudService> {
let (server, _encryption_impl) = appflowy_server(None);
Arc::new(SupabaseDatabaseServiceImpl::new(server))
}
pub fn user_auth_service() -> Arc<dyn UserService> {
let (server, _encryption_impl) = appflowy_server(None);
Arc::new(SupabaseUserServiceImpl::new(server))
}
pub fn folder_service() -> Arc<dyn FolderCloudService> {
let (server, _encryption_impl) = appflowy_server(None);
Arc::new(SupabaseFolderServiceImpl::new(server))
}
#[allow(dead_code)]
pub fn encryption_folder_service(
secret: Option<String>,
) -> (Arc<dyn FolderCloudService>, Arc<dyn AppFlowyEncryption>) {
let (server, encryption_impl) = appflowy_server(secret);
let service = Arc::new(SupabaseFolderServiceImpl::new(server));
(service, encryption_impl)
}
pub fn encryption_collab_service(
secret: Option<String>,
) -> (Arc<dyn RemoteCollabStorage>, Arc<dyn AppFlowyEncryption>) {
let (server, encryption_impl) = appflowy_server(secret);
let service = Arc::new(SupabaseCollabStorageImpl::new(
server,
None,
Arc::downgrade(&encryption_impl),
));
(service, encryption_impl)
}
pub async fn get_folder_data_from_server(
folder_id: &str,
encryption_secret: Option<String>,
) -> Result<Option<FolderData>, Error> {
let (cloud_service, _encryption) = encryption_folder_service(encryption_secret);
cloud_service.get_folder_data(folder_id).await
}
pub async fn get_folder_snapshots(
folder_id: &str,
encryption_secret: Option<String>,
) -> Vec<FolderSnapshot> {
let (cloud_service, _encryption) = encryption_folder_service(encryption_secret);
cloud_service
.get_folder_snapshots(folder_id, 10)
.await
.unwrap()
}
pub fn appflowy_server(
encryption_secret: Option<String>,
) -> (SupabaseServerServiceImpl, Arc<dyn AppFlowyEncryption>) {
let config = SupabaseConfiguration::from_env().unwrap();
let encryption_impl: Arc<dyn AppFlowyEncryption> =
Arc::new(EncryptionImpl::new(encryption_secret));
let encryption = Arc::downgrade(&encryption_impl);
let server = Arc::new(RESTfulPostgresServer::new(config, encryption));
(SupabaseServerServiceImpl::new(server), encryption_impl)
}

View File

@ -2,6 +2,7 @@ use std::collections::HashMap;
use std::str::FromStr;
use anyhow::Error;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use flowy_error::{ErrorCode, FlowyError};
@ -13,6 +14,24 @@ use crate::entities::{
UserProfile, UserWorkspace,
};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserCloudConfig {
pub enable_sync: bool,
pub enable_encrypt: bool,
// The secret used to encrypt the user's data
pub encrypt_secret: String,
}
impl UserCloudConfig {
pub fn new(encrypt_secret: String) -> Self {
Self {
enable_sync: true,
enable_encrypt: false,
encrypt_secret,
}
}
}
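A usage sketch (not part of this patch) of the constructor above; the secret here is an illustrative placeholder, while the real flow generates it with flowy_encrypt::generate_encrypt_secret.
#[allow(dead_code)]
fn cloud_config_sketch() -> UserCloudConfig {
  // `new` enables sync and leaves encryption off until the user opts in.
  let generated_secret = "illustrative-secret".to_string(); // placeholder, not a real secret
  UserCloudConfig::new(generated_secret)
}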
/// Provide the generic interface for the user cloud service
/// The user cloud service is responsible for the user authentication and user profile management
pub trait UserService: Send + Sync {

View File

@ -1,8 +1,21 @@
use std::str::FromStr;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_repr::*;
use uuid::Uuid;
pub trait UserAuthResponse {
fn user_id(&self) -> i64;
fn user_name(&self) -> &str;
fn latest_workspace(&self) -> &UserWorkspace;
fn user_workspaces(&self) -> &[UserWorkspace];
fn device_id(&self) -> &str;
fn user_token(&self) -> Option<String>;
fn user_email(&self) -> Option<String>;
fn encryption_type(&self) -> EncryptionType;
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct SignInResponse {
pub user_id: i64,
@ -12,6 +25,41 @@ pub struct SignInResponse {
pub email: Option<String>,
pub token: Option<String>,
pub device_id: String,
pub encryption_type: EncryptionType,
}
impl UserAuthResponse for SignInResponse {
fn user_id(&self) -> i64 {
self.user_id
}
fn user_name(&self) -> &str {
&self.name
}
fn latest_workspace(&self) -> &UserWorkspace {
&self.latest_workspace
}
fn user_workspaces(&self) -> &[UserWorkspace] {
&self.user_workspaces
}
fn device_id(&self) -> &str {
&self.device_id
}
fn user_token(&self) -> Option<String> {
self.token.clone()
}
fn user_email(&self) -> Option<String> {
self.email.clone()
}
fn encryption_type(&self) -> EncryptionType {
self.encryption_type.clone()
}
}
#[derive(Default, Serialize, Deserialize, Debug)]
@ -38,10 +86,45 @@ pub struct SignUpResponse {
pub name: String,
pub latest_workspace: UserWorkspace,
pub user_workspaces: Vec<UserWorkspace>,
pub is_new: bool,
pub is_new_user: bool,
pub email: Option<String>,
pub token: Option<String>,
pub device_id: String,
pub encryption_type: EncryptionType,
}
impl UserAuthResponse for SignUpResponse {
fn user_id(&self) -> i64 {
self.user_id
}
fn user_name(&self) -> &str {
&self.name
}
fn latest_workspace(&self) -> &UserWorkspace {
&self.latest_workspace
}
fn user_workspaces(&self) -> &[UserWorkspace] {
&self.user_workspaces
}
fn device_id(&self) -> &str {
&self.device_id
}
fn user_token(&self) -> Option<String> {
self.token.clone()
}
fn user_email(&self) -> Option<String> {
self.email.clone()
}
fn encryption_type(&self) -> EncryptionType {
self.encryption_type.clone()
}
}
#[derive(Clone, Debug)]
@ -83,6 +166,7 @@ pub struct UserWorkspace {
pub id: String,
pub name: String,
pub created_at: DateTime<Utc>,
/// The database storage id is used to index all the databases in the current workspace.
pub database_storage_id: String,
}
@ -99,7 +183,8 @@ impl UserWorkspace {
#[derive(Serialize, Deserialize, Default, Debug, Clone)]
pub struct UserProfile {
pub id: i64,
#[serde(rename = "id")]
pub uid: i64,
pub email: String,
pub name: String,
pub token: String,
@ -107,51 +192,124 @@ pub struct UserProfile {
pub openai_key: String,
pub workspace_id: String,
pub auth_type: AuthType,
// If the encryption_sign is not empty, it means the user has enabled encryption.
pub encryption_type: EncryptionType,
}
#[derive(Serialize, Deserialize, Debug, Clone, Default, Eq, PartialEq)]
pub enum EncryptionType {
#[default]
NoEncryption,
SelfEncryption(String),
}
impl EncryptionType {
pub fn from_sign(sign: &str) -> Self {
if sign.is_empty() {
EncryptionType::NoEncryption
} else {
EncryptionType::SelfEncryption(sign.to_owned())
}
}
}
impl EncryptionType {
pub fn is_need_encrypt_secret(&self) -> bool {
match self {
EncryptionType::NoEncryption => false,
EncryptionType::SelfEncryption(sign) => !sign.is_empty(),
}
}
}
impl FromStr for EncryptionType {
type Err = serde_json::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
serde_json::from_str(s)
}
}
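A short test sketch (not part of this patch) spelling out the mapping that from_sign encodes:
#[cfg(test)]
mod encryption_type_sketch {
  use super::*;
  #[test]
  fn from_sign_maps_empty_and_non_empty_signs() {
    // An empty sign means the account never enabled encryption.
    assert_eq!(EncryptionType::from_sign(""), EncryptionType::NoEncryption);
    // A non-empty sign carries the encrypted uid produced when encryption was enabled.
    assert_eq!(
      EncryptionType::from_sign("abc"),
      EncryptionType::SelfEncryption("abc".to_string())
    );
  }
}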
impl<T> From<(&T, &AuthType)> for UserProfile
where
T: UserAuthResponse,
{
fn from(params: (&T, &AuthType)) -> Self {
let (value, auth_type) = params;
Self {
uid: value.user_id(),
email: value.user_email().unwrap_or_default(),
name: value.user_name().to_owned(),
token: value.user_token().unwrap_or_default(),
icon_url: "".to_owned(),
openai_key: "".to_owned(),
workspace_id: value.latest_workspace().id.to_owned(),
auth_type: auth_type.clone(),
encryption_type: value.encryption_type(),
}
}
}
#[derive(Serialize, Deserialize, Default, Clone, Debug)]
pub struct UpdateUserProfileParams {
pub id: i64,
pub auth_type: AuthType,
pub uid: i64,
pub name: Option<String>,
pub email: Option<String>,
pub password: Option<String>,
pub icon_url: Option<String>,
pub openai_key: Option<String>,
pub encryption_sign: Option<String>,
}
impl UpdateUserProfileParams {
pub fn name(mut self, name: &str) -> Self {
pub fn new(uid: i64) -> Self {
Self {
uid,
..Default::default()
}
}
pub fn with_name(mut self, name: &str) -> Self {
self.name = Some(name.to_owned());
self
}
pub fn email(mut self, email: &str) -> Self {
pub fn with_email(mut self, email: &str) -> Self {
self.email = Some(email.to_owned());
self
}
pub fn password(mut self, password: &str) -> Self {
pub fn with_password(mut self, password: &str) -> Self {
self.password = Some(password.to_owned());
self
}
pub fn icon_url(mut self, icon_url: &str) -> Self {
pub fn with_icon_url(mut self, icon_url: &str) -> Self {
self.icon_url = Some(icon_url.to_owned());
self
}
pub fn openai_key(mut self, openai_key: &str) -> Self {
pub fn with_openai_key(mut self, openai_key: &str) -> Self {
self.openai_key = Some(openai_key.to_owned());
self
}
pub fn with_encryption_type(mut self, encryption_type: EncryptionType) -> Self {
let sign = match encryption_type {
EncryptionType::NoEncryption => "".to_string(),
EncryptionType::SelfEncryption(sign) => sign,
};
self.encryption_sign = Some(sign);
self
}
pub fn is_empty(&self) -> bool {
self.name.is_none()
&& self.email.is_none()
&& self.password.is_none()
&& self.icon_url.is_none()
&& self.openai_key.is_none()
&& self.encryption_sign.is_none()
}
}
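A usage sketch (not part of this patch) of the builder API above; the uid and values are illustrative.
#[allow(dead_code)]
fn update_params_sketch() -> UpdateUserProfileParams {
  // Rename the user and record a self-encryption sign in a single update.
  UpdateUserProfileParams::new(1)
    .with_name("AppFlowy")
    .with_encryption_type(EncryptionType::SelfEncryption("sign".to_string()))
}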

View File

@ -8,6 +8,7 @@ edition = "2018"
[dependencies]
flowy-derive = { path = "../../../shared-lib/flowy-derive" }
flowy-sqlite = { path = "../flowy-sqlite", optional = true }
flowy-encrypt = { path = "../flowy-encrypt" }
flowy-error = { path = "../flowy-error", features = ["impl_from_sqlite", "impl_from_dispatch_error"] }
lib-infra = { path = "../../../shared-lib/lib-infra" }
flowy-notification = { path = "../flowy-notification" }

View File

@ -1,6 +1,6 @@
use std::convert::TryInto;
use flowy_derive::ProtoBuf;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_user_deps::entities::*;
use crate::entities::parser::{UserEmail, UserIcon, UserName, UserOpenaiKey, UserPassword};
@ -42,18 +42,42 @@ pub struct UserProfilePB {
#[pb(index = 7)]
pub auth_type: AuthTypePB,
#[pb(index = 8)]
pub encryption_sign: String,
#[pb(index = 9)]
pub encryption_type: EncryptionTypePB,
}
#[derive(ProtoBuf_Enum, Eq, PartialEq, Debug, Clone)]
pub enum EncryptionTypePB {
NoEncryption = 0,
Symmetric = 1,
}
impl Default for EncryptionTypePB {
fn default() -> Self {
Self::NoEncryption
}
}
impl std::convert::From<UserProfile> for UserProfilePB {
fn from(user_profile: UserProfile) -> Self {
let (encryption_sign, encryption_ty) = match user_profile.encryption_type {
EncryptionType::NoEncryption => ("".to_string(), EncryptionTypePB::NoEncryption),
EncryptionType::SelfEncryption(sign) => (sign, EncryptionTypePB::Symmetric),
};
Self {
id: user_profile.id,
id: user_profile.uid,
email: user_profile.email,
name: user_profile.name,
token: user_profile.token,
icon_url: user_profile.icon_url,
openai_key: user_profile.openai_key,
auth_type: user_profile.auth_type.into(),
encryption_sign,
encryption_type: encryption_ty,
}
}
}
@ -77,9 +101,6 @@ pub struct UpdateUserProfilePayloadPB {
#[pb(index = 6, one_of)]
pub openai_key: Option<String>,
#[pb(index = 7)]
pub auth_type: AuthTypePB,
}
impl UpdateUserProfilePayloadPB {
@ -146,13 +167,13 @@ impl TryInto<UpdateUserProfileParams> for UpdateUserProfilePayloadPB {
};
Ok(UpdateUserProfileParams {
id: self.id,
auth_type: self.auth_type.into(),
uid: self.id,
name,
email,
password,
icon_url,
openai_key,
encryption_sign: None,
})
}
}

View File

@ -1,11 +1,11 @@
use std::collections::HashMap;
use std::convert::TryFrom;
use serde::{Deserialize, Serialize};
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use flowy_error::FlowyError;
use flowy_server_config::supabase_config::SupabaseConfiguration;
use flowy_user_deps::cloud::UserCloudConfig;
use crate::entities::EncryptionTypePB;
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct UserPreferencesPB {
@ -104,40 +104,53 @@ impl std::default::Default for AppearanceSettingsPB {
}
#[derive(Default, ProtoBuf)]
pub struct SupabaseConfigPB {
pub struct UserCloudConfigPB {
#[pb(index = 1)]
supabase_url: String,
enable_sync: bool,
#[pb(index = 2)]
key: String,
enable_encrypt: bool,
#[pb(index = 3)]
jwt_secret: String,
pub encrypt_secret: String,
}
#[derive(Default, ProtoBuf)]
pub struct UpdateCloudConfigPB {
#[pb(index = 1, one_of)]
pub enable_sync: Option<bool>,
#[pb(index = 2, one_of)]
pub enable_encrypt: Option<bool>,
}
#[derive(Default, ProtoBuf)]
pub struct UserSecretPB {
#[pb(index = 1)]
pub user_id: i64,
#[pb(index = 2)]
pub encryption_secret: String,
#[pb(index = 3)]
pub encryption_type: EncryptionTypePB,
#[pb(index = 4)]
enable_sync: bool,
pub encryption_sign: String,
}
impl TryFrom<SupabaseConfigPB> for SupabaseConfiguration {
type Error = FlowyError;
fn try_from(config: SupabaseConfigPB) -> Result<Self, Self::Error> {
Ok(SupabaseConfiguration {
url: config.supabase_url,
anon_key: config.key,
jwt_secret: config.jwt_secret,
enable_sync: config.enable_sync,
})
}
#[derive(Default, ProtoBuf)]
pub struct UserEncryptionSecretCheckPB {
#[pb(index = 1)]
pub is_need_secret: bool,
}
impl From<SupabaseConfiguration> for SupabaseConfigPB {
fn from(value: SupabaseConfiguration) -> Self {
impl From<UserCloudConfig> for UserCloudConfigPB {
fn from(value: UserCloudConfig) -> Self {
Self {
supabase_url: value.url,
key: value.anon_key,
jwt_secret: value.jwt_secret,
enable_sync: value.enable_sync,
enable_encrypt: value.enable_encrypt,
encrypt_secret: value.encrypt_secret,
}
}
}

View File

@ -1,18 +1,18 @@
use std::convert::TryFrom;
use std::sync::Weak;
use std::{convert::TryInto, sync::Arc};
use serde_json::Value;
use flowy_error::{FlowyError, FlowyResult};
use flowy_server_config::supabase_config::SupabaseConfiguration;
use flowy_sqlite::kv::StorePreferences;
use flowy_user_deps::entities::*;
use lib_dispatch::prelude::*;
use lib_infra::box_any::BoxAny;
use crate::entities::*;
use crate::manager::{get_supabase_config, UserManager};
use crate::manager::UserManager;
use crate::notification::{send_notification, UserNotification};
use crate::services::cloud_config::{generate_cloud_config, get_cloud_config, save_cloud_config};
fn upgrade_manager(manager: AFPluginState<Weak<UserManager>>) -> FlowyResult<Arc<UserManager>> {
let manager = manager
@ -38,7 +38,6 @@ pub async fn sign_in(
let manager = upgrade_manager(manager)?;
let params: SignInParams = data.into_inner().try_into()?;
let auth_type = params.auth_type.clone();
manager.update_auth_type(&auth_type).await;
let user_profile: UserProfilePB = manager
.sign_in(BoxAny::new(params), auth_type)
@ -64,7 +63,6 @@ pub async fn sign_up(
let manager = upgrade_manager(manager)?;
let params: SignUpParams = data.into_inner().try_into()?;
let auth_type = params.auth_type.clone();
manager.update_auth_type(&auth_type).await;
let user_profile = manager.sign_up(auth_type, BoxAny::new(params)).await?;
data_result_ok(user_profile.into())
@ -175,28 +173,134 @@ pub async fn third_party_auth_handler(
let manager = upgrade_manager(manager)?;
let params = data.into_inner();
let auth_type: AuthType = params.auth_type.into();
manager.update_auth_type(&auth_type).await;
let user_profile = manager.sign_up(auth_type, BoxAny::new(params.map)).await?;
data_result_ok(user_profile.into())
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub async fn set_supabase_config_handler(
data: AFPluginData<SupabaseConfigPB>,
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn set_encrypt_secret_handler(
manager: AFPluginState<Weak<UserManager>>,
data: AFPluginData<UserSecretPB>,
store_preferences: AFPluginState<Weak<StorePreferences>>,
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let config = SupabaseConfiguration::try_from(data.into_inner())?;
manager.save_supabase_config(config);
let store_preferences = upgrade_store_preferences(store_preferences)?;
let data = data.into_inner();
let mut config = get_cloud_config(&store_preferences).unwrap_or_else(|| {
tracing::trace!("Generate default cloud config");
generate_cloud_config(&store_preferences)
});
match data.encryption_type {
EncryptionTypePB::NoEncryption => {
tracing::error!("Encryption type is NoEncryption, but set encrypt secret");
},
EncryptionTypePB::Symmetric => {
manager.check_encryption_sign_with_secret(
data.user_id,
&data.encryption_sign,
&data.encryption_secret,
)?;
config.encrypt_secret = data.encryption_secret;
config.enable_encrypt = true;
manager
.set_encrypt_secret(
data.user_id,
config.encrypt_secret.clone(),
EncryptionType::SelfEncryption(data.encryption_sign),
)
.await?;
},
}
save_cloud_config(data.user_id, &store_preferences, config)?;
manager.resume_sign_up().await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn get_supabase_config_handler(
pub async fn check_encrypt_secret_handler(
manager: AFPluginState<Weak<UserManager>>,
) -> DataResult<UserEncryptionSecretCheckPB, FlowyError> {
let manager = upgrade_manager(manager)?;
let uid = manager.get_session()?.user_id;
let profile = manager.get_user_profile(uid, false).await?;
let is_need_secret = match profile.encryption_type {
EncryptionType::NoEncryption => false,
EncryptionType::SelfEncryption(sign) => {
if sign.is_empty() {
false
} else {
manager.check_encryption_sign(uid, &sign).is_err()
}
},
};
data_result_ok(UserEncryptionSecretCheckPB { is_need_secret })
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn set_cloud_config_handler(
manager: AFPluginState<Weak<UserManager>>,
data: AFPluginData<UpdateCloudConfigPB>,
store_preferences: AFPluginState<Weak<StorePreferences>>,
) -> DataResult<SupabaseConfigPB, FlowyError> {
) -> Result<(), FlowyError> {
let manager = upgrade_manager(manager)?;
let session = manager.get_session()?;
let store_preferences = upgrade_store_preferences(store_preferences)?;
let config = get_supabase_config(&store_preferences).unwrap_or_default();
let update = data.into_inner();
let mut config = get_cloud_config(&store_preferences)
.ok_or(FlowyError::internal().context("Can't find any cloud config"))?;
if let Some(enable_sync) = update.enable_sync {
manager.cloud_services.set_enable_sync(enable_sync);
config.enable_sync = enable_sync;
}
if let Some(enable_encrypt) = update.enable_encrypt {
config.enable_encrypt = enable_encrypt;
if enable_encrypt {
// The encryption secret is generated when the user first enables encryption and will be
// used to validate that the encryption secret is correct when the user logs in.
let encryption_sign =
manager.generate_encryption_sign(session.user_id, &config.encrypt_secret)?;
let encryption_type = EncryptionType::SelfEncryption(encryption_sign);
manager
.set_encrypt_secret(
session.user_id,
config.encrypt_secret.clone(),
encryption_type.clone(),
)
.await?;
let params =
UpdateUserProfileParams::new(session.user_id).with_encryption_type(encryption_type);
manager.update_user_profile(params).await?;
}
}
let config_pb = UserCloudConfigPB::from(config.clone());
save_cloud_config(session.user_id, &store_preferences, config)?;
send_notification(
&session.user_id.to_string(),
UserNotification::DidUpdateCloudConfig,
)
.payload(config_pb)
.send();
Ok(())
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub async fn get_cloud_config_handler(
store_preferences: AFPluginState<Weak<StorePreferences>>,
) -> DataResult<UserCloudConfigPB, FlowyError> {
let store_preferences = upgrade_store_preferences(store_preferences)?;
// Generate the default config if the config is not exist
let config = get_cloud_config(&store_preferences)
.unwrap_or_else(|| generate_cloud_config(&store_preferences));
data_result_ok(config.into())
}
@ -279,7 +383,9 @@ pub async fn open_historical_users_handler(
let user = user.into_inner();
let manager = upgrade_manager(manager)?;
let auth_type = AuthType::from(user.auth_type);
manager.open_historical_user(user.user_id, user.device_id, auth_type)?;
manager
.open_historical_user(user.user_id, user.device_id, auth_type)
.await?;
Ok(())
}

View File

@ -6,7 +6,6 @@ use strum_macros::Display;
use flowy_derive::{Flowy_Event, ProtoBuf_Enum};
use flowy_error::FlowyResult;
use flowy_server_config::supabase_config::SupabaseConfiguration;
use flowy_user_deps::cloud::UserService;
use flowy_user_deps::entities::*;
use lib_dispatch::prelude::*;
@ -35,8 +34,10 @@ pub fn init(user_session: Weak<UserManager>) -> AFPlugin {
.event(UserEvent::SetAppearanceSetting, set_appearance_setting)
.event(UserEvent::GetAppearanceSetting, get_appearance_setting)
.event(UserEvent::GetUserSetting, get_user_setting)
.event(UserEvent::SetSupabaseConfig, set_supabase_config_handler)
.event(UserEvent::GetSupabaseConfig, get_supabase_config_handler)
.event(UserEvent::SetCloudConfig, set_cloud_config_handler)
.event(UserEvent::GetCloudConfig, get_cloud_config_handler)
.event(UserEvent::SetEncryptionSecret, set_encrypt_secret_handler)
.event(UserEvent::CheckEncryptionSign, check_encrypt_secret_handler)
.event(UserEvent::ThirdPartyAuth, third_party_auth_handler)
.event(
UserEvent::GetAllUserWorkspaces,
@ -101,7 +102,8 @@ pub trait UserStatusCallback: Send + Sync + 'static {
/// The user cloud service provider.
/// The provider can be Supabase, Firebase, AWS, or any other cloud service.
pub trait UserCloudServiceProvider: Send + Sync + 'static {
fn set_supabase_config(&self, supabase_config: &SupabaseConfiguration);
fn set_enable_sync(&self, enable_sync: bool);
fn set_encrypt_secret(&self, secret: String);
fn set_auth_type(&self, auth_type: AuthType);
fn set_device_id(&self, device_id: &str);
fn get_user_service(&self) -> Result<Arc<dyn UserService>, FlowyError>;
@ -112,8 +114,12 @@ impl<T> UserCloudServiceProvider for Arc<T>
where
T: UserCloudServiceProvider,
{
fn set_supabase_config(&self, supabase_config: &SupabaseConfiguration) {
(**self).set_supabase_config(supabase_config)
fn set_enable_sync(&self, enable_sync: bool) {
(**self).set_enable_sync(enable_sync)
}
fn set_encrypt_secret(&self, secret: String) {
(**self).set_encrypt_secret(secret)
}
fn set_auth_type(&self, auth_type: AuthType) {
@ -221,13 +227,17 @@ pub enum UserEvent {
#[event(input = "ThirdPartyAuthPB", output = "UserProfilePB")]
ThirdPartyAuth = 10,
/// Set the supabase config. It will be written to the environment variables.
/// Check out the `write_to_env` of [SupabaseConfigPB].
#[event(input = "SupabaseConfigPB")]
SetSupabaseConfig = 13,
#[event(input = "UpdateCloudConfigPB")]
SetCloudConfig = 13,
#[event(output = "SupabaseConfigPB")]
GetSupabaseConfig = 14,
#[event(output = "UserCloudConfigPB")]
GetCloudConfig = 14,
#[event(input = "UserSecretPB")]
SetEncryptionSecret = 15,
#[event(output = "UserEncryptionSecretCheckPB")]
CheckEncryptionSign = 16,
/// Return the all the workspaces of the user
#[event()]

View File

@ -9,8 +9,7 @@ use serde_json::Value;
use tokio::sync::{Mutex, RwLock};
use uuid::Uuid;
use flowy_error::{internal_error, ErrorCode};
use flowy_server_config::supabase_config::SupabaseConfiguration;
use flowy_error::{internal_error, ErrorCode, FlowyResult};
use flowy_sqlite::kv::StorePreferences;
use flowy_sqlite::schema::user_table;
use flowy_sqlite::ConnectionPool;
@ -25,16 +24,15 @@ use crate::event_map::{
use crate::migrations::historical_document::HistoricalEmptyDocumentMigration;
use crate::migrations::local_user_to_cloud::migration_user_to_cloud;
use crate::migrations::migration::UserLocalDataMigration;
use crate::migrations::UserMigrationContext;
use crate::migrations::MigrationUser;
use crate::services::cloud_config::remove_cloud_config;
use crate::services::database::UserDB;
use crate::services::entities::Session;
use crate::services::entities::{ResumableSignUp, Session};
use crate::services::user_awareness::UserAwarenessDataSource;
use crate::services::user_sql::{UserTable, UserTableChangeset};
use crate::services::user_workspace::save_user_workspaces;
use crate::{errors::FlowyError, notification::*};
const SUPABASE_CONFIG_CACHE_KEY: &str = "af_supabase_config";
pub struct UserSessionConfig {
root_dir: String,
@ -62,6 +60,7 @@ pub struct UserManager {
pub(crate) user_awareness: Arc<Mutex<Option<MutexUserAwareness>>>,
pub(crate) user_status_callback: RwLock<Arc<dyn UserStatusCallback>>,
pub(crate) collab_builder: Weak<AppFlowyCollabBuilder>,
resumable_sign_up: Mutex<Option<ResumableSignUp>>,
}
impl UserManager {
@ -82,6 +81,7 @@ impl UserManager {
user_awareness: Arc::new(Default::default()),
user_status_callback,
collab_builder,
resumable_sign_up: Default::default(),
}
}
@ -160,29 +160,18 @@ impl UserManager {
params: BoxAny,
auth_type: AuthType,
) -> Result<UserProfile, FlowyError> {
self.update_auth_type(&auth_type).await;
let response: SignInResponse = self
.cloud_services
.get_user_service()?
.sign_in(params)
.await?;
let session: Session = response.clone().into();
let uid = session.user_id;
let device_id = session.device_id.clone();
let session = Session::from(&response);
self.set_collab_config(&session);
self.set_current_session(Some(session.clone()))?;
self.log_historical_user(
uid,
&response.device_id,
response.name.clone(),
&auth_type,
self.user_dir(uid),
);
let user_workspace = response.latest_workspace.clone();
save_user_workspaces(uid, self.db_pool(uid)?, &response.user_workspaces)?;
let user_profile: UserProfile = self
.save_user(uid, (response, auth_type).into())
.await?
.into();
let latest_workspace = response.latest_workspace.clone();
let user_profile = UserProfile::from((&response, &auth_type));
self.save_auth_data(&response, &auth_type, &session).await?;
let _ = self
.initialize_user_awareness(&session, UserAwarenessDataSource::Remote)
.await;
@ -191,25 +180,23 @@ impl UserManager {
.user_status_callback
.read()
.await
.did_sign_in(user_profile.id, &user_workspace, &device_id)
.did_sign_in(user_profile.uid, &latest_workspace, &session.device_id)
.await
{
tracing::error!("Failed to call did_sign_in callback: {:?}", e);
}
send_sign_in_notification()
.payload::<UserProfilePB>(user_profile.clone().into())
.send();
Ok(user_profile)
}
pub async fn update_auth_type(&self, auth_type: &AuthType) {
pub(crate) async fn update_auth_type(&self, auth_type: &AuthType) {
self
.user_status_callback
.read()
.await
.auth_type_did_changed(auth_type.clone());
self.cloud_services.set_auth_type(auth_type.clone());
}
@ -220,94 +207,117 @@ impl UserManager {
/// and saving workspace information. If a user is signing up with a new profile and previously had guest data,
/// this function may migrate that data over to the new account.
///
#[tracing::instrument(level = "debug", skip(self, params))]
#[tracing::instrument(level = "info", skip(self, params))]
pub async fn sign_up(
&self,
auth_type: AuthType,
params: BoxAny,
) -> Result<UserProfile, FlowyError> {
let old_user = {
if let Ok(old_session) = self.get_session() {
self
.get_user_profile(old_session.user_id, false)
.await
.ok()
.map(|user_profile| UserMigrationContext {
user_profile,
session: old_session,
})
} else {
None
}
};
remove_cloud_config(&self.store_preferences);
self.update_auth_type(&auth_type).await;
let migration_user = self.get_migration_user(&auth_type).await;
let auth_service = self.cloud_services.get_user_service()?;
let response: SignUpResponse = auth_service.sign_up(params).await?;
let mut sign_up_context = SignUpContext {
is_new: response.is_new,
local_folder: None,
};
let user_profile = UserProfile::from((&response, &auth_type));
if user_profile.encryption_type.is_need_encrypt_secret() {
self
.resumable_sign_up
.lock()
.await
.replace(ResumableSignUp {
user_profile: user_profile.clone(),
migration_user,
response,
auth_type,
});
} else {
self
.continue_sign_up(&user_profile, migration_user, response, &auth_type)
.await?;
}
Ok(user_profile)
}
#[tracing::instrument(level = "info", skip(self))]
pub async fn resume_sign_up(&self) -> Result<(), FlowyError> {
let ResumableSignUp {
user_profile,
migration_user,
response,
auth_type,
} = self
.resumable_sign_up
.lock()
.await
.clone()
.ok_or(FlowyError::new(
ErrorCode::Internal,
"No resumable sign up data",
))?;
self
.continue_sign_up(&user_profile, migration_user, response, &auth_type)
.await?;
Ok(())
}
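The resumable flow stashes the pending sign-up while the UI collects the encryption secret, then replays it here. A minimal sketch of such a stash, using `std::sync::Mutex` and `take()` where the crate uses `tokio::sync::Mutex` and `clone()`:

use std::sync::Mutex;

struct Resumable<T>(Mutex<Option<T>>);

impl<T> Resumable<T> {
  fn new() -> Self {
    Self(Mutex::new(None))
  }

  /// Park the pending sign-up until the user provides the encryption secret.
  fn stash(&self, pending: T) {
    *self.0.lock().unwrap() = Some(pending);
  }

  /// Hand the pending sign-up back exactly once; `None` means nothing was stashed.
  fn resume(&self) -> Option<T> {
    self.0.lock().unwrap().take()
  }
}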
#[tracing::instrument(level = "info", skip_all, err)]
async fn continue_sign_up(
&self,
user_profile: &UserProfile,
migration_user: Option<MigrationUser>,
response: SignUpResponse,
auth_type: &AuthType,
) -> FlowyResult<()> {
let new_session = Session::from(&response);
self.set_current_session(Some(new_session.clone()))?;
self.set_collab_config(&new_session);
let uid = response.user_id;
self.log_historical_user(
uid,
&response.device_id,
response.name.clone(),
&auth_type,
self.user_dir(uid),
);
save_user_workspaces(uid, self.db_pool(uid)?, &response.user_workspaces)?;
let new_user_profile: UserProfile = self
.save_user(uid, (response, auth_type.clone()).into())
.await?
.into();
let user_awareness_source = if sign_up_context.is_new {
let user_awareness_source = if response.is_new_user {
UserAwarenessDataSource::Local
} else {
UserAwarenessDataSource::Remote
};
// Only migrate the data if the user was logged in as a guest and is signing up as a new user, and
// the current auth type is not [AuthType::Local].
if sign_up_context.is_new {
if let Some(old_user) = old_user {
if old_user.user_profile.auth_type == AuthType::Local && !auth_type.is_local() {
let new_user = UserMigrationContext {
user_profile: new_user_profile.clone(),
session: new_session.clone(),
};
tracing::info!(
"Migrate old user data from {:?} to {:?}",
old_user.user_profile.id,
new_user.user_profile.id
);
match self.migrate_local_user_to_cloud(&old_user, &new_user).await {
Ok(folder_data) => sign_up_context.local_folder = folder_data,
Err(e) => tracing::error!("{:?}", e),
}
// close the old user db
let _ = self.database.close(old_user.session.user_id);
let mut sign_up_context = SignUpContext {
is_new: response.is_new_user,
local_folder: None,
};
if response.is_new_user {
if let Some(old_user) = migration_user {
let new_user = MigrationUser {
user_profile: user_profile.clone(),
session: new_session.clone(),
};
tracing::info!(
"Migrate old user data from {:?} to {:?}",
old_user.user_profile.uid,
new_user.user_profile.uid
);
match self.migrate_local_user_to_cloud(&old_user, &new_user).await {
Ok(folder_data) => sign_up_context.local_folder = folder_data,
Err(e) => tracing::error!("{:?}", e),
}
let _ = self.database.close(old_user.session.user_id);
}
}
self
.initialize_user_awareness(&new_session, user_awareness_source)
.await;
let _ = self
self
.user_status_callback
.read()
.await
.did_sign_up(
sign_up_context,
&new_user_profile,
user_profile,
&new_session.user_workspace,
&new_session.device_id,
)
.await;
Ok(new_user_profile)
.await?;
self
.save_auth_data(&response, auth_type, &new_session)
.await?;
Ok(())
}
#[tracing::instrument(level = "info", skip(self))]
@ -315,6 +325,7 @@ impl UserManager {
let session = self.get_session()?;
self.database.close(session.user_id)?;
self.set_current_session(None)?;
remove_cloud_config(&self.store_preferences);
let server = self.cloud_services.get_user_service()?;
tokio::spawn(async move {
@ -337,7 +348,8 @@ impl UserManager {
&self,
params: UpdateUserProfileParams,
) -> Result<(), FlowyError> {
let auth_type = params.auth_type.clone();
let old_user_profile = self.get_user_profile(params.uid, false).await?;
let auth_type = old_user_profile.auth_type.clone();
let session = self.get_session()?;
let changeset = UserTableChangeset::new(params.clone());
diesel_update_table!(
@ -347,13 +359,12 @@ impl UserManager {
);
let session = self.get_session()?;
let user_profile = self.get_user_profile(session.user_id, false).await?;
let profile_pb: UserProfilePB = user_profile.into();
let new_user_profile = self.get_user_profile(session.user_id, false).await?;
send_notification(
&session.user_id.to_string(),
UserNotification::DidUpdateUserProfile,
)
.payload(profile_pb)
.payload(UserProfilePB::from(new_user_profile))
.send();
self
.update_user(&auth_type, session.user_id, None, params)
@ -441,13 +452,6 @@ impl UserManager {
Ok(None)
}
pub fn save_supabase_config(&self, config: SupabaseConfiguration) {
self.cloud_services.set_supabase_config(&config);
let _ = self
.store_preferences
.set_object(SUPABASE_CONFIG_CACHE_KEY, config);
}
async fn update_user(
&self,
_auth_type: &AuthType,
@ -466,7 +470,7 @@ impl UserManager {
Ok(())
}
async fn save_user(&self, uid: i64, user: UserTable) -> Result<UserTable, FlowyError> {
async fn save_user(&self, uid: i64, user: UserTable) -> Result<(), FlowyError> {
let conn = self.db_connection(uid)?;
conn.immediate_transaction(|| {
// delete old user if exists
@ -474,12 +478,12 @@ impl UserManager {
.execute(&*conn)?;
let _ = diesel::insert_into(user_table::table)
.values(user.clone())
.values(user)
.execute(&*conn)?;
Ok::<(), FlowyError>(())
})?;
Ok(user)
Ok(())
}
pub(crate) fn set_current_session(&self, session: Option<Session>) -> Result<(), FlowyError> {
@ -520,6 +524,29 @@ impl UserManager {
}
}
async fn save_auth_data(
&self,
response: &impl UserAuthResponse,
auth_type: &AuthType,
session: &Session,
) -> Result<(), FlowyError> {
let user_profile = UserProfile::from((response, auth_type));
let uid = user_profile.uid;
self.add_historical_user(
uid,
response.device_id(),
response.user_name().to_string(),
auth_type,
self.user_dir(uid),
);
save_user_workspaces(uid, self.db_pool(uid)?, response.user_workspaces())?;
self
.save_user(uid, (user_profile, auth_type.clone()).into())
.await?;
self.set_current_session(Some(session.clone()))?;
Ok(())
}
fn set_collab_config(&self, session: &Session) {
let collab_builder = self.collab_builder.upgrade().unwrap();
collab_builder.set_sync_device(session.device_id.clone());
@ -529,21 +556,18 @@ impl UserManager {
async fn migrate_local_user_to_cloud(
&self,
old_user: &UserMigrationContext,
new_user: &UserMigrationContext,
old_user: &MigrationUser,
new_user: &MigrationUser,
) -> Result<Option<FolderData>, FlowyError> {
let old_collab_db = self.database.get_collab_db(old_user.session.user_id)?;
let new_collab_db = self.database.get_collab_db(new_user.session.user_id)?;
let folder_data = migration_user_to_cloud(old_user, &old_collab_db, new_user, &new_collab_db)?;
// Save the old user workspace setting.
save_user_workspaces(
old_user.session.user_id,
self.database.get_pool(old_user.session.user_id)?,
&[old_user.session.user_workspace.clone()],
)?;
Ok(folder_data)
}
}
pub fn get_supabase_config(
store_preference: &Arc<StorePreferences>,
) -> Option<SupabaseConfiguration> {
store_preference
.get_str(SUPABASE_CONFIG_CACHE_KEY)
.and_then(|s| serde_json::from_str(&s).ok())
.unwrap_or_else(|| SupabaseConfiguration::from_env().ok())
}

View File

@ -2,7 +2,8 @@ use flowy_user_deps::entities::UserProfile;
use crate::services::entities::Session;
pub struct UserMigrationContext {
#[derive(Clone)]
pub struct MigrationUser {
pub user_profile: UserProfile,
pub session: Session,
}

View File

@ -8,14 +8,14 @@ use collab_folder::core::{Folder, FolderData};
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use crate::migrations::UserMigrationContext;
use crate::migrations::MigrationUser;
/// Migrates the collab objects of the old user to the new user. Currently, this only happens when
/// the user is a local user and tries to use the AppFlowy cloud service.
pub fn migration_user_to_cloud(
old_user: &UserMigrationContext,
old_user: &MigrationUser,
old_collab_db: &Arc<RocksCollabDB>,
new_user: &UserMigrationContext,
new_user: &MigrationUser,
new_collab_db: &Arc<RocksCollabDB>,
) -> FlowyResult<Option<FolderData>> {
let mut folder_data = None;

View File

@ -10,6 +10,7 @@ pub(crate) enum UserNotification {
DidUserSignIn = 1,
DidUpdateUserProfile = 2,
DidUpdateUserWorkspaces = 3,
DidUpdateCloudConfig = 4,
}
impl std::convert::From<UserNotification> for i32 {

View File

@ -0,0 +1,48 @@
use std::sync::Arc;
use flowy_encrypt::generate_encrypt_secret;
use flowy_error::FlowyResult;
use flowy_sqlite::kv::StorePreferences;
use flowy_user_deps::cloud::UserCloudConfig;
const CLOUD_CONFIG_KEY: &str = "af_user_cloud_config";
pub fn generate_cloud_config(store_preference: &Arc<StorePreferences>) -> UserCloudConfig {
let config = UserCloudConfig::new(generate_encrypt_secret());
let key = cache_key_for_cloud_config();
store_preference.set_object(&key, config.clone()).unwrap();
config
}
pub fn remove_cloud_config(store_preference: &Arc<StorePreferences>) {
let key = cache_key_for_cloud_config();
store_preference.remove(&key);
}
pub fn save_cloud_config(
uid: i64,
store_preference: &Arc<StorePreferences>,
config: UserCloudConfig,
) -> FlowyResult<()> {
let encrypt_secret = config.encrypt_secret.clone();
let key = cache_key_for_cloud_config();
store_preference.set_object(&key, config)?;
store_preference.set_object(&format!("{}-encrypt-secret", uid), encrypt_secret)?;
Ok(())
}
fn cache_key_for_cloud_config() -> String {
CLOUD_CONFIG_KEY.to_string()
}
pub fn get_cloud_config(store_preference: &Arc<StorePreferences>) -> Option<UserCloudConfig> {
let key = cache_key_for_cloud_config();
store_preference.get_object::<UserCloudConfig>(&key)
}
pub fn get_encrypt_secret(store_preference: &Arc<StorePreferences>) -> Option<String> {
let key = cache_key_for_cloud_config();
store_preference
.get_object::<UserCloudConfig>(&key)
.map(|config| config.encrypt_secret)
}
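The module keeps two kinds of entries: one global config record under `af_user_cloud_config` and a per-user copy of the secret under a `"{uid}-encrypt-secret"` key. A small sketch of that key layout, with a plain HashMap as a hypothetical stand-in for `StorePreferences` and placeholder values:

use std::collections::HashMap;

const CLOUD_CONFIG_KEY: &str = "af_user_cloud_config";

fn encrypt_secret_key(uid: i64) -> String {
  format!("{}-encrypt-secret", uid)
}

fn main() {
  // Hypothetical in-memory stand-in for StorePreferences.
  let mut store: HashMap<String, String> = HashMap::new();
  store.insert(CLOUD_CONFIG_KEY.to_string(), r#"{"enable_sync":true,"enable_encrypt":true}"#.to_string());
  store.insert(encrypt_secret_key(42), "generated-secret".to_string());

  // Loading mirrors get_cloud_config / get_encrypt_secret above.
  assert!(store.contains_key(CLOUD_CONFIG_KEY));
  assert_eq!(store.get(&encrypt_secret_key(42)).map(String::as_str), Some("generated-secret"));
}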

View File

@ -7,10 +7,11 @@ use serde::de::{Deserializer, MapAccess, Visitor};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use flowy_user_deps::entities::AuthType;
use flowy_user_deps::entities::{SignInResponse, SignUpResponse, UserWorkspace};
use flowy_user_deps::entities::{AuthType, UserAuthResponse};
use flowy_user_deps::entities::{SignUpResponse, UserProfile, UserWorkspace};
use crate::entities::AuthTypePB;
use crate::migrations::MigrationUser;
#[derive(Debug, Clone, Serialize)]
pub struct Session {
@ -89,12 +90,15 @@ impl<'de> Deserialize<'de> for Session {
}
}
impl std::convert::From<SignInResponse> for Session {
fn from(resp: SignInResponse) -> Self {
Session {
user_id: resp.user_id,
device_id: resp.device_id,
user_workspace: resp.latest_workspace,
impl<T> From<&T> for Session
where
T: UserAuthResponse,
{
fn from(value: &T) -> Self {
Self {
user_id: value.user_id(),
device_id: value.device_id().to_string(),
user_workspace: value.latest_workspace().clone(),
}
}
}
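The blanket `From<&T>` impl only needs the accessors used above and in `save_auth_data`, so both sign-in and sign-up responses can feed the same code path. A sketch of the trait surface this code assumes; the real `UserAuthResponse` in flowy_user_deps may declare more methods, and the types here are placeholders:

// Placeholder workspace type; the real one is flowy_user_deps::entities::UserWorkspace.
#[derive(Clone)]
struct Workspace {
  id: String,
}

trait AuthResponse {
  fn user_id(&self) -> i64;
  fn user_name(&self) -> &str;
  fn device_id(&self) -> &str;
  fn latest_workspace(&self) -> &Workspace;
  fn user_workspaces(&self) -> &[Workspace];
}

struct SessionLike {
  user_id: i64,
  device_id: String,
  workspace: Workspace,
}

impl<T: AuthResponse> From<&T> for SessionLike {
  fn from(value: &T) -> Self {
    Self {
      user_id: value.user_id(),
      device_id: value.device_id().to_string(),
      workspace: value.latest_workspace().clone(),
    }
  }
}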
@ -111,16 +115,6 @@ impl std::convert::From<Session> for String {
}
}
impl From<&SignUpResponse> for Session {
fn from(value: &SignUpResponse) -> Self {
Session {
user_id: value.user_id,
device_id: value.device_id.clone(),
user_workspace: value.latest_workspace.clone(),
}
}
}
#[cfg(test)]
mod tests {
use serde_json::json;
@ -208,3 +202,11 @@ pub struct HistoricalUser {
pub device_id: String,
}
const DEFAULT_AUTH_TYPE: fn() -> AuthType = || AuthType::Local;
#[derive(Clone)]
pub(crate) struct ResumableSignUp {
pub user_profile: UserProfile,
pub response: SignUpResponse,
pub auth_type: AuthType,
pub migration_user: Option<MigrationUser>,
}

View File

@ -7,11 +7,26 @@ use flowy_user_deps::entities::{AuthType, UserWorkspace};
use lib_infra::util::timestamp;
use crate::manager::UserManager;
use crate::migrations::MigrationUser;
use crate::services::entities::{HistoricalUser, HistoricalUsers, Session};
use crate::services::user_workspace_sql::UserWorkspaceTable;
const HISTORICAL_USER: &str = "af_historical_users";
impl UserManager {
pub async fn get_migration_user(&self, auth_type: &AuthType) -> Option<MigrationUser> {
// Only migrate the data if the user was logged in as a guest and is signing up as a new user, and
// the current auth type is not [AuthType::Local].
let session = self.get_session().ok()?;
let user_profile = self.get_user_profile(session.user_id, false).await.ok()?;
if user_profile.auth_type == AuthType::Local && !auth_type.is_local() {
Some(MigrationUser {
user_profile,
session,
})
} else {
None
}
}
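The guard above captures the only migration case: a local (guest) account upgrading to a non-local auth type. A condensed sketch of that predicate, with a minimal `AuthKind` standing in for the crate's `AuthType`:

#[derive(PartialEq)]
enum AuthKind {
  Local,
  Supabase,
}

impl AuthKind {
  fn is_local(&self) -> bool {
    *self == AuthKind::Local
  }
}

/// Migrate only when the existing profile is local and the new sign-up is not.
fn should_migrate(old: &AuthKind, new: &AuthKind) -> bool {
  old.is_local() && !new.is_local()
}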
/// Logs a user's details for historical tracking.
///
/// This function adds a user's details to a local historical tracking system, useful for
@ -24,7 +39,7 @@ impl UserManager {
/// - `auth_type`: The type of authentication used.
/// - `storage_path`: Path where user data is stored.
///
pub fn log_historical_user(
pub fn add_historical_user(
&self,
uid: i64,
device_id: &str,
@ -67,12 +82,14 @@ impl UserManager {
/// This function facilitates the re-opening of a user's session from historical tracking.
/// It retrieves the user's workspace and establishes a new session for the user.
///
pub fn open_historical_user(
pub async fn open_historical_user(
&self,
uid: i64,
device_id: String,
auth_type: AuthType,
) -> FlowyResult<()> {
debug_assert!(auth_type.is_local());
self.update_auth_type(&auth_type).await;
let conn = self.db_connection(uid)?;
let row = user_workspace_table::dsl::user_workspace_table
.filter(user_workspace_table::uid.eq(uid))
@ -83,8 +100,6 @@ impl UserManager {
device_id,
user_workspace,
};
debug_assert!(auth_type.is_local());
self.cloud_services.set_auth_type(auth_type);
self.set_current_session(Some(session))?;
Ok(())
}

Some files were not shown because too many files have changed in this diff.