feat: Chat app download (#5835)

* chore: download chat app

* chore: download app

* chore: disable local ai in billing

* chore: remove watcher

* chore: flutter analyzer
This commit is contained in:
Nathan.fooo 2024-07-30 17:32:30 +08:00 committed by GitHub
parent d5a5a64fcf
commit 5250a151c8
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
31 changed files with 886 additions and 422 deletions

View File

@ -0,0 +1,41 @@
import 'dart:async';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
import 'package:url_launcher/url_launcher.dart' show launchUrl;
part 'download_offline_ai_app_bloc.freezed.dart';
class DownloadOfflineAIBloc
extends Bloc<DownloadOfflineAIEvent, DownloadOfflineAIState> {
DownloadOfflineAIBloc() : super(const DownloadOfflineAIState()) {
on<DownloadOfflineAIEvent>(_handleEvent);
}
Future<void> _handleEvent(
DownloadOfflineAIEvent event,
Emitter<DownloadOfflineAIState> emit,
) async {
await event.when(
started: () async {
final result = await ChatEventGetOfflineAIAppLink().send();
await result.fold(
(app) async {
await launchUrl(Uri.parse(app.link));
},
(err) {},
);
},
);
}
}
@freezed
class DownloadOfflineAIEvent with _$DownloadOfflineAIEvent {
const factory DownloadOfflineAIEvent.started() = _Started;
}
@freezed
class DownloadOfflineAIState with _$DownloadOfflineAIState {
const factory DownloadOfflineAIState() = _DownloadOfflineAIState;
}

View File

@ -20,7 +20,7 @@ class LocalAIChatSettingBloc
listener.start( listener.start(
stateCallback: (newState) { stateCallback: (newState) {
if (!isClosed) { if (!isClosed) {
add(LocalAIChatSettingEvent.updateLLMRunningState(newState.state)); add(LocalAIChatSettingEvent.updatePluginState(newState));
} }
}, },
); );
@ -46,14 +46,14 @@ class LocalAIChatSettingBloc
modelInfo: modelInfo, modelInfo: modelInfo,
models: modelInfo.models, models: modelInfo.models,
selectedLLMModel: modelInfo.selectedModel, selectedLLMModel: modelInfo.selectedModel,
fetchModelInfoState: const LoadingState.finish(), aiModelProgress: const AIModelProgress.finish(),
), ),
); );
}, },
(err) { (err) {
emit( emit(
state.copyWith( state.copyWith(
fetchModelInfoState: LoadingState.finish(error: err), aiModelProgress: AIModelProgress.finish(error: err),
), ),
); );
}, },
@ -68,7 +68,7 @@ class LocalAIChatSettingBloc
emit( emit(
state.copyWith( state.copyWith(
selectedLLMModel: llmModel, selectedLLMModel: llmModel,
localAIInfo: LocalAIProgress.showDownload( progressIndicator: LocalAIProgress.showDownload(
llmResource, llmResource,
llmModel, llmModel,
), ),
@ -80,7 +80,7 @@ class LocalAIChatSettingBloc
state.copyWith( state.copyWith(
selectedLLMModel: llmModel, selectedLLMModel: llmModel,
selectLLMState: const LoadingState.finish(), selectLLMState: const LoadingState.finish(),
localAIInfo: const LocalAIProgress.checkPluginState(), progressIndicator: const LocalAIProgress.checkPluginState(),
), ),
); );
} }
@ -106,7 +106,7 @@ class LocalAIChatSettingBloc
if (llmResource.pendingResources.isEmpty) { if (llmResource.pendingResources.isEmpty) {
emit( emit(
state.copyWith( state.copyWith(
localAIInfo: const LocalAIProgress.checkPluginState(), progressIndicator: const LocalAIProgress.checkPluginState(),
), ),
); );
} else { } else {
@ -115,7 +115,7 @@ class LocalAIChatSettingBloc
if (llmResource.isDownloading) { if (llmResource.isDownloading) {
emit( emit(
state.copyWith( state.copyWith(
localAIInfo: progressIndicator:
LocalAIProgress.startDownloading(state.selectedLLMModel!), LocalAIProgress.startDownloading(state.selectedLLMModel!),
selectLLMState: const LoadingState.finish(), selectLLMState: const LoadingState.finish(),
), ),
@ -124,7 +124,7 @@ class LocalAIChatSettingBloc
} else { } else {
emit( emit(
state.copyWith( state.copyWith(
localAIInfo: LocalAIProgress.showDownload( progressIndicator: LocalAIProgress.showDownload(
llmResource, llmResource,
state.selectedLLMModel!, state.selectedLLMModel!,
), ),
@ -138,7 +138,7 @@ class LocalAIChatSettingBloc
startDownloadModel: (LLMModelPB llmModel) { startDownloadModel: (LLMModelPB llmModel) {
emit( emit(
state.copyWith( state.copyWith(
localAIInfo: LocalAIProgress.startDownloading(llmModel), progressIndicator: LocalAIProgress.startDownloading(llmModel),
selectLLMState: const LoadingState.finish(), selectLLMState: const LoadingState.finish(),
), ),
); );
@ -149,19 +149,39 @@ class LocalAIChatSettingBloc
}, },
finishDownload: () async { finishDownload: () async {
emit( emit(
state.copyWith(localAIInfo: const LocalAIProgress.finishDownload()), state.copyWith(
progressIndicator: const LocalAIProgress.finishDownload(),
),
); );
}, },
updateLLMRunningState: (RunningStatePB newRunningState) { updatePluginState: (LocalAIPluginStatePB pluginState) {
if (newRunningState == RunningStatePB.Stopped) { if (pluginState.offlineAiReady) {
ChatEventRefreshLocalAIModelInfo().send().then((result) {
if (!isClosed) {
add(LocalAIChatSettingEvent.didLoadModelInfo(result));
}
});
if (pluginState.state == RunningStatePB.Stopped) {
emit( emit(
state.copyWith( state.copyWith(
runningState: newRunningState, runningState: pluginState.state,
localAIInfo: const LocalAIProgress.checkPluginState(), progressIndicator: const LocalAIProgress.checkPluginState(),
), ),
); );
} else { } else {
emit(state.copyWith(runningState: newRunningState)); emit(
state.copyWith(
runningState: pluginState.state,
),
);
}
} else {
emit(
state.copyWith(
progressIndicator: const LocalAIProgress.startOfflineAIApp(),
),
);
} }
}, },
); );
@ -183,11 +203,22 @@ class LocalAIChatSettingBloc
/// Handles the event to fetch local AI settings when the application starts. /// Handles the event to fetch local AI settings when the application starts.
Future<void> _handleStarted() async { Future<void> _handleStarted() async {
final result = await ChatEventGetLocalAIPluginState().send();
result.fold(
(pluginState) async {
if (!isClosed) {
add(LocalAIChatSettingEvent.updatePluginState(pluginState));
if (pluginState.offlineAiReady) {
final result = await ChatEventRefreshLocalAIModelInfo().send(); final result = await ChatEventRefreshLocalAIModelInfo().send();
if (!isClosed) { if (!isClosed) {
add(LocalAIChatSettingEvent.didLoadModelInfo(result)); add(LocalAIChatSettingEvent.didLoadModelInfo(result));
} }
} }
}
},
(err) => Log.error(err.toString()),
);
}
@override @override
Future<void> close() async { Future<void> close() async {
@ -214,9 +245,9 @@ class LocalAIChatSettingEvent with _$LocalAIChatSettingEvent {
const factory LocalAIChatSettingEvent.cancelDownload() = _CancelDownload; const factory LocalAIChatSettingEvent.cancelDownload() = _CancelDownload;
const factory LocalAIChatSettingEvent.finishDownload() = _FinishDownload; const factory LocalAIChatSettingEvent.finishDownload() = _FinishDownload;
const factory LocalAIChatSettingEvent.updateLLMRunningState( const factory LocalAIChatSettingEvent.updatePluginState(
RunningStatePB newRunningState, LocalAIPluginStatePB pluginState,
) = _RunningState; ) = _PluginState;
} }
@freezed @freezed
@ -224,29 +255,16 @@ class LocalAIChatSettingState with _$LocalAIChatSettingState {
const factory LocalAIChatSettingState({ const factory LocalAIChatSettingState({
LLMModelInfoPB? modelInfo, LLMModelInfoPB? modelInfo,
LLMModelPB? selectedLLMModel, LLMModelPB? selectedLLMModel,
LocalAIProgress? localAIInfo, LocalAIProgress? progressIndicator,
@Default(LoadingState.loading()) LoadingState fetchModelInfoState, @Default(AIModelProgress.init()) AIModelProgress aiModelProgress,
@Default(LoadingState.loading()) LoadingState selectLLMState, @Default(LoadingState.loading()) LoadingState selectLLMState,
@Default([]) List<LLMModelPB> models, @Default([]) List<LLMModelPB> models,
@Default(RunningStatePB.Connecting) RunningStatePB runningState, @Default(RunningStatePB.Connecting) RunningStatePB runningState,
}) = _LocalAIChatSettingState; }) = _LocalAIChatSettingState;
} }
// @freezed
// class LocalChatAIStateIndicator with _$LocalChatAIStateIndicator {
// // when start downloading the model
// const factory LocalChatAIStateIndicator.error(FlowyError error) = _OnError;
// const factory LocalChatAIStateIndicator.ready(bool isEnabled) = _Ready;
// }
@freezed @freezed
class LocalAIProgress with _$LocalAIProgress { class LocalAIProgress with _$LocalAIProgress {
// when user select a new model, it will call requestDownload
const factory LocalAIProgress.requestDownloadInfo(
LocalModelResourcePB llmResource,
LLMModelPB llmModel,
) = _RequestDownload;
// when user comes back to the setting page, it will auto detect current llm state // when user comes back to the setting page, it will auto detect current llm state
const factory LocalAIProgress.showDownload( const factory LocalAIProgress.showDownload(
LocalModelResourcePB llmResource, LocalModelResourcePB llmResource,
@ -257,5 +275,13 @@ class LocalAIProgress with _$LocalAIProgress {
const factory LocalAIProgress.startDownloading(LLMModelPB llmModel) = const factory LocalAIProgress.startDownloading(LLMModelPB llmModel) =
_Downloading; _Downloading;
const factory LocalAIProgress.finishDownload() = _Finish; const factory LocalAIProgress.finishDownload() = _Finish;
const factory LocalAIProgress.checkPluginState() = _PluginState; const factory LocalAIProgress.checkPluginState() = _CheckPluginState;
const factory LocalAIProgress.startOfflineAIApp() = _StartOfflineAIApp;
}
@freezed
class AIModelProgress with _$AIModelProgress {
const factory AIModelProgress.init() = _AIModelProgressInit;
const factory AIModelProgress.loading() = _AIModelDownloading;
const factory AIModelProgress.finish({FlowyError? error}) = _AIModelFinish;
} }

View File

@ -7,6 +7,7 @@ import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart'; import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:bloc/bloc.dart'; import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart'; import 'package:freezed_annotation/freezed_annotation.dart';
import 'package:url_launcher/url_launcher.dart' show launchUrl;
part 'plugin_state_bloc.freezed.dart'; part 'plugin_state_bloc.freezed.dart';
class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> { class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
@ -53,20 +54,32 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
); );
}, },
updateState: (LocalAIPluginStatePB pluginState) { updateState: (LocalAIPluginStatePB pluginState) {
// if the offline ai is not started, ask user to start it
if (pluginState.offlineAiReady) {
// Check state of the plugin
switch (pluginState.state) { switch (pluginState.state) {
case RunningStatePB.Connecting: case RunningStatePB.Connecting:
emit( emit(
const PluginStateState(action: PluginStateAction.loadingPlugin()), const PluginStateState(
action: PluginStateAction.loadingPlugin(),
),
); );
case RunningStatePB.Running: case RunningStatePB.Running:
emit(const PluginStateState(action: PluginStateAction.ready())); emit(const PluginStateState(action: PluginStateAction.ready()));
break; break;
default: default:
emit( emit(
state.copyWith(action: const PluginStateAction.restart()), state.copyWith(action: const PluginStateAction.restartPlugin()),
); );
break; break;
} }
} else {
emit(
const PluginStateState(
action: PluginStateAction.startAIOfflineApp(),
),
);
}
}, },
restartLocalAI: () async { restartLocalAI: () async {
emit( emit(
@ -83,6 +96,15 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
(err) => Log.error(err.toString()), (err) => Log.error(err.toString()),
); );
}, },
downloadOfflineAIApp: () async {
final result = await ChatEventGetOfflineAIAppLink().send();
await result.fold(
(app) async {
await launchUrl(Uri.parse(app.link));
},
(err) {},
);
},
); );
} }
} }
@ -95,6 +117,7 @@ class PluginStateEvent with _$PluginStateEvent {
const factory PluginStateEvent.restartLocalAI() = _RestartLocalAI; const factory PluginStateEvent.restartLocalAI() = _RestartLocalAI;
const factory PluginStateEvent.openModelDirectory() = const factory PluginStateEvent.openModelDirectory() =
_OpenModelStorageDirectory; _OpenModelStorageDirectory;
const factory PluginStateEvent.downloadOfflineAIApp() = _DownloadOfflineAIApp;
} }
@freezed @freezed
@ -109,5 +132,6 @@ class PluginStateAction with _$PluginStateAction {
const factory PluginStateAction.init() = _Init; const factory PluginStateAction.init() = _Init;
const factory PluginStateAction.loadingPlugin() = _LoadingPlugin; const factory PluginStateAction.loadingPlugin() = _LoadingPlugin;
const factory PluginStateAction.ready() = _Ready; const factory PluginStateAction.ready() = _Ready;
const factory PluginStateAction.restart() = _Restart; const factory PluginStateAction.restartPlugin() = _RestartPlugin;
const factory PluginStateAction.startAIOfflineApp() = _StartAIOfflineApp;
} }

View File

@ -292,6 +292,7 @@ class ConfirmPopup extends StatefulWidget {
required this.title, required this.title,
required this.description, required this.description,
required this.onConfirm, required this.onConfirm,
this.onCancel,
this.confirmLabel, this.confirmLabel,
this.confirmButtonColor, this.confirmButtonColor,
}); });
@ -299,6 +300,7 @@ class ConfirmPopup extends StatefulWidget {
final String title; final String title;
final String description; final String description;
final VoidCallback onConfirm; final VoidCallback onConfirm;
final VoidCallback? onCancel;
final Color? confirmButtonColor; final Color? confirmButtonColor;
final ConfirmPopupStyle style; final ConfirmPopupStyle style;
@ -392,7 +394,10 @@ class _ConfirmPopupState extends State<ConfirmPopup> {
); );
case ConfirmPopupStyle.cancelAndOk: case ConfirmPopupStyle.cancelAndOk:
return SpaceCancelOrConfirmButton( return SpaceCancelOrConfirmButton(
onCancel: () => Navigator.of(context).pop(), onCancel: () {
widget.onCancel?.call();
Navigator.of(context).pop();
},
onConfirm: () { onConfirm: () {
widget.onConfirm(); widget.onConfirm();
Navigator.of(context).pop(); Navigator.of(context).pop();

View File

@ -38,7 +38,9 @@ class InitLocalAIIndicator extends StatelessWidget {
], ],
); );
case RunningStatePB.Running: case RunningStatePB.Running:
return Row( return SizedBox(
height: 30,
child: Row(
children: [ children: [
const HSpace(8), const HSpace(8),
const FlowySvg( const FlowySvg(
@ -52,6 +54,7 @@ class InitLocalAIIndicator extends StatelessWidget {
color: const Color(0xFF1E4620), color: const Color(0xFF1E4620),
), ),
], ],
),
); );
case RunningStatePB.Stopped: case RunningStatePB.Stopped:
return Row( return Row(

View File

@ -1,6 +1,7 @@
import 'package:appflowy/generated/flowy_svgs.g.dart'; import 'package:appflowy/generated/flowy_svgs.g.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_bloc.dart'; import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_bloc.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_toggle_bloc.dart'; import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_toggle_bloc.dart';
import 'package:appflowy/workspace/presentation/home/menu/sidebar/space/shared_widget.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/downloading_model.dart'; import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/downloading_model.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/init_local_ai.dart'; import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/init_local_ai.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/plugin_state.dart'; import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/plugin_state.dart';
@ -67,14 +68,22 @@ class LocalAIChatSetting extends StatelessWidget {
tapBodyToExpand: false, tapBodyToExpand: false,
tapHeaderToExpand: false, tapHeaderToExpand: false,
), ),
header: const LocalAIChatSettingHeader(), header: const SizedBox.shrink(),
collapsed: const SizedBox.shrink(), collapsed: const SizedBox.shrink(),
expanded: Padding( expanded: Padding(
padding: const EdgeInsets.symmetric(vertical: 6), padding: const EdgeInsets.symmetric(vertical: 6),
// child: _LocalLLMInfoWidget(),
child: Column( child: Column(
crossAxisAlignment: CrossAxisAlignment.start, crossAxisAlignment: CrossAxisAlignment.start,
children: [ children: [
Row( BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
builder: (context, state) {
// If the progress indicator is startOfflineAIApp, then don't show the LLM model.
if (state.progressIndicator ==
const LocalAIProgress.startOfflineAIApp()) {
return const SizedBox.shrink();
} else {
return Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween, mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [ children: [
Flexible( Flexible(
@ -84,36 +93,28 @@ class LocalAIChatSetting extends StatelessWidget {
), ),
), ),
const Spacer(), const Spacer(),
BlocBuilder<LocalAIChatSettingBloc, state.aiModelProgress.when(
LocalAIChatSettingState>( init: () => const SizedBox.shrink(),
builder: (context, state) { loading: () {
return state.fetchModelInfoState.when( return const Expanded(
loading: () => Expanded(
child: Row( child: Row(
children: [ children: [
Flexible( Spacer(),
child: FlowyText( CircularProgressIndicator.adaptive(),
LocaleKeys
.settings_aiPage_keys_fetchLocalModel
.tr(),
),
),
const Spacer(),
const CircularProgressIndicator.adaptive(),
], ],
), ),
),
finish: (err) {
return (err == null)
? const _SelectLocalModelDropdownMenu()
: const SizedBox.shrink();
},
); );
}, },
finish: (err) => (err == null)
? const _SelectLocalModelDropdownMenu()
: const SizedBox.shrink(),
), ),
], ],
);
}
},
), ),
const IntrinsicHeight(child: _LocalLLMInfoWidget()), const IntrinsicHeight(child: _LocalAIStateWidget()),
], ],
), ),
), ),
@ -200,8 +201,8 @@ class _SelectLocalModelDropdownMenu extends StatelessWidget {
} }
} }
class _LocalLLMInfoWidget extends StatelessWidget { class _LocalAIStateWidget extends StatelessWidget {
const _LocalLLMInfoWidget(); const _LocalAIStateWidget();
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
@ -210,15 +211,8 @@ class _LocalLLMInfoWidget extends StatelessWidget {
final error = errorFromState(state); final error = errorFromState(state);
if (error == null) { if (error == null) {
// If the error is null, handle selected llm model. // If the error is null, handle selected llm model.
if (state.localAIInfo != null) { if (state.progressIndicator != null) {
final child = state.localAIInfo!.when( final child = state.progressIndicator!.when(
requestDownloadInfo: (
LocalModelResourcePB llmResource,
LLMModelPB llmModel,
) {
_showDownloadDialog(context, llmResource, llmModel);
return const SizedBox.shrink();
},
showDownload: ( showDownload: (
LocalModelResourcePB llmResource, LocalModelResourcePB llmResource,
LLMModelPB llmModel, LLMModelPB llmModel,
@ -241,6 +235,13 @@ class _LocalLLMInfoWidget extends StatelessWidget {
}, },
finishDownload: () => const InitLocalAIIndicator(), finishDownload: () => const InitLocalAIIndicator(),
checkPluginState: () => const PluginStateIndicator(), checkPluginState: () => const PluginStateIndicator(),
startOfflineAIApp: () => OpenOrDownloadOfflineAIApp(
onRetry: () {
context
.read<LocalAIChatSettingBloc>()
.add(const LocalAIChatSettingEvent.refreshAISetting());
},
),
); );
return Padding( return Padding(
@ -266,44 +267,11 @@ class _LocalLLMInfoWidget extends StatelessWidget {
); );
} }
void _showDownloadDialog(
BuildContext context,
LocalModelResourcePB llmResource,
LLMModelPB llmModel,
) {
WidgetsBinding.instance.addPostFrameCallback(
(_) {
showDialog(
context: context,
barrierDismissible: false,
useRootNavigator: false,
builder: (dialogContext) {
return _LLMModelDownloadDialog(
llmResource: llmResource,
onOkPressed: () {
context.read<LocalAIChatSettingBloc>().add(
LocalAIChatSettingEvent.startDownloadModel(
llmModel,
),
);
},
onCancelPressed: () {
context.read<LocalAIChatSettingBloc>().add(
const LocalAIChatSettingEvent.cancelDownload(),
);
},
);
},
);
},
debugLabel: 'localModel.download',
);
}
FlowyError? errorFromState(LocalAIChatSettingState state) { FlowyError? errorFromState(LocalAIChatSettingState state) {
final err = state.fetchModelInfoState.when( final err = state.aiModelProgress.when(
loading: () => null, loading: () => null,
finish: (err) => err, finish: (err) => err,
init: () {},
); );
if (err == null) { if (err == null) {
@ -317,39 +285,48 @@ class _LocalLLMInfoWidget extends StatelessWidget {
} }
} }
class _LLMModelDownloadDialog extends StatelessWidget { void _showDownloadDialog(
const _LLMModelDownloadDialog({ BuildContext context,
required this.llmResource, LocalModelResourcePB llmResource,
required this.onOkPressed, LLMModelPB llmModel,
required this.onCancelPressed, ) {
}); if (llmResource.pendingResources.isEmpty) {
final LocalModelResourcePB llmResource; return;
final VoidCallback onOkPressed;
final VoidCallback onCancelPressed;
@override
Widget build(BuildContext context) {
return NavigatorOkCancelDialog(
title: LocaleKeys.settings_aiPage_keys_downloadLLMPrompt.tr(
args: [
llmResource.pendingResources[0].name,
],
),
message: llmResource.pendingResources[0].fileSize == 0
? ""
: LocaleKeys.settings_aiPage_keys_downloadLLMPromptDetail.tr(
args: [
llmResource.pendingResources[0].name,
llmResource.pendingResources[0].fileSize.toString(),
],
),
okTitle: LocaleKeys.button_confirm.tr(),
cancelTitle: LocaleKeys.button_cancel.tr(),
onOkPressed: onOkPressed,
onCancelPressed: onCancelPressed,
titleUpperCase: false,
);
} }
final res = llmResource.pendingResources.first;
String desc = "";
switch (res.resType) {
case PendingResourceTypePB.AIModel:
desc = LocaleKeys.settings_aiPage_keys_downloadLLMPromptDetail.tr(
args: [
llmResource.pendingResources[0].name,
llmResource.pendingResources[0].fileSize,
],
);
break;
case PendingResourceTypePB.OfflineApp:
desc = LocaleKeys.settings_aiPage_keys_downloadAppFlowyOfflineAI.tr();
break;
}
showConfirmDialog(
context: context,
style: ConfirmPopupStyle.cancelAndOk,
title: LocaleKeys.settings_aiPage_keys_downloadLLMPrompt.tr(
args: [res.name],
),
description: desc,
confirmLabel: LocaleKeys.button_confirm.tr(),
onConfirm: () => context.read<LocalAIChatSettingBloc>().add(
LocalAIChatSettingEvent.startDownloadModel(
llmModel,
),
),
onCancel: () => context.read<LocalAIChatSettingBloc>().add(
const LocalAIChatSettingEvent.cancelDownload(),
),
);
} }
class _ShowDownloadIndicator extends StatelessWidget { class _ShowDownloadIndicator extends StatelessWidget {
@ -381,29 +358,7 @@ class _ShowDownloadIndicator extends StatelessWidget {
color: Color(0xFF005483), color: Color(0xFF005483),
), ),
onTap: () { onTap: () {
showDialog( _showDownloadDialog(context, llmResource, llmModel);
context: context,
barrierDismissible: false,
useRootNavigator: false,
builder: (dialogContext) {
return _LLMModelDownloadDialog(
llmResource: llmResource,
onOkPressed: () {
context.read<LocalAIChatSettingBloc>().add(
LocalAIChatSettingEvent.startDownloadModel(
llmModel,
),
);
},
onCancelPressed: () {
context.read<LocalAIChatSettingBloc>().add(
const LocalAIChatSettingEvent
.cancelDownload(),
);
},
);
},
);
}, },
), ),
), ),

View File

@ -1,10 +1,14 @@
import 'package:appflowy/core/helpers/url_launcher.dart';
import 'package:appflowy/generated/flowy_svgs.g.dart'; import 'package:appflowy/generated/flowy_svgs.g.dart';
import 'package:appflowy/generated/locale_keys.g.dart'; import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/workspace/application/settings/ai/download_offline_ai_app_bloc.dart';
import 'package:appflowy/workspace/application/settings/ai/plugin_state_bloc.dart'; import 'package:appflowy/workspace/application/settings/ai/plugin_state_bloc.dart';
import 'package:easy_localization/easy_localization.dart'; import 'package:easy_localization/easy_localization.dart';
import 'package:flowy_infra/size.dart';
import 'package:flowy_infra_ui/style_widget/button.dart'; import 'package:flowy_infra_ui/style_widget/button.dart';
import 'package:flowy_infra_ui/style_widget/text.dart'; import 'package:flowy_infra_ui/style_widget/text.dart';
import 'package:flowy_infra_ui/widget/spacing.dart'; import 'package:flowy_infra_ui/widget/spacing.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart'; import 'package:flutter_bloc/flutter_bloc.dart';
@ -21,8 +25,15 @@ class PluginStateIndicator extends StatelessWidget {
return state.action.when( return state.action.when(
init: () => const _InitPlugin(), init: () => const _InitPlugin(),
ready: () => const _LocalAIReadyToUse(), ready: () => const _LocalAIReadyToUse(),
restart: () => const _ReloadButton(), restartPlugin: () => const _ReloadButton(),
loadingPlugin: () => const _InitPlugin(), loadingPlugin: () => const _InitPlugin(),
startAIOfflineApp: () => OpenOrDownloadOfflineAIApp(
onRetry: () {
context
.read<PluginStateBloc>()
.add(const PluginStateEvent.started());
},
),
); );
}, },
), ),
@ -35,9 +46,15 @@ class _InitPlugin extends StatelessWidget {
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
return const SizedBox( return Row(
children: [
FlowyText(LocaleKeys.settings_aiPage_keys_localAIStart.tr()),
const Spacer(),
const SizedBox(
height: 20, height: 20,
child: CircularProgressIndicator.adaptive(), child: CircularProgressIndicator.adaptive(),
),
],
); );
} }
} }
@ -124,3 +141,106 @@ class _LocalAIReadyToUse extends StatelessWidget {
); );
} }
} }
class OpenOrDownloadOfflineAIApp extends StatelessWidget {
const OpenOrDownloadOfflineAIApp({required this.onRetry, super.key});
final VoidCallback onRetry;
@override
Widget build(BuildContext context) {
return BlocProvider(
create: (context) => DownloadOfflineAIBloc(),
child: BlocBuilder<DownloadOfflineAIBloc, DownloadOfflineAIState>(
builder: (context, state) {
return Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
RichText(
maxLines: 3,
textAlign: TextAlign.left,
text: TextSpan(
children: <TextSpan>[
TextSpan(
text:
"${LocaleKeys.settings_aiPage_keys_offlineAIInstruction1.tr()} ",
style: Theme.of(context)
.textTheme
.bodySmall!
.copyWith(height: 1.5),
),
TextSpan(
text:
" ${LocaleKeys.settings_aiPage_keys_offlineAIInstruction2.tr()} ",
style: Theme.of(context).textTheme.bodyMedium!.copyWith(
fontSize: FontSizes.s14,
color: Theme.of(context).colorScheme.primary,
height: 1.5,
),
recognizer: TapGestureRecognizer()
..onTap = () => afLaunchUrlString(
"https://docs.appflowy.io/docs/appflowy/product/appflowy-ai-offline",
),
),
TextSpan(
text:
" ${LocaleKeys.settings_aiPage_keys_offlineAIInstruction3.tr()} ",
style: Theme.of(context)
.textTheme
.bodySmall!
.copyWith(height: 1.5),
),
TextSpan(
text:
"${LocaleKeys.settings_aiPage_keys_offlineAIDownload1.tr()} ",
style: Theme.of(context)
.textTheme
.bodySmall!
.copyWith(height: 1.5),
),
TextSpan(
text:
" ${LocaleKeys.settings_aiPage_keys_offlineAIDownload2.tr()} ",
style: Theme.of(context).textTheme.bodyMedium!.copyWith(
fontSize: FontSizes.s14,
color: Theme.of(context).colorScheme.primary,
height: 1.5,
),
recognizer: TapGestureRecognizer()
..onTap =
() => context.read<DownloadOfflineAIBloc>().add(
const DownloadOfflineAIEvent.started(),
),
),
TextSpan(
text:
" ${LocaleKeys.settings_aiPage_keys_offlineAIDownload3.tr()} ",
style: Theme.of(context)
.textTheme
.bodySmall!
.copyWith(height: 1.5),
),
],
),
),
const SizedBox(
height: 6,
), // Replaced VSpace with SizedBox for simplicity
SizedBox(
height: 30,
child: FlowyButton(
useIntrinsicWidth: true,
margin: const EdgeInsets.symmetric(horizontal: 12),
text: FlowyText(
LocaleKeys.settings_aiPage_keys_activeOfflineAI.tr(),
),
onTap: onRetry,
),
),
],
);
},
),
);
}
}

View File

@ -7,7 +7,6 @@ import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/m
import 'package:appflowy/workspace/presentation/settings/widgets/setting_appflowy_cloud.dart'; import 'package:appflowy/workspace/presentation/settings/widgets/setting_appflowy_cloud.dart';
import 'package:flowy_infra/theme_extension.dart'; import 'package:flowy_infra/theme_extension.dart';
import 'package:flowy_infra_ui/widget/spacing.dart'; import 'package:flowy_infra_ui/widget/spacing.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:appflowy/generated/locale_keys.g.dart'; import 'package:appflowy/generated/locale_keys.g.dart';
@ -129,7 +128,7 @@ class _LocalAIOnBoarding extends StatelessWidget {
child: BlocBuilder<LocalAIOnBoardingBloc, LocalAIOnBoardingState>( child: BlocBuilder<LocalAIOnBoardingBloc, LocalAIOnBoardingState>(
builder: (context, state) { builder: (context, state) {
// Show the local AI settings if the user has purchased the AI Local plan // Show the local AI settings if the user has purchased the AI Local plan
if (kDebugMode || state.isPurchaseAILocal) { if (state.isPurchaseAILocal) {
return const LocalAISetting(); return const LocalAISetting();
} else { } else {
// Show the upgrade to AI Local plan button if the user has not purchased the AI Local plan // Show the upgrade to AI Local plan button if the user has not purchased the AI Local plan

View File

@ -1,5 +1,3 @@
import 'dart:io';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:appflowy/util/int64_extension.dart'; import 'package:appflowy/util/int64_extension.dart';
@ -214,23 +212,23 @@ class _SettingsBillingViewState extends State<SettingsBillingView> {
// Currently, the AI Local tile is only available on macOS // Currently, the AI Local tile is only available on macOS
// TODO(nathan): enable windows and linux // TODO(nathan): enable windows and linux
if (Platform.isMacOS) // if (Platform.isMacOS)
_AITile( // _AITile(
plan: SubscriptionPlanPB.AiLocal, // plan: SubscriptionPlanPB.AiLocal,
label: LocaleKeys // label: LocaleKeys
.settings_billingPage_addons_aiOnDevice_label // .settings_billingPage_addons_aiOnDevice_label
.tr(), // .tr(),
description: LocaleKeys // description: LocaleKeys
.settings_billingPage_addons_aiOnDevice_description, // .settings_billingPage_addons_aiOnDevice_description,
activeDescription: LocaleKeys // activeDescription: LocaleKeys
.settings_billingPage_addons_aiOnDevice_activeDescription, // .settings_billingPage_addons_aiOnDevice_activeDescription,
canceledDescription: LocaleKeys // canceledDescription: LocaleKeys
.settings_billingPage_addons_aiOnDevice_canceledDescription, // .settings_billingPage_addons_aiOnDevice_canceledDescription,
subscriptionInfo: // subscriptionInfo:
state.subscriptionInfo.addOns.firstWhereOrNull( // state.subscriptionInfo.addOns.firstWhereOrNull(
(a) => a.type == WorkspaceAddOnPBType.AddOnAiLocal, // (a) => a.type == WorkspaceAddOnPBType.AddOnAiLocal,
), // ),
), // ),
], ],
), ),
], ],

View File

@ -1,5 +1,3 @@
import 'dart:io';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import 'package:appflowy/generated/flowy_svgs.g.dart'; import 'package:appflowy/generated/flowy_svgs.g.dart';
@ -141,43 +139,43 @@ class _SettingsPlanViewState extends State<SettingsPlanView> {
// Currently, the AI Local tile is only available on macOS // Currently, the AI Local tile is only available on macOS
// TODO(nathan): enable windows and linux // TODO(nathan): enable windows and linux
if (Platform.isMacOS) // if (Platform.isMacOS)
Flexible( // Flexible(
child: _AddOnBox( // child: _AddOnBox(
title: LocaleKeys // title: LocaleKeys
.settings_planPage_planUsage_addons_aiOnDevice_title // .settings_planPage_planUsage_addons_aiOnDevice_title
.tr(), // .tr(),
description: LocaleKeys // description: LocaleKeys
.settings_planPage_planUsage_addons_aiOnDevice_description // .settings_planPage_planUsage_addons_aiOnDevice_description
.tr(), // .tr(),
price: LocaleKeys // price: LocaleKeys
.settings_planPage_planUsage_addons_aiOnDevice_price // .settings_planPage_planUsage_addons_aiOnDevice_price
.tr( // .tr(
args: [ // args: [
SubscriptionPlanPB.AiLocal.priceAnnualBilling, // SubscriptionPlanPB.AiLocal.priceAnnualBilling,
], // ],
), // ),
priceInfo: LocaleKeys // priceInfo: LocaleKeys
.settings_planPage_planUsage_addons_aiOnDevice_priceInfo // .settings_planPage_planUsage_addons_aiOnDevice_priceInfo
.tr(), // .tr(),
billingInfo: LocaleKeys // billingInfo: LocaleKeys
.settings_planPage_planUsage_addons_aiOnDevice_billingInfo // .settings_planPage_planUsage_addons_aiOnDevice_billingInfo
.tr( // .tr(
args: [ // args: [
SubscriptionPlanPB.AiLocal.priceMonthBilling, // SubscriptionPlanPB.AiLocal.priceMonthBilling,
], // ],
), // ),
buttonText: state.subscriptionInfo.hasAIOnDevice // buttonText: state.subscriptionInfo.hasAIOnDevice
? LocaleKeys // ? LocaleKeys
.settings_planPage_planUsage_addons_activeLabel // .settings_planPage_planUsage_addons_activeLabel
.tr() // .tr()
: LocaleKeys // : LocaleKeys
.settings_planPage_planUsage_addons_addLabel // .settings_planPage_planUsage_addons_addLabel
.tr(), // .tr(),
isActive: state.subscriptionInfo.hasAIOnDevice, // isActive: state.subscriptionInfo.hasAIOnDevice,
plan: SubscriptionPlanPB.AiLocal, // plan: SubscriptionPlanPB.AiLocal,
), // ),
), // ),
], ],
), ),
], ],

View File

@ -418,6 +418,7 @@ Future<void> showConfirmDialog({
required String title, required String title,
required String description, required String description,
VoidCallback? onConfirm, VoidCallback? onConfirm,
VoidCallback? onCancel,
String? confirmLabel, String? confirmLabel,
ConfirmPopupStyle style = ConfirmPopupStyle.onlyOk, ConfirmPopupStyle style = ConfirmPopupStyle.onlyOk,
}) { }) {
@ -434,6 +435,7 @@ Future<void> showConfirmDialog({
title: title, title: title,
description: description, description: description,
onConfirm: () => onConfirm?.call(), onConfirm: () => onConfirm?.call(),
onCancel: () => onCancel?.call(),
confirmLabel: confirmLabel, confirmLabel: confirmLabel,
style: style, style: style,
), ),

View File

@ -172,7 +172,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]] [[package]]
name = "app-error" name = "app-error"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -192,7 +192,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-ai-client" name = "appflowy-ai-client"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -206,7 +206,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-local-ai" name = "appflowy-local-ai"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"appflowy-plugin", "appflowy-plugin",
@ -225,7 +225,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-plugin" name = "appflowy-plugin"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cfg-if", "cfg-if",
@ -826,7 +826,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api" name = "client-api"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"again", "again",
"anyhow", "anyhow",
@ -876,7 +876,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api-entity" name = "client-api-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"collab-entity", "collab-entity",
"collab-rt-entity", "collab-rt-entity",
@ -888,7 +888,7 @@ dependencies = [
[[package]] [[package]]
name = "client-websocket" name = "client-websocket"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-util", "futures-util",
@ -1132,7 +1132,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-entity" name = "collab-rt-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -1157,7 +1157,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-protocol" name = "collab-rt-protocol"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1421,7 +1421,7 @@ dependencies = [
"cssparser-macros", "cssparser-macros",
"dtoa-short", "dtoa-short",
"itoa 1.0.6", "itoa 1.0.6",
"phf 0.8.0", "phf 0.11.2",
"smallvec", "smallvec",
] ]
@ -1532,7 +1532,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
[[package]] [[package]]
name = "database-entity" name = "database-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3038,7 +3038,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue" name = "gotrue"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"futures-util", "futures-util",
@ -3055,7 +3055,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue-entity" name = "gotrue-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3487,7 +3487,7 @@ dependencies = [
[[package]] [[package]]
name = "infra" name = "infra"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -6031,7 +6031,7 @@ dependencies = [
[[package]] [[package]]
name = "shared-entity" name = "shared-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",

View File

@ -53,7 +53,7 @@ collab-user = { version = "0.2" }
# Run the script: # Run the script:
# scripts/tool/update_client_api_rev.sh new_rev_id # scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "c2a839ba8bf9ead44679eb08f3a9680467b767ca" } client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "0062c950677f7f633f5b7edabc827a35d3bc92c3" }
[dependencies] [dependencies]
serde_json.workspace = true serde_json.workspace = true
@ -128,5 +128,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
# To update the commit ID, run: # To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id # scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" } appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" } appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }

View File

@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]] [[package]]
name = "app-error" name = "app-error"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -183,7 +183,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-ai-client" name = "appflowy-ai-client"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -197,7 +197,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-local-ai" name = "appflowy-local-ai"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"appflowy-plugin", "appflowy-plugin",
@ -216,7 +216,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-plugin" name = "appflowy-plugin"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cfg-if", "cfg-if",
@ -800,7 +800,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api" name = "client-api"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"again", "again",
"anyhow", "anyhow",
@ -850,7 +850,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api-entity" name = "client-api-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"collab-entity", "collab-entity",
"collab-rt-entity", "collab-rt-entity",
@ -862,7 +862,7 @@ dependencies = [
[[package]] [[package]]
name = "client-websocket" name = "client-websocket"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-util", "futures-util",
@ -1115,7 +1115,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-entity" name = "collab-rt-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -1140,7 +1140,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-protocol" name = "collab-rt-protocol"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1411,7 +1411,7 @@ dependencies = [
"cssparser-macros", "cssparser-macros",
"dtoa-short", "dtoa-short",
"itoa 1.0.10", "itoa 1.0.10",
"phf 0.8.0", "phf 0.11.2",
"smallvec", "smallvec",
] ]
@ -1522,7 +1522,7 @@ checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
[[package]] [[package]]
name = "database-entity" name = "database-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3105,7 +3105,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue" name = "gotrue"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"futures-util", "futures-util",
@ -3122,7 +3122,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue-entity" name = "gotrue-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3559,7 +3559,7 @@ dependencies = [
[[package]] [[package]]
name = "infra" name = "infra"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -6095,7 +6095,7 @@ dependencies = [
[[package]] [[package]]
name = "shared-entity" name = "shared-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",

View File

@ -52,7 +52,7 @@ collab-user = { version = "0.2" }
# Run the script: # Run the script:
# scripts/tool/update_client_api_rev.sh new_rev_id # scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "c2a839ba8bf9ead44679eb08f3a9680467b767ca" } client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "0062c950677f7f633f5b7edabc827a35d3bc92c3" }
[dependencies] [dependencies]
serde_json.workspace = true serde_json.workspace = true
@ -128,6 +128,6 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
# To update the commit ID, run: # To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id # scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" } appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" } appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }

View File

@ -655,6 +655,7 @@
"llmModel": "Language Model", "llmModel": "Language Model",
"llmModelType": "Language Model Type", "llmModelType": "Language Model Type",
"downloadLLMPrompt": "Download {}", "downloadLLMPrompt": "Download {}",
"downloadAppFlowyOfflineAI": "Downloading AI offline package will enable AI to run on your device. Do you want to continue?",
"downloadLLMPromptDetail": "Downloading {} local model will take up to {} of storage. Do you want to continue?", "downloadLLMPromptDetail": "Downloading {} local model will take up to {} of storage. Do you want to continue?",
"downloadAIModelButton": "Download AI model", "downloadAIModelButton": "Download AI model",
"downloadingModel": "Downloading", "downloadingModel": "Downloading",
@ -667,7 +668,14 @@
"disableLocalAITitle": "Disable local AI", "disableLocalAITitle": "Disable local AI",
"disableLocalAIDescription": "Do you want to disable local AI?", "disableLocalAIDescription": "Do you want to disable local AI?",
"localAIToggleTitle": "Toggle to enable or disable local AI", "localAIToggleTitle": "Toggle to enable or disable local AI",
"fetchLocalModel": "Fetch local model configuration", "offlineAIInstruction1": "Follow the",
"offlineAIInstruction2": "instruction",
"offlineAIInstruction3": "to enable offline AI.",
"offlineAIDownload1": "If you have not downloaded the AppFlowy AI, please",
"offlineAIDownload2": "download",
"offlineAIDownload3": "it first",
"activeOfflineAI": "Active",
"downloadOfflineAI": "Download",
"openModelDirectory": "Open folder" "openModelDirectory": "Open folder"
} }
}, },

View File

@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]] [[package]]
name = "app-error" name = "app-error"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -183,7 +183,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-ai-client" name = "appflowy-ai-client"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -197,7 +197,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-local-ai" name = "appflowy-local-ai"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"appflowy-plugin", "appflowy-plugin",
@ -216,7 +216,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-plugin" name = "appflowy-plugin"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec#8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cfg-if", "cfg-if",
@ -718,7 +718,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api" name = "client-api"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"again", "again",
"anyhow", "anyhow",
@ -768,7 +768,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api-entity" name = "client-api-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"collab-entity", "collab-entity",
"collab-rt-entity", "collab-rt-entity",
@ -780,7 +780,7 @@ dependencies = [
[[package]] [[package]]
name = "client-websocket" name = "client-websocket"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-util", "futures-util",
@ -993,7 +993,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-entity" name = "collab-rt-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -1018,7 +1018,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-protocol" name = "collab-rt-protocol"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1356,7 +1356,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
[[package]] [[package]]
name = "database-entity" name = "database-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -1739,6 +1739,18 @@ dependencies = [
"getrandom 0.2.10", "getrandom 0.2.10",
] ]
[[package]]
name = "filetime"
version = "0.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
dependencies = [
"cfg-if",
"libc",
"redox_syscall 0.4.1",
"windows-sys 0.52.0",
]
[[package]] [[package]]
name = "finl_unicode" name = "finl_unicode"
version = "1.2.0" version = "1.2.0"
@ -1794,6 +1806,7 @@ dependencies = [
"lib-infra", "lib-infra",
"log", "log",
"md5", "md5",
"notify",
"parking_lot 0.12.1", "parking_lot 0.12.1",
"protobuf", "protobuf",
"reqwest", "reqwest",
@ -2457,6 +2470,15 @@ dependencies = [
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]]
name = "fsevent-sys"
version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "fuchsia-cprng" name = "fuchsia-cprng"
version = "0.1.1" version = "0.1.1"
@ -2705,7 +2727,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue" name = "gotrue"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"futures-util", "futures-util",
@ -2722,7 +2744,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue-entity" name = "gotrue-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3087,7 +3109,7 @@ dependencies = [
[[package]] [[package]]
name = "infra" name = "infra"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -3098,6 +3120,26 @@ dependencies = [
"tracing", "tracing",
] ]
[[package]]
name = "inotify"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
dependencies = [
"bitflags 1.3.2",
"inotify-sys",
"libc",
]
[[package]]
name = "inotify-sys"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "inout" name = "inout"
version = "0.1.3" version = "0.1.3"
@ -3181,6 +3223,26 @@ dependencies = [
"simple_asn1", "simple_asn1",
] ]
[[package]]
name = "kqueue"
version = "1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c"
dependencies = [
"kqueue-sys",
"libc",
]
[[package]]
name = "kqueue-sys"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b"
dependencies = [
"bitflags 1.3.2",
"libc",
]
[[package]] [[package]]
name = "lazy_static" name = "lazy_static"
version = "1.4.0" version = "1.4.0"
@ -3583,6 +3645,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0"
dependencies = [ dependencies = [
"libc", "libc",
"log",
"wasi 0.11.0+wasi-snapshot-preview1", "wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys 0.48.0", "windows-sys 0.48.0",
] ]
@ -3642,6 +3705,25 @@ dependencies = [
"minimal-lexical", "minimal-lexical",
] ]
[[package]]
name = "notify"
version = "6.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d"
dependencies = [
"bitflags 2.4.0",
"crossbeam-channel",
"filetime",
"fsevent-sys",
"inotify",
"kqueue",
"libc",
"log",
"mio",
"walkdir",
"windows-sys 0.48.0",
]
[[package]] [[package]]
name = "ntapi" name = "ntapi"
version = "0.4.1" version = "0.4.1"
@ -4678,6 +4760,15 @@ dependencies = [
"bitflags 1.3.2", "bitflags 1.3.2",
] ]
[[package]]
name = "redox_syscall"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags 1.3.2",
]
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.9.5" version = "1.9.5"
@ -5233,7 +5324,7 @@ dependencies = [
[[package]] [[package]]
name = "shared-entity" name = "shared-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=c2a839ba8bf9ead44679eb08f3a9680467b767ca#c2a839ba8bf9ead44679eb08f3a9680467b767ca" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=0062c950677f7f633f5b7edabc827a35d3bc92c3#0062c950677f7f633f5b7edabc827a35d3bc92c3"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",

View File

@ -99,8 +99,8 @@ zip = "2.1.3"
# Run the script.add_workspace_members: # Run the script.add_workspace_members:
# scripts/tool/update_client_api_rev.sh new_rev_id # scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "c2a839ba8bf9ead44679eb08f3a9680467b767ca" } client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "0062c950677f7f633f5b7edabc827a35d3bc92c3" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "c2a839ba8bf9ead44679eb08f3a9680467b767ca" } client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "0062c950677f7f633f5b7edabc827a35d3bc92c3" }
[profile.dev] [profile.dev]
opt-level = 0 opt-level = 0
@ -147,5 +147,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
# To update the commit ID, run: # To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id # scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" } appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" } appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8ef7d3e4c38fbf92ff9b3630fe79017e95a496ec" }

View File

@ -1,6 +1,6 @@
use bytes::Bytes; use bytes::Bytes;
pub use client_api::entity::ai_dto::{ pub use client_api::entity::ai_dto::{
AppFlowyAIPlugin, CompletionType, LLMModel, LocalAIConfig, ModelInfo, RelatedQuestion, AppFlowyOfflineAI, CompletionType, LLMModel, LocalAIConfig, ModelInfo, RelatedQuestion,
RepeatedRelatedQuestion, StringOrMessage, RepeatedRelatedQuestion, StringOrMessage,
}; };
pub use client_api::entity::{ pub use client_api::entity::{

View File

@ -44,6 +44,9 @@ md5 = "0.7.0"
zip = { workspace = true, features = ["deflate"] } zip = { workspace = true, features = ["deflate"] }
zip-extensions = "0.8.0" zip-extensions = "0.8.0"
[target.'cfg(any(target_os = "macos", target_os = "linux", target_os = "windows"))'.dependencies]
notify = "6.1.1"
[dev-dependencies] [dev-dependencies]
dotenv = "0.15.0" dotenv = "0.15.0"
uuid.workspace = true uuid.workspace = true

View File

@ -21,7 +21,7 @@ pub trait ChatUserService: Send + Sync + 'static {
fn device_id(&self) -> Result<String, FlowyError>; fn device_id(&self) -> Result<String, FlowyError>;
fn workspace_id(&self) -> Result<String, FlowyError>; fn workspace_id(&self) -> Result<String, FlowyError>;
fn sqlite_connection(&self, uid: i64) -> Result<DBConnection, FlowyError>; fn sqlite_connection(&self, uid: i64) -> Result<DBConnection, FlowyError>;
fn user_data_dir(&self) -> Result<PathBuf, FlowyError>; fn data_root_dir(&self) -> Result<PathBuf, FlowyError>;
} }
pub struct ChatManager { pub struct ChatManager {

View File

@ -1,6 +1,7 @@
use crate::local_ai::local_llm_chat::LLMModelInfo; use crate::local_ai::local_llm_chat::LLMModelInfo;
use appflowy_plugin::core::plugin::RunningState; use appflowy_plugin::core::plugin::RunningState;
use crate::local_ai::local_llm_resource::PendingResource;
use flowy_chat_pub::cloud::{ use flowy_chat_pub::cloud::{
ChatMessage, LLMModel, RelatedQuestion, RepeatedChatMessage, RepeatedRelatedQuestion, ChatMessage, LLMModel, RelatedQuestion, RepeatedChatMessage, RepeatedRelatedQuestion,
}; };
@ -360,16 +361,38 @@ pub struct PendingResourcePB {
pub name: String, pub name: String,
#[pb(index = 2)] #[pb(index = 2)]
pub file_size: i64, pub file_size: String,
#[pb(index = 3)] #[pb(index = 3)]
pub requirements: String, pub requirements: String,
#[pb(index = 4)]
pub res_type: PendingResourceTypePB,
}
#[derive(Debug, Default, Clone, ProtoBuf_Enum, PartialEq, Eq, Copy)]
pub enum PendingResourceTypePB {
#[default]
OfflineApp = 0,
AIModel = 1,
}
impl From<PendingResource> for PendingResourceTypePB {
fn from(value: PendingResource) -> Self {
match value {
PendingResource::OfflineApp { .. } => PendingResourceTypePB::OfflineApp,
PendingResource::ModelInfoRes { .. } => PendingResourceTypePB::AIModel,
}
}
} }
#[derive(Default, ProtoBuf, Clone, Debug)] #[derive(Default, ProtoBuf, Clone, Debug)]
pub struct LocalAIPluginStatePB { pub struct LocalAIPluginStatePB {
#[pb(index = 1)] #[pb(index = 1)]
pub state: RunningStatePB, pub state: RunningStatePB,
#[pb(index = 2)]
pub offline_ai_ready: bool,
} }
#[derive(Debug, Default, Clone, ProtoBuf_Enum, PartialEq, Eq, Copy)] #[derive(Debug, Default, Clone, ProtoBuf_Enum, PartialEq, Eq, Copy)]
@ -416,3 +439,9 @@ pub struct LocalModelStoragePB {
#[pb(index = 1)] #[pb(index = 1)]
pub file_path: String, pub file_path: String,
} }
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct OfflineAIPB {
#[pb(index = 1)]
pub link: String,
}

View File

@ -374,3 +374,22 @@ pub(crate) async fn get_model_storage_directory_handler(
.get_model_storage_directory()?; .get_model_storage_directory()?;
data_result_ok(LocalModelStoragePB { file_path }) data_result_ok(LocalModelStoragePB { file_path })
} }
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn get_offline_app_handler(
chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<OfflineAIPB, FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?;
let (tx, rx) = oneshot::channel::<Result<String, FlowyError>>();
tokio::spawn(async move {
let link = chat_manager
.local_ai_controller
.get_offline_ai_app_download_link()
.await?;
let _ = tx.send(Ok(link));
Ok::<_, FlowyError>(())
});
let link = rx.await??;
data_result_ok(OfflineAIPB { link })
}

View File

@ -57,6 +57,7 @@ pub fn init(chat_manager: Weak<ChatManager>) -> AFPlugin {
ChatEvent::GetModelStorageDirectory, ChatEvent::GetModelStorageDirectory,
get_model_storage_directory_handler, get_model_storage_directory_handler,
) )
.event(ChatEvent::GetOfflineAIAppLink, get_offline_app_handler)
} }
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)] #[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
@ -133,4 +134,7 @@ pub enum ChatEvent {
#[event(output = "LocalModelStoragePB")] #[event(output = "LocalModelStoragePB")]
GetModelStorageDirectory = 21, GetModelStorageDirectory = 21,
#[event(output = "OfflineAIPB")]
GetOfflineAIAppLink = 22,
} }

View File

@ -6,23 +6,22 @@ use anyhow::Error;
use appflowy_local_ai::chat_plugin::{AIPluginConfig, LocalChatLLMChat}; use appflowy_local_ai::chat_plugin::{AIPluginConfig, LocalChatLLMChat};
use appflowy_plugin::manager::PluginManager; use appflowy_plugin::manager::PluginManager;
use appflowy_plugin::util::is_apple_silicon; use appflowy_plugin::util::is_apple_silicon;
use flowy_chat_pub::cloud::{AppFlowyAIPlugin, ChatCloudService, LLMModel, LocalAIConfig}; use flowy_chat_pub::cloud::{AppFlowyOfflineAI, ChatCloudService, LLMModel, LocalAIConfig};
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use flowy_sqlite::kv::KVStorePreferences; use flowy_sqlite::kv::KVStorePreferences;
use futures::Sink; use futures::Sink;
use lib_infra::async_trait::async_trait; use lib_infra::async_trait::async_trait;
use parking_lot::Mutex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::ops::Deref; use std::ops::Deref;
use parking_lot::Mutex;
use std::sync::Arc; use std::sync::Arc;
use tokio_stream::StreamExt; use tokio_stream::StreamExt;
use tracing::{debug, error, info, trace}; use tracing::{debug, error, info, trace};
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct LLMSetting { pub struct LLMSetting {
pub plugin: AppFlowyAIPlugin, pub app: AppFlowyOfflineAI,
pub llm_model: LLMModel, pub llm_model: LLMModel,
} }
@ -59,22 +58,6 @@ impl LocalAIController {
cloud_service: Arc<dyn ChatCloudService>, cloud_service: Arc<dyn ChatCloudService>,
) -> Self { ) -> Self {
let llm_chat = Arc::new(LocalChatLLMChat::new(plugin_manager)); let llm_chat = Arc::new(LocalChatLLMChat::new(plugin_manager));
let mut rx = llm_chat.subscribe_running_state();
let _weak_store_preferences = Arc::downgrade(&store_preferences);
tokio::spawn(async move {
while let Some(state) = rx.next().await {
info!("[AI Plugin] state: {:?}", state);
let new_state = RunningStatePB::from(state);
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(LocalAIPluginStatePB { state: new_state })
.send();
}
});
let res_impl = LLMResourceServiceImpl { let res_impl = LLMResourceServiceImpl {
user_service: user_service.clone(), user_service: user_service.clone(),
cloud_service, cloud_service,
@ -85,6 +68,24 @@ impl LocalAIController {
let llm_res = Arc::new(LLMResourceController::new(user_service, res_impl, tx)); let llm_res = Arc::new(LLMResourceController::new(user_service, res_impl, tx));
let current_chat_id = Mutex::new(None); let current_chat_id = Mutex::new(None);
let mut running_state_rx = llm_chat.subscribe_running_state();
let offline_ai_ready = llm_res.is_offline_ai_ready();
tokio::spawn(async move {
while let Some(state) = running_state_rx.next().await {
info!("[AI Plugin] state: {:?}", state);
let new_state = RunningStatePB::from(state);
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(LocalAIPluginStatePB {
state: new_state,
offline_ai_ready,
})
.send();
}
});
let this = Self { let this = Self {
llm_chat, llm_chat,
llm_res, llm_res,
@ -195,11 +196,6 @@ impl LocalAIController {
return Err(FlowyError::local_ai_unavailable()); return Err(FlowyError::local_ai_unavailable());
} }
let llm_chat = self.llm_chat.clone();
match llm_chat.destroy_chat_plugin().await {
Ok(_) => info!("[AI Plugin] destroy plugin successfully"),
Err(err) => error!("[AI Plugin] failed to destroy plugin: {:?}", err),
}
let state = self.llm_res.use_local_llm(llm_id)?; let state = self.llm_res.use_local_llm(llm_id)?;
// Re-initialize the plugin if the setting is updated and ready to use // Re-initialize the plugin if the setting is updated and ready to use
if self.llm_res.is_resource_ready() { if self.llm_res.is_resource_ready() {
@ -230,9 +226,11 @@ impl LocalAIController {
} }
pub fn get_chat_plugin_state(&self) -> LocalAIPluginStatePB { pub fn get_chat_plugin_state(&self) -> LocalAIPluginStatePB {
let offline_ai_ready = self.llm_res.is_offline_ai_ready();
let state = self.llm_chat.get_plugin_running_state(); let state = self.llm_chat.get_plugin_running_state();
LocalAIPluginStatePB { LocalAIPluginStatePB {
state: RunningStatePB::from(state), state: RunningStatePB::from(state),
offline_ai_ready,
} }
} }
@ -252,6 +250,10 @@ impl LocalAIController {
.map(|path| path.to_string_lossy().to_string()) .map(|path| path.to_string_lossy().to_string())
} }
pub async fn get_offline_ai_app_download_link(&self) -> FlowyResult<String> {
self.llm_res.get_offline_ai_app_download_link().await
}
pub async fn toggle_local_ai(&self) -> FlowyResult<bool> { pub async fn toggle_local_ai(&self) -> FlowyResult<bool> {
let enabled = !self let enabled = !self
.store_preferences .store_preferences
@ -317,6 +319,7 @@ fn initialize_chat_plugin(
ret: Option<tokio::sync::oneshot::Sender<()>>, ret: Option<tokio::sync::oneshot::Sender<()>>,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let llm_chat = llm_chat.clone(); let llm_chat = llm_chat.clone();
tokio::spawn(async move { tokio::spawn(async move {
trace!("[AI Plugin] config: {:?}", chat_config); trace!("[AI Plugin] config: {:?}", chat_config);
if is_apple_silicon().await.unwrap_or(false) { if is_apple_silicon().await.unwrap_or(false) {

View File

@ -1,24 +1,25 @@
use crate::chat_manager::ChatUserService; use crate::chat_manager::ChatUserService;
use crate::entities::{LocalModelResourcePB, PendingResourcePB}; use crate::entities::{LocalModelResourcePB, PendingResourcePB, PendingResourceTypePB};
use crate::local_ai::local_llm_chat::{LLMModelInfo, LLMSetting}; use crate::local_ai::local_llm_chat::{LLMModelInfo, LLMSetting};
use crate::local_ai::model_request::download_model; use crate::local_ai::model_request::download_model;
use appflowy_local_ai::chat_plugin::AIPluginConfig; use appflowy_local_ai::chat_plugin::AIPluginConfig;
use flowy_chat_pub::cloud::{LLMModel, LocalAIConfig, ModelInfo}; use flowy_chat_pub::cloud::{LLMModel, LocalAIConfig, ModelInfo};
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use futures::Sink; use futures::Sink;
use futures_util::SinkExt; use futures_util::SinkExt;
use lib_infra::async_trait::async_trait; use lib_infra::async_trait::async_trait;
use parking_lot::RwLock; use parking_lot::RwLock;
use appflowy_local_ai::plugin_request::download_plugin; use lib_infra::util::{get_operating_system, OperatingSystem};
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration;
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
use crate::local_ai::watch::{watch_path, WatchContext};
use tokio::fs::{self}; use tokio::fs::{self};
use tokio_util::sync::CancellationToken; use tokio_util::sync::CancellationToken;
use tracing::{debug, error, info, instrument, trace, warn}; use tracing::{debug, error, info, instrument, trace, warn};
use zip_extensions::zip_extract;
#[async_trait] #[async_trait]
pub trait LLMResourceService: Send + Sync + 'static { pub trait LLMResourceService: Send + Sync + 'static {
@ -29,12 +30,17 @@ pub trait LLMResourceService: Send + Sync + 'static {
fn is_rag_enabled(&self) -> bool; fn is_rag_enabled(&self) -> bool;
} }
const PLUGIN_DIR: &str = "plugin";
const LLM_MODEL_DIR: &str = "models"; const LLM_MODEL_DIR: &str = "models";
const DOWNLOAD_FINISH: &str = "finish"; const DOWNLOAD_FINISH: &str = "finish";
#[derive(Debug, Clone)]
pub enum WatchDiskEvent {
Create,
Remove,
}
pub enum PendingResource { pub enum PendingResource {
PluginRes, OfflineApp,
ModelInfoRes(Vec<ModelInfo>), ModelInfoRes(Vec<ModelInfo>),
} }
#[derive(Clone)] #[derive(Clone)]
@ -62,6 +68,9 @@ pub struct LLMResourceController {
ai_config: RwLock<Option<LocalAIConfig>>, ai_config: RwLock<Option<LocalAIConfig>>,
download_task: Arc<RwLock<Option<DownloadTask>>>, download_task: Arc<RwLock<Option<DownloadTask>>>,
resource_notify: tokio::sync::mpsc::Sender<()>, resource_notify: tokio::sync::mpsc::Sender<()>,
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
offline_app_disk_watch: RwLock<Option<WatchContext>>,
offline_app_state_sender: tokio::sync::broadcast::Sender<WatchDiskEvent>,
} }
impl LLMResourceController { impl LLMResourceController {
@ -70,6 +79,7 @@ impl LLMResourceController {
resource_service: impl LLMResourceService, resource_service: impl LLMResourceService,
resource_notify: tokio::sync::mpsc::Sender<()>, resource_notify: tokio::sync::mpsc::Sender<()>,
) -> Self { ) -> Self {
let (offline_app_ready_sender, _) = tokio::sync::broadcast::channel(1);
let llm_setting = RwLock::new(resource_service.retrieve_setting()); let llm_setting = RwLock::new(resource_service.retrieve_setting());
Self { Self {
user_service, user_service,
@ -78,6 +88,43 @@ impl LLMResourceController {
ai_config: Default::default(), ai_config: Default::default(),
download_task: Default::default(), download_task: Default::default(),
resource_notify, resource_notify,
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
offline_app_disk_watch: Default::default(),
offline_app_state_sender: offline_app_ready_sender,
}
}
#[allow(dead_code)]
pub fn subscribe_offline_app_state(&self) -> tokio::sync::broadcast::Receiver<WatchDiskEvent> {
self.offline_app_state_sender.subscribe()
}
fn set_llm_setting(&self, llm_setting: LLMSetting) {
let offline_app_path = self.offline_app_path(&llm_setting.app.ai_plugin_name);
*self.llm_setting.write() = Some(llm_setting);
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
{
let is_diff = self
.offline_app_disk_watch
.read()
.as_ref()
.map(|watch_context| watch_context.path == offline_app_path)
.unwrap_or(true);
// If the offline app path is different from the current watch path, update the watch path.
if is_diff {
if let Ok((watcher, mut rx)) = watch_path(offline_app_path) {
let offline_app_ready_sender = self.offline_app_state_sender.clone();
tokio::spawn(async move {
while let Some(event) = rx.recv().await {
info!("Offline app file changed: {:?}", event);
let _ = offline_app_ready_sender.send(event);
}
});
self.offline_app_disk_watch.write().replace(watcher);
}
}
} }
} }
@ -89,6 +136,24 @@ impl LLMResourceController {
} }
} }
pub fn is_offline_ai_ready(&self) -> bool {
match self.llm_setting.read().as_ref() {
None => {
trace!("[LLM Resource] No local ai setting found");
false
},
Some(setting) => {
let path = self.offline_app_path(&setting.app.ai_plugin_name);
path.exists()
},
}
}
pub async fn get_offline_ai_app_download_link(&self) -> FlowyResult<String> {
let ai_config = self.fetch_ai_config().await?;
Ok(ai_config.plugin.url)
}
/// Retrieves model information and updates the current model settings. /// Retrieves model information and updates the current model settings.
#[instrument(level = "debug", skip_all, err)] #[instrument(level = "debug", skip_all, err)]
pub async fn refresh_llm_resource(&self) -> FlowyResult<LLMModelInfo> { pub async fn refresh_llm_resource(&self) -> FlowyResult<LLMModelInfo> {
@ -101,10 +166,10 @@ impl LLMResourceController {
let selected_model = self.select_model(&ai_config)?; let selected_model = self.select_model(&ai_config)?;
let llm_setting = LLMSetting { let llm_setting = LLMSetting {
plugin: ai_config.plugin.clone(), app: ai_config.plugin.clone(),
llm_model: selected_model.clone(), llm_model: selected_model.clone(),
}; };
self.llm_setting.write().replace(llm_setting.clone()); self.set_llm_setting(llm_setting.clone());
self.resource_service.store_setting(llm_setting)?; self.resource_service.store_setting(llm_setting)?;
Ok(LLMModelInfo { Ok(LLMModelInfo {
@ -130,12 +195,12 @@ impl LLMResourceController {
.ok_or_else(|| FlowyError::local_ai().with_context("No local ai config found"))?; .ok_or_else(|| FlowyError::local_ai().with_context("No local ai config found"))?;
let llm_setting = LLMSetting { let llm_setting = LLMSetting {
plugin: package, app: package,
llm_model: llm_config.clone(), llm_model: llm_config.clone(),
}; };
trace!("[LLM Resource] Selected AI setting: {:?}", llm_setting); trace!("[LLM Resource] Selected AI setting: {:?}", llm_setting);
*self.llm_setting.write() = Some(llm_setting.clone()); self.set_llm_setting(llm_setting.clone());
self.resource_service.store_setting(llm_setting)?; self.resource_service.store_setting(llm_setting)?;
self.get_local_llm_state() self.get_local_llm_state()
} }
@ -157,17 +222,19 @@ impl LLMResourceController {
let pending_resources: Vec<_> = pending_resources let pending_resources: Vec<_> = pending_resources
.into_iter() .into_iter()
.flat_map(|res| match res { .flat_map(|res| match res {
PendingResource::PluginRes => vec![PendingResourcePB { PendingResource::OfflineApp => vec![PendingResourcePB {
name: "AppFlowy Plugin".to_string(), name: "AppFlowy Plugin".to_string(),
file_size: 0, file_size: "0 GB".to_string(),
requirements: "".to_string(), requirements: "".to_string(),
res_type: PendingResourceTypePB::OfflineApp,
}], }],
PendingResource::ModelInfoRes(model_infos) => model_infos PendingResource::ModelInfoRes(model_infos) => model_infos
.into_iter() .into_iter()
.map(|model_info| PendingResourcePB { .map(|model_info| PendingResourcePB {
name: model_info.name, name: model_info.name,
file_size: model_info.file_size, file_size: bytes_to_readable_format(model_info.file_size as u64),
requirements: model_info.requirements, requirements: model_info.requirements,
res_type: PendingResourceTypePB::AIModel,
}) })
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
}) })
@ -189,11 +256,10 @@ impl LLMResourceController {
None => Err(FlowyError::local_ai().with_context("Can't find any llm config")), None => Err(FlowyError::local_ai().with_context("Can't find any llm config")),
Some(llm_setting) => { Some(llm_setting) => {
let mut resources = vec![]; let mut resources = vec![];
let plugin_path = self.plugin_path(&llm_setting.plugin.etag)?; let plugin_path = self.offline_app_path(&llm_setting.app.ai_plugin_name);
if !plugin_path.exists() { if !plugin_path.exists() {
trace!("[LLM Resource] Plugin file not found: {:?}", plugin_path); trace!("[LLM Resource] offline plugin not found: {:?}", plugin_path);
resources.push(PendingResource::PluginRes); resources.push(PendingResource::OfflineApp);
} }
let chat_model = self.model_path(&llm_setting.llm_model.chat_model.file_name)?; let chat_model = self.model_path(&llm_setting.llm_model.chat_model.file_name)?;
@ -271,12 +337,12 @@ impl LLMResourceController {
*self.download_task.write() = Some(download_task.clone()); *self.download_task.write() = Some(download_task.clone());
progress_notify(download_task.tx.subscribe()); progress_notify(download_task.tx.subscribe());
let plugin_dir = self.user_plugin_folder()?; // let plugin_dir = self.user_plugin_folder()?;
if !plugin_dir.exists() { // if !plugin_dir.exists() {
fs::create_dir_all(&plugin_dir).await.map_err(|err| { // fs::create_dir_all(&plugin_dir).await.map_err(|err| {
FlowyError::local_ai().with_context(format!("Failed to create plugin dir: {:?}", err)) // FlowyError::local_ai().with_context(format!("Failed to create plugin dir: {:?}", err))
})?; // })?;
} // }
let model_dir = self.user_model_folder()?; let model_dir = self.user_model_folder()?;
if !model_dir.exists() { if !model_dir.exists() {
@ -286,42 +352,42 @@ impl LLMResourceController {
} }
tokio::spawn(async move { tokio::spawn(async move {
let plugin_file_etag_dir = plugin_dir.join(&llm_setting.plugin.etag); // let plugin_file_etag_dir = plugin_dir.join(&llm_setting.app.etag);
// We use the ETag as the identifier for the plugin file. If a file with the given ETag // We use the ETag as the identifier for the plugin file. If a file with the given ETag
// already exists, skip downloading it. // already exists, skip downloading it.
if !plugin_file_etag_dir.exists() { // if !plugin_file_etag_dir.exists() {
let plugin_progress_tx = download_task.tx.clone(); // let plugin_progress_tx = download_task.tx.clone();
info!( // info!(
"[LLM Resource] Downloading plugin: {:?}", // "[LLM Resource] Downloading plugin: {:?}",
llm_setting.plugin.etag // llm_setting.app.etag
); // );
let file_name = format!("{}.zip", llm_setting.plugin.etag); // let file_name = format!("{}.zip", llm_setting.app.etag);
let zip_plugin_file = download_plugin( // let zip_plugin_file = download_plugin(
&llm_setting.plugin.url, // &llm_setting.app.url,
&plugin_dir, // &plugin_dir,
&file_name, // &file_name,
Some(download_task.cancel_token.clone()), // Some(download_task.cancel_token.clone()),
Some(Arc::new(move |downloaded, total_size| { // Some(Arc::new(move |downloaded, total_size| {
let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0); // let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0);
let _ = plugin_progress_tx.send(format!("plugin:progress:{}", progress)); // let _ = plugin_progress_tx.send(format!("plugin:progress:{}", progress));
})), // })),
Some(Duration::from_millis(100)), // Some(Duration::from_millis(100)),
) // )
.await?; // .await?;
//
// unzip file // // unzip file
info!( // info!(
"[LLM Resource] unzip {:?} to {:?}", // "[LLM Resource] unzip {:?} to {:?}",
zip_plugin_file, plugin_file_etag_dir // zip_plugin_file, plugin_file_etag_dir
); // );
zip_extract(&zip_plugin_file, &plugin_file_etag_dir)?; // zip_extract(&zip_plugin_file, &plugin_file_etag_dir)?;
//
// delete zip file // // delete zip file
info!("[LLM Resource] Delete zip file: {:?}", file_name); // info!("[LLM Resource] Delete zip file: {:?}", file_name);
if let Err(err) = fs::remove_file(&zip_plugin_file).await { // if let Err(err) = fs::remove_file(&zip_plugin_file).await {
error!("Failed to delete zip file: {:?}", err); // error!("Failed to delete zip file: {:?}", err);
} // }
} // }
// After download the plugin, start downloading models // After download the plugin, start downloading models
let chat_model_file = ( let chat_model_file = (
@ -391,7 +457,7 @@ impl LLMResourceController {
Ok(()) Ok(())
} }
#[instrument(level = "debug", skip_all, err)] #[instrument(level = "info", skip_all, err)]
pub fn get_chat_config(&self, rag_enabled: bool) -> FlowyResult<AIPluginConfig> { pub fn get_chat_config(&self, rag_enabled: bool) -> FlowyResult<AIPluginConfig> {
if !self.is_resource_ready() { if !self.is_resource_ready() {
return Err(FlowyError::local_ai().with_context("Local AI resources are not ready")); return Err(FlowyError::local_ai().with_context("Local AI resources are not ready"));
@ -405,9 +471,25 @@ impl LLMResourceController {
.ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?; .ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?;
let model_dir = self.user_model_folder()?; let model_dir = self.user_model_folder()?;
let bin_path = self let bin_path = match get_operating_system() {
.plugin_path(&llm_setting.plugin.etag)? OperatingSystem::MacOS => {
.join(llm_setting.plugin.name); let path = self.offline_app_path(&llm_setting.app.ai_plugin_name);
if !path.exists() {
return Err(FlowyError::new(
ErrorCode::AIOfflineNotInstalled,
format!("AppFlowy Offline not installed at path: {:?}", path),
));
}
path
},
_ => {
return Err(
FlowyError::local_ai_unavailable()
.with_context("Local AI not available on current platform"),
);
},
};
let chat_model_path = model_dir.join(&llm_setting.llm_model.chat_model.file_name); let chat_model_path = model_dir.join(&llm_setting.llm_model.chat_model.file_name);
let mut config = AIPluginConfig::new(bin_path, chat_model_path)?; let mut config = AIPluginConfig::new(bin_path, chat_model_path)?;
@ -474,16 +556,12 @@ impl LLMResourceController {
Ok(selected_model) Ok(selected_model)
} }
fn user_plugin_folder(&self) -> FlowyResult<PathBuf> {
self.resource_dir().map(|dir| dir.join(PLUGIN_DIR))
}
pub(crate) fn user_model_folder(&self) -> FlowyResult<PathBuf> { pub(crate) fn user_model_folder(&self) -> FlowyResult<PathBuf> {
self.resource_dir().map(|dir| dir.join(LLM_MODEL_DIR)) self.resource_dir().map(|dir| dir.join(LLM_MODEL_DIR))
} }
fn plugin_path(&self, etag: &str) -> FlowyResult<PathBuf> { pub(crate) fn offline_app_path(&self, plugin_name: &str) -> PathBuf {
self.user_plugin_folder().map(|dir| dir.join(etag)) PathBuf::from(format!("/usr/local/bin/{}", plugin_name))
} }
fn model_path(&self, model_file_name: &str) -> FlowyResult<PathBuf> { fn model_path(&self, model_file_name: &str) -> FlowyResult<PathBuf> {
@ -493,7 +571,19 @@ impl LLMResourceController {
} }
pub(crate) fn resource_dir(&self) -> FlowyResult<PathBuf> { pub(crate) fn resource_dir(&self) -> FlowyResult<PathBuf> {
let user_data_dir = self.user_service.user_data_dir()?; let user_data_dir = self.user_service.data_root_dir()?;
Ok(user_data_dir.join("llm")) Ok(user_data_dir.join("ai"))
}
}
fn bytes_to_readable_format(bytes: u64) -> String {
const BYTES_IN_GIGABYTE: u64 = 1024 * 1024 * 1024;
const BYTES_IN_MEGABYTE: u64 = 1024 * 1024;
if bytes >= BYTES_IN_GIGABYTE {
let gigabytes = (bytes as f64) / (BYTES_IN_GIGABYTE as f64);
format!("{:.1} GB", gigabytes)
} else {
let megabytes = (bytes as f64) / (BYTES_IN_MEGABYTE as f64);
format!("{:.2} MB", megabytes)
} }
} }

View File

@ -1,3 +1,6 @@
pub mod local_llm_chat; pub mod local_llm_chat;
pub mod local_llm_resource; pub mod local_llm_resource;
mod model_request; mod model_request;
#[cfg(any(target_os = "windows", target_os = "macos", target_os = "linux"))]
pub mod watch;

View File

@ -0,0 +1,38 @@
use crate::local_ai::local_llm_resource::WatchDiskEvent;
use flowy_error::{FlowyError, FlowyResult};
use notify::{Event, RecursiveMode, Watcher};
use std::path::PathBuf;
use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver};
use tracing::error;
pub struct WatchContext {
#[allow(dead_code)]
watcher: notify::RecommendedWatcher,
pub path: PathBuf,
}
pub fn watch_path(path: PathBuf) -> FlowyResult<(WatchContext, UnboundedReceiver<WatchDiskEvent>)> {
let (tx, rx) = unbounded_channel();
let mut watcher = notify::recommended_watcher(move |res: Result<Event, _>| match res {
Ok(event) => match event.kind {
notify::EventKind::Create(_) => {
if let Err(err) = tx.send(WatchDiskEvent::Create) {
error!("watch send error: {:?}", err)
}
},
notify::EventKind::Remove(_) => {
if let Err(err) = tx.send(WatchDiskEvent::Remove) {
error!("watch send error: {:?}", err)
}
},
_ => {},
},
Err(e) => error!("watch error: {:?}", e),
})
.map_err(|err| FlowyError::internal().with_context(err))?;
watcher
.watch(&path, RecursiveMode::Recursive)
.map_err(|err| FlowyError::internal().with_context(err))?;
Ok((WatchContext { watcher, path }, rx))
}

View File

@ -52,7 +52,9 @@ impl ChatUserService for ChatUserServiceImpl {
self.upgrade_user()?.get_sqlite_connection(uid) self.upgrade_user()?.get_sqlite_connection(uid)
} }
fn user_data_dir(&self) -> Result<PathBuf, FlowyError> { fn data_root_dir(&self) -> Result<PathBuf, FlowyError> {
self.upgrade_user()?.get_user_data_dir() Ok(PathBuf::from(
self.upgrade_user()?.get_application_root_dir(),
))
} }
} }

View File

@ -301,6 +301,9 @@ pub enum ErrorCode {
#[error("Unsupported file format")] #[error("Unsupported file format")]
UnsupportedFileFormat = 104, UnsupportedFileFormat = 104,
#[error("AI offline not started")]
AIOfflineNotInstalled = 105,
} }
impl ErrorCode { impl ErrorCode {

View File

@ -217,7 +217,7 @@ where
let try_get_client = self.server.try_get_client(); let try_get_client = self.server.try_get_client();
FutureResult::new(async move { FutureResult::new(async move {
let workspaces = try_get_client?.get_workspaces().await?; let workspaces = try_get_client?.get_workspaces().await?;
to_user_workspaces(workspaces.0) to_user_workspaces(workspaces)
}) })
} }