chore: enable local ai and local ai chat (#5755)

* chore: enable local ai and local ai chat

* chore: config for chat with file

* chore: flutter analyzer
This commit is contained in:
Nathan.fooo 2024-07-18 20:54:35 +08:00 committed by GitHub
parent f36e3ae378
commit 5bbf174ffd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
38 changed files with 1515 additions and 647 deletions

View File

@ -13,7 +13,6 @@
"type": "dart",
"env": {
"RUST_LOG": "debug",
"RUST_BACKTRACE": "1"
},
// uncomment the following line to testing performance.
// "flutterMode": "profile",

View File

@ -1,5 +1,6 @@
import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
@ -9,13 +10,12 @@ part 'chat_file_bloc.freezed.dart';
class ChatFileBloc extends Bloc<ChatFileEvent, ChatFileState> {
ChatFileBloc({
required String chatId,
dynamic message,
}) : listener = LocalLLMListener(),
super(ChatFileState.initial(message)) {
super(const ChatFileState()) {
listener.start(
stateCallback: (pluginState) {
chatStateCallback: (chatState) {
if (!isClosed) {
add(ChatFileEvent.updateLocalAIState(pluginState));
add(ChatFileEvent.updateChatState(chatState));
}
},
);
@ -24,26 +24,30 @@ class ChatFileBloc extends Bloc<ChatFileEvent, ChatFileState> {
(event, emit) async {
await event.when(
initial: () async {
final result = await ChatEventGetPluginState().send();
final result = await ChatEventGetLocalAIChatState().send();
result.fold(
(pluginState) {
(chatState) {
if (!isClosed) {
add(ChatFileEvent.updateLocalAIState(pluginState));
add(
ChatFileEvent.updateChatState(chatState),
);
}
},
(err) {},
(err) {
Log.error(err.toString());
},
);
},
newFile: (String filePath) {
final payload = ChatFilePB(filePath: filePath, chatId: chatId);
ChatEventChatWithFile(payload).send();
},
updateLocalAIState: (PluginStatePB pluginState) {
updateChatState: (LocalAIChatPB chatState) {
// Only user enable chat with file and the plugin is already running
final supportChatWithFile = chatState.fileEnabled &&
chatState.pluginState.state == RunningStatePB.Running;
emit(
state.copyWith(
supportChatWithFile:
pluginState.state == RunningStatePB.Running,
),
state.copyWith(supportChatWithFile: supportChatWithFile),
);
},
);
@ -64,20 +68,19 @@ class ChatFileBloc extends Bloc<ChatFileEvent, ChatFileState> {
class ChatFileEvent with _$ChatFileEvent {
const factory ChatFileEvent.initial() = Initial;
const factory ChatFileEvent.newFile(String filePath) = _NewFile;
const factory ChatFileEvent.updateLocalAIState(PluginStatePB pluginState) =
_UpdateLocalAIState;
const factory ChatFileEvent.updateChatState(LocalAIChatPB chatState) =
_UpdateChatState;
}
@freezed
class ChatFileState with _$ChatFileState {
const factory ChatFileState({
required String text,
@Default(false) bool supportChatWithFile,
}) = _ChatFileState;
factory ChatFileState.initial(dynamic text) {
return ChatFileState(
text: text is String ? text : "",
);
}
}
@freezed
class LocalAIChatFileIndicator with _$LocalAIChatFileIndicator {
const factory LocalAIChatFileIndicator.ready(bool isEnabled) = _Ready;
const factory LocalAIChatFileIndicator.loading() = _Loading;
}

View File

@ -0,0 +1,73 @@
import 'dart:async';
import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
part 'chat_input_bloc.freezed.dart';
/// Bloc that decides which AI backend (AppFlowy cloud AI vs. local AI) the
/// chat input should target.
///
/// Subscribes to [LocalLLMListener] for local-AI plugin state pushes and
/// queries the backend for the initial plugin state on `started`.
class ChatInputBloc extends Bloc<ChatInputEvent, ChatInputState> {
ChatInputBloc()
: listener = LocalLLMListener(),
super(const ChatInputState(aiType: _AppFlowyAI())) {
listener.start(
stateCallback: (pluginState) {
// Listener callbacks can fire after close(); guard before add().
if (!isClosed) {
add(ChatInputEvent.updateState(pluginState));
}
},
);
on<ChatInputEvent>(_handleEvent);
}
// Receives local AI plugin state notifications from the backend.
final LocalLLMListener listener;
@override
Future<void> close() async {
// Stop the listener first so no callback reaches a closed bloc.
await listener.stop();
return super.close();
}
Future<void> _handleEvent(
ChatInputEvent event,
Emitter<ChatInputState> emit,
) async {
await event.when(
started: () async {
// Fetch the current plugin state and re-enter it as an event so it
// is emitted from a live handler.
final result = await ChatEventGetLocalAIPluginState().send();
result.fold(
(pluginState) {
if (!isClosed) {
add(ChatInputEvent.updateState(pluginState));
}
},
(err) => Log.error(err.toString()),
);
},
updateState: (LocalAIPluginStatePB aiPluginState) {
// NOTE(review): aiPluginState is ignored and the state is always reset
// to AppFlowy AI, so AIType.localAI is never emitted — confirm whether
// this is intentional or the plugin state should select the type.
emit(const ChatInputState(aiType: _AppFlowyAI()));
},
);
}
}
/// Events consumed by [ChatInputBloc].
@freezed
class ChatInputEvent with _$ChatInputEvent {
/// Requests the initial local AI plugin state from the backend.
const factory ChatInputEvent.started() = _Started;
/// Carries a plugin state update pushed from the backend listener.
const factory ChatInputEvent.updateState(LocalAIPluginStatePB aiPluginState) =
_UpdatePluginState;
}
/// State of [ChatInputBloc]: which AI backend the chat input targets.
@freezed
class ChatInputState with _$ChatInputState {
const factory ChatInputState({required AIType aiType}) = _ChatInputState;
}
/// The AI backend serving the chat: AppFlowy cloud AI or a local model.
@freezed
class AIType with _$AIType {
const factory AIType.appflowyAI() = _AppFlowyAI;
const factory AIType.localAI() = _LocalAI;
}

View File

@ -1,4 +1,5 @@
import 'package:appflowy/plugins/ai_chat/application/chat_file_bloc.dart';
import 'package:appflowy/plugins/ai_chat/application/chat_input_bloc.dart';
import 'package:desktop_drop/desktop_drop.dart';
import 'package:flutter/material.dart';
import 'package:flutter/services.dart';
@ -67,28 +68,40 @@ class AIChatPage extends StatelessWidget {
@override
Widget build(BuildContext context) {
if (userProfile.authenticator == AuthenticatorPB.AppFlowyCloud) {
return BlocProvider(
create: (context) => ChatFileBloc(chatId: view.id.toString()),
return MultiBlocProvider(
providers: [
BlocProvider(
create: (_) => ChatFileBloc(chatId: view.id.toString()),
),
BlocProvider(
create: (_) => ChatBloc(
view: view,
userProfile: userProfile,
)..add(const ChatEvent.initialLoad()),
),
BlocProvider(create: (_) => ChatInputBloc()),
],
child: BlocBuilder<ChatFileBloc, ChatFileState>(
builder: (context, state) {
return state.supportChatWithFile
? DropTarget(
onDragDone: (DropDoneDetails detail) async {
for (final file in detail.files) {
context
.read<ChatFileBloc>()
.add(ChatFileEvent.newFile(file.path));
}
},
child: _ChatContentPage(
view: view,
userProfile: userProfile,
),
)
: _ChatContentPage(
view: view,
userProfile: userProfile,
);
Widget child = _ChatContentPage(
view: view,
userProfile: userProfile,
);
// If the chat supports file upload, wrap the chat content with a drop target
if (state.supportChatWithFile) {
child = DropTarget(
onDragDone: (DropDoneDetails detail) async {
for (final file in detail.files) {
context
.read<ChatFileBloc>()
.add(ChatFileEvent.newFile(file.path));
}
},
child: child,
);
}
return child;
},
),
);
@ -146,67 +159,61 @@ class _ChatContentPageState extends State<_ChatContentPage> {
Flexible(
child: ConstrainedBox(
constraints: const BoxConstraints(maxWidth: 784),
child: BlocProvider(
create: (_) => ChatBloc(
view: widget.view,
userProfile: widget.userProfile,
)..add(const ChatEvent.initialLoad()),
child: BlocBuilder<ChatBloc, ChatState>(
builder: (blocContext, state) => Chat(
messages: state.messages,
onSendPressed: (_) {
// We use custom bottom widget for chat input, so
// do not need to handle this event.
},
customBottomWidget: buildChatInput(blocContext),
user: _user,
theme: buildTheme(context),
onEndReached: () async {
if (state.hasMorePrevMessage &&
state.loadingPreviousStatus !=
const LoadingState.loading()) {
blocContext
.read<ChatBloc>()
.add(const ChatEvent.startLoadingPrevMessage());
}
},
emptyState: BlocBuilder<ChatBloc, ChatState>(
builder: (_, state) => state.initialLoadingStatus ==
const LoadingState.finish()
? Padding(
padding: AIChatUILayout.welcomePagePadding,
child: ChatWelcomePage(
onSelectedQuestion: (question) => blocContext
.read<ChatBloc>()
.add(ChatEvent.sendMessage(question)),
child: BlocBuilder<ChatBloc, ChatState>(
builder: (blocContext, state) => Chat(
messages: state.messages,
onSendPressed: (_) {
// We use custom bottom widget for chat input, so
// do not need to handle this event.
},
customBottomWidget: buildChatInput(blocContext),
user: _user,
theme: buildTheme(context),
onEndReached: () async {
if (state.hasMorePrevMessage &&
state.loadingPreviousStatus !=
const LoadingState.loading()) {
blocContext
.read<ChatBloc>()
.add(const ChatEvent.startLoadingPrevMessage());
}
},
emptyState: BlocBuilder<ChatBloc, ChatState>(
builder: (_, state) =>
state.initialLoadingStatus == const LoadingState.finish()
? Padding(
padding: AIChatUILayout.welcomePagePadding,
child: ChatWelcomePage(
onSelectedQuestion: (question) => blocContext
.read<ChatBloc>()
.add(ChatEvent.sendMessage(question)),
),
)
: const Center(
child: CircularProgressIndicator.adaptive(),
),
)
: const Center(
child: CircularProgressIndicator.adaptive(),
),
),
messageWidthRatio: AIChatUILayout.messageWidthRatio,
textMessageBuilder: (
textMessage, {
required messageWidth,
required showName,
}) =>
_buildAITextMessage(blocContext, textMessage),
bubbleBuilder: (
child, {
required message,
required nextMessageInGroup,
}) {
if (message.author.id == _user.id) {
return ChatUserMessageBubble(
message: message,
child: child,
);
}
return _buildAIBubble(message, blocContext, state, child);
},
),
messageWidthRatio: AIChatUILayout.messageWidthRatio,
textMessageBuilder: (
textMessage, {
required messageWidth,
required showName,
}) =>
_buildAITextMessage(blocContext, textMessage),
bubbleBuilder: (
child, {
required message,
required nextMessageInGroup,
}) {
if (message.author.id == _user.id) {
return ChatUserMessageBubble(
message: message,
child: child,
);
}
return _buildAIBubble(message, blocContext, state, child);
},
),
),
),
@ -338,31 +345,40 @@ class _ChatContentPageState extends State<_ChatContentPage> {
return ClipRect(
child: Padding(
padding: AIChatUILayout.safeAreaInsets(context),
child: Column(
children: [
BlocSelector<ChatBloc, ChatState, LoadingState>(
selector: (state) => state.streamingStatus,
builder: (context, state) {
return ChatInput(
chatId: widget.view.id,
onSendPressed: (message) =>
onSendPressed(context, message.text),
isStreaming: state != const LoadingState.finish(),
onStopStreaming: () {
context.read<ChatBloc>().add(const ChatEvent.stopStream());
},
);
},
),
const VSpace(6),
Opacity(
opacity: 0.6,
child: FlowyText(
LocaleKeys.chat_aiMistakePrompt.tr(),
fontSize: 12,
child: BlocBuilder<ChatInputBloc, ChatInputState>(
builder: (context, state) {
return state.aiType.when(
appflowyAI: () => Column(
children: [
BlocSelector<ChatBloc, ChatState, LoadingState>(
selector: (state) => state.streamingStatus,
builder: (context, state) {
return ChatInput(
chatId: widget.view.id,
onSendPressed: (message) =>
onSendPressed(context, message.text),
isStreaming: state != const LoadingState.finish(),
onStopStreaming: () {
context
.read<ChatBloc>()
.add(const ChatEvent.stopStream());
},
);
},
),
const VSpace(6),
Opacity(
opacity: 0.6,
child: FlowyText(
LocaleKeys.chat_aiMistakePrompt.tr(),
fontSize: 12,
),
),
],
),
),
],
localAI: () => const SizedBox.shrink(),
);
},
),
),
);

View File

@ -49,7 +49,9 @@ class _ChatInputState extends State<ChatInput> {
return KeyEventResult.ignored;
}
if (event is KeyDownEvent) {
_handleSendPressed();
if (!widget.isStreaming) {
_handleSendPressed();
}
}
return KeyEventResult.handled;
} else {
@ -78,17 +80,13 @@ class _ChatInputState extends State<ChatInput> {
}
void _handleSendPressed() {
if (widget.isStreaming) {
widget.onStopStreaming();
} else {
final trimmedText = _textController.text.trim();
if (trimmedText != '') {
final partialText = types.PartialText(text: trimmedText);
widget.onSendPressed(partialText);
final trimmedText = _textController.text.trim();
if (trimmedText != '') {
final partialText = types.PartialText(text: trimmedText);
widget.onSendPressed(partialText);
if (widget.options.inputClearMode == InputClearMode.always) {
_textController.clear();
}
if (widget.options.inputClearMode == InputClearMode.always) {
_textController.clear();
}
}
}
@ -139,7 +137,6 @@ class _ChatInputState extends State<ChatInput> {
padding: textPadding,
child: TextField(
controller: _textController,
readOnly: widget.isStreaming,
focusNode: _inputFocusNode,
decoration: InputDecoration(
border: InputBorder.none,
@ -151,7 +148,6 @@ class _ChatInputState extends State<ChatInput> {
style: TextStyle(
color: AFThemeExtension.of(context).textColor,
),
enabled: widget.options.enabled,
autocorrect: widget.options.autocorrect,
autofocus: widget.options.autofocus,
enableSuggestions: widget.options.enableSuggestions,
@ -176,7 +172,10 @@ class _ChatInputState extends State<ChatInput> {
padding: buttonPadding,
child: AccessoryButton(
onSendPressed: () {
_handleSendPressed();
if (!widget.isStreaming) {
widget.onStopStreaming();
_handleSendPressed();
}
},
onStopStreaming: () {
widget.onStopStreaming();

View File

@ -4,6 +4,7 @@ import 'dart:isolate';
import 'package:appflowy/plugins/ai_chat/application/chat_bloc.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
@ -40,7 +41,7 @@ class DownloadModelBloc extends Bloc<DownloadModelEvent, DownloadModelState> {
add(const DownloadModelEvent.downloadFinish());
},
onError: (err) {
// emit(state.copyWith(downloadError: err));
Log.error(err);
},
);
@ -67,6 +68,12 @@ class DownloadModelBloc extends Bloc<DownloadModelEvent, DownloadModelState> {
},
);
}
@override
Future<void> close() async {
await state.downloadStream?.dispose();
return super.close();
}
}
@freezed

View File

@ -1,253 +1,93 @@
import 'dart:async';
import 'package:appflowy/plugins/ai_chat/application/chat_bloc.dart';
import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
part 'local_ai_bloc.freezed.dart';
class LocalAISettingBloc
extends Bloc<LocalAISettingEvent, LocalAISettingState> {
LocalAISettingBloc()
: listener = LocalLLMListener(),
super(const LocalAISettingState()) {
listener.start(
stateCallback: (newState) {
if (!isClosed) {
add(LocalAISettingEvent.updateLLMRunningState(newState.state));
}
},
);
on<LocalAISettingEvent>(_handleEvent);
class LocalAIToggleBloc extends Bloc<LocalAIToggleEvent, LocalAIToggleState> {
LocalAIToggleBloc() : super(const LocalAIToggleState()) {
on<LocalAIToggleEvent>(_handleEvent);
}
final LocalLLMListener listener;
/// Handles incoming events and dispatches them to the appropriate handler.
Future<void> _handleEvent(
LocalAISettingEvent event,
Emitter<LocalAISettingState> emit,
LocalAIToggleEvent event,
Emitter<LocalAIToggleState> emit,
) async {
await event.when(
started: _handleStarted,
didLoadModelInfo: (FlowyResult<LLMModelInfoPB, FlowyError> result) {
result.fold(
(modelInfo) {
_fetchCurremtLLMState();
emit(
state.copyWith(
modelInfo: modelInfo,
models: modelInfo.models,
selectedLLMModel: modelInfo.selectedModel,
fetchModelInfoState: const LoadingState.finish(),
),
);
},
(err) {
emit(
state.copyWith(
fetchModelInfoState: LoadingState.finish(error: err),
),
);
},
);
started: () async {
final result = await ChatEventGetLocalAIState().send();
_handleResult(emit, result);
},
selectLLMConfig: (LLMModelPB llmModel) async {
final result = await ChatEventUpdateLocalLLM(llmModel).send();
result.fold(
(llmResource) {
// If all resources are downloaded, show reload plugin
if (llmResource.pendingResources.isNotEmpty) {
emit(
state.copyWith(
selectedLLMModel: llmModel,
localAIInfo: LocalAIProgress.showDownload(
llmResource,
llmModel,
),
selectLLMState: const LoadingState.finish(),
),
);
} else {
emit(
state.copyWith(
selectedLLMModel: llmModel,
selectLLMState: const LoadingState.finish(),
localAIInfo: const LocalAIProgress.checkPluginState(),
),
);
}
},
(err) {
emit(
state.copyWith(
selectLLMState: LoadingState.finish(error: err),
),
);
},
);
},
refreshLLMState: (LocalModelResourcePB llmResource) {
if (state.selectedLLMModel == null) {
Log.error(
'Unexpected null selected config. It should be set already',
);
return;
}
// reload plugin if all resources are downloaded
if (llmResource.pendingResources.isEmpty) {
emit(
state.copyWith(
localAIInfo: const LocalAIProgress.checkPluginState(),
),
);
} else {
if (state.selectedLLMModel != null) {
// Go to download page if the selected model is downloading
if (llmResource.isDownloading) {
emit(
state.copyWith(
localAIInfo:
LocalAIProgress.startDownloading(state.selectedLLMModel!),
selectLLMState: const LoadingState.finish(),
),
);
return;
} else {
emit(
state.copyWith(
localAIInfo: LocalAIProgress.showDownload(
llmResource,
state.selectedLLMModel!,
),
selectLLMState: const LoadingState.finish(),
),
);
}
}
}
},
startDownloadModel: (LLMModelPB llmModel) {
toggle: () async {
emit(
state.copyWith(
localAIInfo: LocalAIProgress.startDownloading(llmModel),
selectLLMState: const LoadingState.finish(),
pageIndicator: const LocalAIToggleStateIndicator.loading(),
),
);
unawaited(
ChatEventToggleLocalAI().send().then(
(result) {
if (!isClosed) {
add(LocalAIToggleEvent.handleResult(result));
}
},
),
);
},
cancelDownload: () async {
final _ = await ChatEventCancelDownloadLLMResource().send();
_fetchCurremtLLMState();
handleResult: (result) {
_handleResult(emit, result);
},
finishDownload: () async {
);
}
void _handleResult(
Emitter<LocalAIToggleState> emit,
FlowyResult<LocalAIPB, FlowyError> result,
) {
result.fold(
(localAI) {
emit(
state.copyWith(localAIInfo: const LocalAIProgress.finishDownload()),
state.copyWith(
pageIndicator: LocalAIToggleStateIndicator.ready(localAI.enabled),
),
);
},
updateLLMRunningState: (RunningStatePB newRunningState) {
if (newRunningState == RunningStatePB.Stopped) {
emit(
state.copyWith(
runningState: newRunningState,
localAIInfo: const LocalAIProgress.checkPluginState(),
),
);
} else {
emit(state.copyWith(runningState: newRunningState));
}
},
);
}
void _fetchCurremtLLMState() async {
final result = await ChatEventGetLocalLLMState().send();
result.fold(
(llmResource) {
if (!isClosed) {
add(LocalAISettingEvent.refreshLLMState(llmResource));
}
},
(err) {
Log.error(err);
emit(
state.copyWith(
pageIndicator: LocalAIToggleStateIndicator.error(err),
),
);
},
);
}
/// Handles the event to fetch local AI settings when the application starts.
Future<void> _handleStarted() async {
final result = await ChatEventRefreshLocalAIModelInfo().send();
if (!isClosed) {
add(LocalAISettingEvent.didLoadModelInfo(result));
}
}
@override
Future<void> close() async {
await listener.stop();
return super.close();
}
}
@freezed
class LocalAISettingEvent with _$LocalAISettingEvent {
const factory LocalAISettingEvent.started() = _Started;
const factory LocalAISettingEvent.didLoadModelInfo(
FlowyResult<LLMModelInfoPB, FlowyError> result,
) = _ModelInfo;
const factory LocalAISettingEvent.selectLLMConfig(LLMModelPB config) =
_SelectLLMConfig;
const factory LocalAISettingEvent.refreshLLMState(
LocalModelResourcePB llmResource,
) = _RefreshLLMResource;
const factory LocalAISettingEvent.startDownloadModel(LLMModelPB llmModel) =
_StartDownloadModel;
const factory LocalAISettingEvent.cancelDownload() = _CancelDownload;
const factory LocalAISettingEvent.finishDownload() = _FinishDownload;
const factory LocalAISettingEvent.updateLLMRunningState(
RunningStatePB newRunningState,
) = _RunningState;
class LocalAIToggleEvent with _$LocalAIToggleEvent {
const factory LocalAIToggleEvent.started() = _Started;
const factory LocalAIToggleEvent.toggle() = _Toggle;
const factory LocalAIToggleEvent.handleResult(
FlowyResult<LocalAIPB, FlowyError> result,
) = _HandleResult;
}
@freezed
class LocalAISettingState with _$LocalAISettingState {
const factory LocalAISettingState({
LLMModelInfoPB? modelInfo,
LLMModelPB? selectedLLMModel,
LocalAIProgress? localAIInfo,
@Default(LoadingState.loading()) LoadingState fetchModelInfoState,
@Default(LoadingState.loading()) LoadingState selectLLMState,
@Default([]) List<LLMModelPB> models,
@Default(RunningStatePB.Connecting) RunningStatePB runningState,
}) = _LocalAISettingState;
class LocalAIToggleState with _$LocalAIToggleState {
const factory LocalAIToggleState({
@Default(LocalAIToggleStateIndicator.loading())
LocalAIToggleStateIndicator pageIndicator,
}) = _LocalAIToggleState;
}
@freezed
class LocalAIProgress with _$LocalAIProgress {
// when user select a new model, it will call requestDownload
const factory LocalAIProgress.requestDownloadInfo(
LocalModelResourcePB llmResource,
LLMModelPB llmModel,
) = _RequestDownload;
// when user comes back to the setting page, it will auto detect current llm state
const factory LocalAIProgress.showDownload(
LocalModelResourcePB llmResource,
LLMModelPB llmModel,
) = _DownloadNeeded;
class LocalAIToggleStateIndicator with _$LocalAIToggleStateIndicator {
// when start downloading the model
const factory LocalAIProgress.startDownloading(LLMModelPB llmModel) =
_Downloading;
const factory LocalAIProgress.finishDownload() = _Finish;
const factory LocalAIProgress.checkPluginState() = _PluginState;
const factory LocalAIToggleStateIndicator.error(FlowyError error) = _OnError;
const factory LocalAIToggleStateIndicator.ready(bool isEnabled) = _Ready;
const factory LocalAIToggleStateIndicator.loading() = _Loading;
}

View File

@ -0,0 +1,261 @@
import 'dart:async';
import 'package:appflowy/plugins/ai_chat/application/chat_bloc.dart';
import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
part 'local_ai_chat_bloc.freezed.dart';
/// Bloc backing the local-AI chat settings page.
///
/// Listens to [LocalLLMListener] for plugin running-state changes and drives
/// the model selection / download flow through `ChatEvent*` backend requests.
class LocalAIChatSettingBloc
    extends Bloc<LocalAIChatSettingEvent, LocalAIChatSettingState> {
  LocalAIChatSettingBloc()
      : listener = LocalLLMListener(),
        super(const LocalAIChatSettingState()) {
    listener.start(
      stateCallback: (newState) {
        // Callbacks may arrive after close(); guard before add().
        if (!isClosed) {
          add(LocalAIChatSettingEvent.updateLLMRunningState(newState.state));
        }
      },
    );
    on<LocalAIChatSettingEvent>(_handleEvent);
  }

  // Receives plugin running-state notifications from the backend.
  final LocalLLMListener listener;

  /// Handles incoming events and dispatches them to the appropriate handler.
  Future<void> _handleEvent(
    LocalAIChatSettingEvent event,
    Emitter<LocalAIChatSettingState> emit,
  ) async {
    await event.when(
      refreshAISetting: _handleStarted,
      didLoadModelInfo: (FlowyResult<LLMModelInfoPB, FlowyError> result) {
        result.fold(
          (modelInfo) {
            // Kick off a background refresh of the LLM resource state; the
            // result re-enters the bloc as a refreshLLMState event.
            unawaited(_fetchCurrentLLMState());
            emit(
              state.copyWith(
                modelInfo: modelInfo,
                models: modelInfo.models,
                selectedLLMModel: modelInfo.selectedModel,
                fetchModelInfoState: const LoadingState.finish(),
              ),
            );
          },
          (err) {
            emit(
              state.copyWith(
                fetchModelInfoState: LoadingState.finish(error: err),
              ),
            );
          },
        );
      },
      selectLLMConfig: (LLMModelPB llmModel) async {
        final result = await ChatEventUpdateLocalLLM(llmModel).send();
        result.fold(
          (llmResource) {
            // If resources are still pending, show the download panel;
            // otherwise go straight to checking the plugin state.
            if (llmResource.pendingResources.isNotEmpty) {
              emit(
                state.copyWith(
                  selectedLLMModel: llmModel,
                  localAIInfo: LocalAIProgress.showDownload(
                    llmResource,
                    llmModel,
                  ),
                  selectLLMState: const LoadingState.finish(),
                ),
              );
            } else {
              emit(
                state.copyWith(
                  selectedLLMModel: llmModel,
                  selectLLMState: const LoadingState.finish(),
                  localAIInfo: const LocalAIProgress.checkPluginState(),
                ),
              );
            }
          },
          (err) {
            emit(
              state.copyWith(
                selectLLMState: LoadingState.finish(error: err),
              ),
            );
          },
        );
      },
      refreshLLMState: (LocalModelResourcePB llmResource) {
        if (state.selectedLLMModel == null) {
          Log.error(
            'Unexpected null selected config. It should be set already',
          );
          return;
        }
        // Reload the plugin if all resources are downloaded.
        if (llmResource.pendingResources.isEmpty) {
          emit(
            state.copyWith(
              localAIInfo: const LocalAIProgress.checkPluginState(),
            ),
          );
        } else {
          if (state.selectedLLMModel != null) {
            // Go to the download page if the selected model is downloading.
            if (llmResource.isDownloading) {
              emit(
                state.copyWith(
                  localAIInfo:
                      LocalAIProgress.startDownloading(state.selectedLLMModel!),
                  selectLLMState: const LoadingState.finish(),
                ),
              );
              return;
            } else {
              emit(
                state.copyWith(
                  localAIInfo: LocalAIProgress.showDownload(
                    llmResource,
                    state.selectedLLMModel!,
                  ),
                  selectLLMState: const LoadingState.finish(),
                ),
              );
            }
          }
        }
      },
      startDownloadModel: (LLMModelPB llmModel) {
        emit(
          state.copyWith(
            localAIInfo: LocalAIProgress.startDownloading(llmModel),
            selectLLMState: const LoadingState.finish(),
          ),
        );
      },
      cancelDownload: () async {
        final _ = await ChatEventCancelDownloadLLMResource().send();
        // Re-query the resource state so the UI reflects the cancellation.
        unawaited(_fetchCurrentLLMState());
      },
      finishDownload: () async {
        emit(
          state.copyWith(localAIInfo: const LocalAIProgress.finishDownload()),
        );
      },
      updateLLMRunningState: (RunningStatePB newRunningState) {
        if (newRunningState == RunningStatePB.Stopped) {
          // A stopped plugin requires a state re-check before chatting.
          emit(
            state.copyWith(
              runningState: newRunningState,
              localAIInfo: const LocalAIProgress.checkPluginState(),
            ),
          );
        } else {
          emit(state.copyWith(runningState: newRunningState));
        }
      },
    );
  }

  /// Fetches the current local LLM resource state and feeds it back into the
  /// bloc as a [LocalAIChatSettingEvent.refreshLLMState] event.
  ///
  /// Renamed from `_fetchCurremtLLMState` (typo) and changed from `void
  /// async` to `Future<void>` so call sites can mark it `unawaited`
  /// explicitly.
  Future<void> _fetchCurrentLLMState() async {
    final result = await ChatEventGetLocalLLMState().send();
    result.fold(
      (llmResource) {
        if (!isClosed) {
          add(LocalAIChatSettingEvent.refreshLLMState(llmResource));
        }
      },
      (err) {
        Log.error(err);
      },
    );
  }

  /// Handles the event to fetch local AI settings when the application starts.
  Future<void> _handleStarted() async {
    final result = await ChatEventRefreshLocalAIModelInfo().send();
    if (!isClosed) {
      add(LocalAIChatSettingEvent.didLoadModelInfo(result));
    }
  }

  @override
  Future<void> close() async {
    // Stop the listener first so no callback reaches a closed bloc.
    await listener.stop();
    return super.close();
  }
}
/// Events consumed by [LocalAIChatSettingBloc].
@freezed
class LocalAIChatSettingEvent with _$LocalAIChatSettingEvent {
/// Reloads the local AI model info from the backend.
const factory LocalAIChatSettingEvent.refreshAISetting() = _RefreshAISetting;
/// Carries the result of the model-info fetch back into the bloc.
const factory LocalAIChatSettingEvent.didLoadModelInfo(
FlowyResult<LLMModelInfoPB, FlowyError> result,
) = _ModelInfo;
/// Selects [config] as the active local LLM model.
const factory LocalAIChatSettingEvent.selectLLMConfig(LLMModelPB config) =
_SelectLLMConfig;
/// Refreshes UI state from the current LLM resource (download) status.
const factory LocalAIChatSettingEvent.refreshLLMState(
LocalModelResourcePB llmResource,
) = _RefreshLLMResource;
/// Begins downloading the resources for [llmModel].
const factory LocalAIChatSettingEvent.startDownloadModel(
LLMModelPB llmModel,
) = _StartDownloadModel;
/// Cancels an in-progress model download.
const factory LocalAIChatSettingEvent.cancelDownload() = _CancelDownload;
/// Marks the model download as complete.
const factory LocalAIChatSettingEvent.finishDownload() = _FinishDownload;
/// Carries plugin running-state updates pushed by the backend listener.
const factory LocalAIChatSettingEvent.updateLLMRunningState(
RunningStatePB newRunningState,
) = _RunningState;
}
/// State of [LocalAIChatSettingBloc].
@freezed
class LocalAIChatSettingState with _$LocalAIChatSettingState {
const factory LocalAIChatSettingState({
// Model catalog returned by the backend; null until loaded.
LLMModelInfoPB? modelInfo,
// Currently selected model; null until the model info is loaded.
LLMModelPB? selectedLLMModel,
// Drives which download/plugin panel the settings UI shows.
LocalAIProgress? localAIInfo,
@Default(LoadingState.loading()) LoadingState fetchModelInfoState,
@Default(LoadingState.loading()) LoadingState selectLLMState,
@Default([]) List<LLMModelPB> models,
@Default(RunningStatePB.Connecting) RunningStatePB runningState,
}) = _LocalAIChatSettingState;
}
// @freezed
// class LocalChatAIStateIndicator with _$LocalChatAIStateIndicator {
// // when start downloading the model
// const factory LocalChatAIStateIndicator.error(FlowyError error) = _OnError;
// const factory LocalChatAIStateIndicator.ready(bool isEnabled) = _Ready;
// }
/// UI progress phases for preparing a local AI model.
@freezed
class LocalAIProgress with _$LocalAIProgress {
// when the user selects a new model, it will call requestDownload
const factory LocalAIProgress.requestDownloadInfo(
LocalModelResourcePB llmResource,
LLMModelPB llmModel,
) = _RequestDownload;
// when the user comes back to the setting page, it will auto-detect the
// current llm state and show the download panel if resources are pending
const factory LocalAIProgress.showDownload(
LocalModelResourcePB llmResource,
LLMModelPB llmModel,
) = _DownloadNeeded;
// when the model download has started
const factory LocalAIProgress.startDownloading(LLMModelPB llmModel) =
_Downloading;
// when the download has completed
const factory LocalAIProgress.finishDownload() = _Finish;
// when the plugin state should be (re)checked
const factory LocalAIProgress.checkPluginState() = _PluginState;
}

View File

@ -0,0 +1,95 @@
import 'dart:async';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
part 'local_ai_chat_toggle_bloc.freezed.dart';
/// Bloc behind the "enable local AI chat" toggle.
///
/// Fetches the current enabled state on `started` and flips it via the
/// backend on `toggle`, surfacing progress through the page indicator.
class LocalAIChatToggleBloc
extends Bloc<LocalAIChatToggleEvent, LocalAIChatToggleState> {
LocalAIChatToggleBloc() : super(const LocalAIChatToggleState()) {
on<LocalAIChatToggleEvent>(_handleEvent);
}
Future<void> _handleEvent(
LocalAIChatToggleEvent event,
Emitter<LocalAIChatToggleState> emit,
) async {
await event.when(
started: () async {
// Load the current local-AI-chat enabled state from the backend.
final result = await ChatEventGetLocalAIChatState().send();
_handleResult(emit, result);
},
toggle: () async {
// Show a loader while the backend flips the setting.
emit(
state.copyWith(
pageIndicator: const LocalAIChatToggleStateIndicator.loading(),
),
);
// Fire-and-forget: the result re-enters the bloc as a handleResult
// event so it is emitted from a live handler rather than this one.
unawaited(
ChatEventToggleLocalAIChat().send().then(
(result) {
if (!isClosed) {
add(LocalAIChatToggleEvent.handleResult(result));
}
},
),
);
},
handleResult: (result) {
_handleResult(emit, result);
},
);
}
// Maps a backend result onto the ready/error page indicator.
void _handleResult(
Emitter<LocalAIChatToggleState> emit,
FlowyResult<LocalAIChatPB, FlowyError> result,
) {
result.fold(
(localAI) {
emit(
state.copyWith(
pageIndicator:
LocalAIChatToggleStateIndicator.ready(localAI.enabled),
),
);
},
(err) {
emit(
state.copyWith(
pageIndicator: LocalAIChatToggleStateIndicator.error(err),
),
);
},
);
}
}
/// Events consumed by [LocalAIChatToggleBloc].
@freezed
class LocalAIChatToggleEvent with _$LocalAIChatToggleEvent {
/// Loads the current enabled state from the backend.
const factory LocalAIChatToggleEvent.started() = _Started;
/// Flips the local-AI-chat enabled state.
const factory LocalAIChatToggleEvent.toggle() = _Toggle;
/// Carries a backend result back into the bloc for emission.
const factory LocalAIChatToggleEvent.handleResult(
FlowyResult<LocalAIChatPB, FlowyError> result,
) = _HandleResult;
}
/// State of [LocalAIChatToggleBloc].
@freezed
class LocalAIChatToggleState with _$LocalAIChatToggleState {
const factory LocalAIChatToggleState({
// What the toggle UI should render: loading, ready, or error.
@Default(LocalAIChatToggleStateIndicator.loading())
LocalAIChatToggleStateIndicator pageIndicator,
}) = _LocalAIChatToggleState;
}
/// What the toggle page shows: an error, the ready/enabled state, or a loader.
@freezed
class LocalAIChatToggleStateIndicator with _$LocalAIChatToggleStateIndicator {
const factory LocalAIChatToggleStateIndicator.error(FlowyError error) =
_OnError;
const factory LocalAIChatToggleStateIndicator.ready(bool isEnabled) = _Ready;
const factory LocalAIChatToggleStateIndicator.loading() = _Loading;
}

View File

@ -2,19 +2,19 @@ import 'dart:async';
import 'dart:typed_data';
import 'package:appflowy/plugins/ai_chat/application/chat_notification.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/notification.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/protobuf.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-notification/subject.pb.dart';
import 'package:appflowy_backend/rust_stream.dart';
import 'package:appflowy_result/appflowy_result.dart';
typedef PluginStateCallback = void Function(PluginStatePB state);
typedef PluginStateCallback = void Function(LocalAIPluginStatePB state);
typedef LocalAIChatCallback = void Function(LocalAIChatPB chatState);
class LocalLLMListener {
LocalLLMListener() {
_parser =
ChatNotificationParser(id: "appflowy_chat_plugin", callback: _callback);
ChatNotificationParser(id: "appflowy_ai_plugin", callback: _callback);
_subscription = RustStreamReceiver.listen(
(observable) => _parser?.parse(observable),
);
@ -24,12 +24,15 @@ class LocalLLMListener {
ChatNotificationParser? _parser;
PluginStateCallback? stateCallback;
LocalAIChatCallback? chatStateCallback;
void Function()? finishStreamingCallback;
void start({
PluginStateCallback? stateCallback,
LocalAIChatCallback? chatStateCallback,
}) {
this.stateCallback = stateCallback;
this.chatStateCallback = chatStateCallback;
}
void _callback(
@ -39,7 +42,10 @@ class LocalLLMListener {
result.map((r) {
switch (ty) {
case ChatNotification.UpdateChatPluginState:
stateCallback?.call(PluginStatePB.fromBuffer(r));
stateCallback?.call(LocalAIPluginStatePB.fromBuffer(r));
break;
case ChatNotification.UpdateLocalChatAI:
chatStateCallback?.call(LocalAIChatPB.fromBuffer(r));
break;
default:
break;

View File

@ -11,7 +11,11 @@ part 'plugin_state_bloc.freezed.dart';
class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
PluginStateBloc()
: listener = LocalLLMListener(),
super(const PluginStateState(action: PluginStateAction.init())) {
super(
const PluginStateState(
action: PluginStateAction.init(),
),
) {
listener.start(
stateCallback: (pluginState) {
if (!isClosed) {
@ -37,7 +41,7 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
) async {
await event.when(
started: () async {
final result = await ChatEventGetPluginState().send();
final result = await ChatEventGetLocalAIPluginState().send();
result.fold(
(pluginState) {
if (!isClosed) {
@ -47,20 +51,24 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
(err) => Log.error(err.toString()),
);
},
updateState: (PluginStatePB pluginState) {
updateState: (LocalAIPluginStatePB pluginState) {
switch (pluginState.state) {
case RunningStatePB.Connecting:
emit(
const PluginStateState(action: PluginStateAction.loadingPlugin()),
);
case RunningStatePB.Running:
emit(const PluginStateState(action: PluginStateAction.ready()));
break;
default:
emit(
state.copyWith(action: const PluginStateAction.reloadRequired()),
state.copyWith(action: const PluginStateAction.restart()),
);
break;
}
},
restartLocalAI: () {
ChatEventRestartLocalAI().send();
ChatEventRestartLocalAIChat().send();
},
);
}
@ -69,7 +77,7 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
@freezed
class PluginStateEvent with _$PluginStateEvent {
const factory PluginStateEvent.started() = _Started;
const factory PluginStateEvent.updateState(PluginStatePB pluginState) =
const factory PluginStateEvent.updateState(LocalAIPluginStatePB pluginState) =
_UpdatePluginState;
const factory PluginStateEvent.restartLocalAI() = _RestartLocalAI;
}
@ -83,6 +91,7 @@ class PluginStateState with _$PluginStateState {
@freezed
class PluginStateAction with _$PluginStateAction {
const factory PluginStateAction.init() = _Init;
const factory PluginStateAction.loadingPlugin() = _LoadingPlugin;
const factory PluginStateAction.ready() = _Ready;
const factory PluginStateAction.reloadRequired() = _ReloadRequired;
const factory PluginStateAction.restart() = _Restart;
}

View File

@ -1,6 +1,6 @@
import 'package:appflowy/generated/flowy_svgs.g.dart';
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_bloc.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_bloc.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flowy_infra_ui/style_widget/text.dart';
@ -22,7 +22,7 @@ class InitLocalAIIndicator extends StatelessWidget {
),
child: Padding(
padding: const EdgeInsets.symmetric(horizontal: 6, vertical: 4),
child: BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
child: BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
builder: (context, state) {
switch (state.runningState) {
case RunningStatePB.Connecting:

View File

@ -1,73 +1,168 @@
import 'package:appflowy/generated/flowy_svgs.g.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_bloc.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_bloc.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_toggle_bloc.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/downloading.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/init_local_ai.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/plugin_state.dart';
import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
import 'package:appflowy/workspace/presentation/widgets/toggle/toggle.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:expandable/expandable.dart';
import 'package:flowy_infra_ui/flowy_infra_ui.dart';
import 'package:flutter/material.dart';
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/workspace/application/settings/ai/settings_ai_bloc.dart';
import 'package:appflowy/workspace/presentation/settings/shared/af_dropdown_menu_entry.dart';
import 'package:appflowy/workspace/presentation/settings/shared/settings_dropdown.dart';
import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
class LocalModelConfig extends StatelessWidget {
const LocalModelConfig({super.key});
class LocalAIChatSetting extends StatelessWidget {
const LocalAIChatSetting({super.key});
@override
Widget build(BuildContext context) {
return BlocBuilder<SettingsAIBloc, SettingsAIState>(
builder: (context, state) {
if (state.aiSettings == null) {
return const SizedBox.shrink();
}
return MultiBlocProvider(
providers: [
BlocProvider(create: (context) => LocalAIChatSettingBloc()),
BlocProvider(
create: (context) => LocalAIChatToggleBloc()
..add(const LocalAIChatToggleEvent.started()),
),
],
child: ExpandableNotifier(
child: BlocListener<LocalAIChatToggleBloc, LocalAIChatToggleState>(
listener: (context, state) {
// Listen to the toggle state and expand the panel if the state is ready.
final controller = ExpandableController.of(
context,
required: true,
)!;
if (state.aiSettings!.aiModel != AIModelPB.LocalAIModel) {
return const SizedBox.shrink();
}
return BlocProvider(
create: (context) =>
LocalAISettingBloc()..add(const LocalAISettingEvent.started()),
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 6),
child: Column(
children: [
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Flexible(
child: FlowyText.medium(
LocaleKeys.settings_aiPage_keys_llmModel.tr(),
fontSize: 14,
),
),
const Spacer(),
BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
builder: (context, state) {
return state.fetchModelInfoState.when(
loading: () =>
const CircularProgressIndicator.adaptive(),
finish: (err) {
return (err == null)
? const _SelectLocalModelDropdownMenu()
: const SizedBox.shrink();
},
// Neet to wrap with WidgetsBinding.instance.addPostFrameCallback otherwise the
// ExpandablePanel not expanded sometimes. Maybe because the ExpandablePanel is not
// built yet when the listener is called.
WidgetsBinding.instance.addPostFrameCallback(
(_) {
state.pageIndicator.when(
error: (_) => controller.expanded = false,
ready: (enabled) {
controller.expanded = enabled;
context.read<LocalAIChatSettingBloc>().add(
const LocalAIChatSettingEvent.refreshAISetting(),
);
},
),
],
),
const IntrinsicHeight(child: _LocalLLMInfoWidget()),
],
},
loading: () => controller.expanded = false,
);
},
debugLabel: 'LocalAI.showLocalAIChatSetting',
);
},
child: ExpandablePanel(
theme: const ExpandableThemeData(
headerAlignment: ExpandablePanelHeaderAlignment.center,
tapBodyToCollapse: false,
hasIcon: false,
tapBodyToExpand: false,
tapHeaderToExpand: false,
),
header: const LocalAIChatSettingHeader(),
collapsed: const SizedBox.shrink(),
expanded: Padding(
padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 6),
child: Column(
crossAxisAlignment: CrossAxisAlignment.start,
children: [
Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Flexible(
child: FlowyText.medium(
LocaleKeys.settings_aiPage_keys_llmModel.tr(),
fontSize: 14,
),
),
const Spacer(),
BlocBuilder<LocalAIChatSettingBloc,
LocalAIChatSettingState>(
builder: (context, state) {
return state.fetchModelInfoState.when(
loading: () => Expanded(
child: Row(
children: [
Flexible(
child: FlowyText(
LocaleKeys
.settings_aiPage_keys_fetchLocalModel
.tr(),
),
),
const Spacer(),
const CircularProgressIndicator.adaptive(),
],
),
),
finish: (err) {
return (err == null)
? const _SelectLocalModelDropdownMenu()
: const SizedBox.shrink();
},
);
},
),
],
),
const IntrinsicHeight(child: _LocalLLMInfoWidget()),
],
),
),
),
),
),
);
}
}
class LocalAIChatSettingHeader extends StatelessWidget {
const LocalAIChatSettingHeader({super.key});
@override
Widget build(BuildContext context) {
return BlocBuilder<LocalAIChatToggleBloc, LocalAIChatToggleState>(
builder: (context, state) {
return state.pageIndicator.when(
error: (error) {
return const SizedBox.shrink();
},
loading: () {
return Row(
children: [
FlowyText(
LocaleKeys.settings_aiPage_keys_localAIStart.tr(),
),
const Spacer(),
const CircularProgressIndicator.adaptive(),
const HSpace(8),
],
);
},
ready: (isEnabled) {
return Row(
children: [
const FlowyText('Enable Local AI Chat'),
const Spacer(),
Toggle(
value: isEnabled,
onChanged: (value) {
context
.read<LocalAIChatToggleBloc>()
.add(const LocalAIChatToggleEvent.toggle());
},
),
],
);
},
);
},
);
@ -79,13 +174,13 @@ class _SelectLocalModelDropdownMenu extends StatelessWidget {
@override
Widget build(BuildContext context) {
return BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
return BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
builder: (context, state) {
return Flexible(
child: SettingsDropdown<LLMModelPB>(
key: const Key('_SelectLocalModelDropdownMenu'),
onChanged: (model) => context.read<LocalAISettingBloc>().add(
LocalAISettingEvent.selectLLMConfig(model),
onChanged: (model) => context.read<LocalAIChatSettingBloc>().add(
LocalAIChatSettingEvent.selectLLMConfig(model),
),
selectedOption: state.selectedLLMModel!,
options: state.models
@ -110,7 +205,7 @@ class _LocalLLMInfoWidget extends StatelessWidget {
@override
Widget build(BuildContext context) {
return BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
return BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
builder: (context, state) {
final error = errorFromState(state);
if (error == null) {
@ -137,11 +232,11 @@ class _LocalLLMInfoWidget extends StatelessWidget {
key: UniqueKey(),
llmModel: llmModel,
onFinish: () => context
.read<LocalAISettingBloc>()
.add(const LocalAISettingEvent.finishDownload()),
.read<LocalAIChatSettingBloc>()
.add(const LocalAIChatSettingEvent.finishDownload()),
onCancel: () => context
.read<LocalAISettingBloc>()
.add(const LocalAISettingEvent.cancelDownload()),
.read<LocalAIChatSettingBloc>()
.add(const LocalAIChatSettingEvent.cancelDownload()),
);
},
finishDownload: () => const InitLocalAIIndicator(),
@ -149,16 +244,19 @@ class _LocalLLMInfoWidget extends StatelessWidget {
);
return Padding(
padding: const EdgeInsets.only(top: 14),
padding: const EdgeInsets.only(top: 8),
child: child,
);
} else {
return const SizedBox.shrink();
}
} else {
return FlowyText(
error.msg,
maxLines: 10,
return Opacity(
opacity: 0.5,
child: FlowyText(
error.msg,
maxLines: 10,
),
);
}
},
@ -180,15 +278,15 @@ class _LocalLLMInfoWidget extends StatelessWidget {
return _LLMModelDownloadDialog(
llmResource: llmResource,
onOkPressed: () {
context.read<LocalAISettingBloc>().add(
LocalAISettingEvent.startDownloadModel(
context.read<LocalAIChatSettingBloc>().add(
LocalAIChatSettingEvent.startDownloadModel(
llmModel,
),
);
},
onCancelPressed: () {
context.read<LocalAISettingBloc>().add(
const LocalAISettingEvent.cancelDownload(),
context.read<LocalAIChatSettingBloc>().add(
const LocalAIChatSettingEvent.cancelDownload(),
);
},
);
@ -199,7 +297,7 @@ class _LocalLLMInfoWidget extends StatelessWidget {
);
}
FlowyError? errorFromState(LocalAISettingState state) {
FlowyError? errorFromState(LocalAIChatSettingState state) {
final err = state.fetchModelInfoState.when(
loading: () => null,
finish: (err) => err,
@ -261,7 +359,7 @@ class _ShowDownloadIndicator extends StatelessWidget {
@override
Widget build(BuildContext context) {
return BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
return BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
builder: (context, state) {
return Row(
children: [
@ -288,15 +386,16 @@ class _ShowDownloadIndicator extends StatelessWidget {
return _LLMModelDownloadDialog(
llmResource: llmResource,
onOkPressed: () {
context.read<LocalAISettingBloc>().add(
LocalAISettingEvent.startDownloadModel(
context.read<LocalAIChatSettingBloc>().add(
LocalAIChatSettingEvent.startDownloadModel(
llmModel,
),
);
},
onCancelPressed: () {
context.read<LocalAISettingBloc>().add(
const LocalAISettingEvent.cancelDownload(),
context.read<LocalAIChatSettingBloc>().add(
const LocalAIChatSettingEvent
.cancelDownload(),
);
},
);

View File

@ -0,0 +1,162 @@
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_bloc.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/local_ai_chat_setting.dart';
import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
import 'package:appflowy/workspace/presentation/widgets/toggle/toggle.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:expandable/expandable.dart';
import 'package:flowy_infra_ui/style_widget/text.dart';
import 'package:flutter/material.dart';
import 'package:appflowy/workspace/application/settings/ai/settings_ai_bloc.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
class LocalAISetting extends StatefulWidget {
const LocalAISetting({super.key});
@override
State<LocalAISetting> createState() => _LocalAISettingState();
}
class _LocalAISettingState extends State<LocalAISetting> {
@override
Widget build(BuildContext context) {
return BlocBuilder<SettingsAIBloc, SettingsAIState>(
builder: (context, state) {
if (state.aiSettings == null) {
return const SizedBox.shrink();
}
return BlocProvider(
create: (context) =>
LocalAIToggleBloc()..add(const LocalAIToggleEvent.started()),
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 6),
child: ExpandableNotifier(
child: BlocListener<LocalAIToggleBloc, LocalAIToggleState>(
listener: (context, state) {
final controller =
ExpandableController.of(context, required: true)!;
state.pageIndicator.when(
error: (_) => controller.expanded = false,
ready: (enabled) => controller.expanded = enabled,
loading: () => controller.expanded = false,
);
},
child: ExpandablePanel(
theme: const ExpandableThemeData(
headerAlignment: ExpandablePanelHeaderAlignment.center,
tapBodyToCollapse: false,
hasIcon: false,
tapBodyToExpand: false,
tapHeaderToExpand: false,
),
header: const LocalAISettingHeader(),
collapsed: const SizedBox.shrink(),
expanded: Column(
children: [
DecoratedBox(
decoration: BoxDecoration(
color: Theme.of(context)
.colorScheme
.surfaceContainerHighest,
borderRadius:
const BorderRadius.all(Radius.circular(4)),
),
child: const Padding(
padding: EdgeInsets.only(
left: 12.0,
top: 6,
bottom: 6,
),
child: LocalAIChatSetting(),
),
),
],
),
),
),
),
),
);
},
);
}
}
class LocalAISettingHeader extends StatelessWidget {
const LocalAISettingHeader({super.key});
@override
Widget build(BuildContext context) {
return BlocBuilder<LocalAIToggleBloc, LocalAIToggleState>(
builder: (context, state) {
return state.pageIndicator.when(
error: (error) {
return const SizedBox.shrink();
},
loading: () {
return const CircularProgressIndicator.adaptive();
},
ready: (isEnabled) {
return Row(
children: [
FlowyText(
LocaleKeys.settings_aiPage_keys_localAIToggleTitle.tr(),
),
const Spacer(),
Toggle(
value: isEnabled,
onChanged: (value) {
if (isEnabled) {
showDialog(
context: context,
barrierDismissible: false,
useRootNavigator: false,
builder: (dialogContext) {
return _ToggleLocalAIDialog(
onOkPressed: () {
context
.read<LocalAIToggleBloc>()
.add(const LocalAIToggleEvent.toggle());
},
onCancelPressed: () {},
);
},
);
} else {
context
.read<LocalAIToggleBloc>()
.add(const LocalAIToggleEvent.toggle());
}
},
),
],
);
},
);
},
);
}
}
class _ToggleLocalAIDialog extends StatelessWidget {
const _ToggleLocalAIDialog({
required this.onOkPressed,
required this.onCancelPressed,
});
final VoidCallback onOkPressed;
final VoidCallback onCancelPressed;
@override
Widget build(BuildContext context) {
return NavigatorOkCancelDialog(
title: LocaleKeys.settings_aiPage_keys_disableLocalAIDialog.tr(),
okTitle: LocaleKeys.button_confirm.tr(),
cancelTitle: LocaleKeys.button_cancel.tr(),
onOkPressed: onOkPressed,
onCancelPressed: onCancelPressed,
titleUpperCase: false,
);
}
}

View File

@ -61,7 +61,6 @@ List<AIModelPB> _availableModels = [
AIModelPB.Claude3Sonnet,
AIModelPB.GPT35,
AIModelPB.GPT4o,
// AIModelPB.LocalAIModel,
];
String _titleForAIModel(AIModelPB model) {
@ -76,8 +75,6 @@ String _titleForAIModel(AIModelPB model) {
return "GPT-3.5";
case AIModelPB.GPT4o:
return "GPT-4o";
case AIModelPB.LocalAIModel:
return "Local";
default:
Log.error("Unknown AI model: $model, fallback to default");
return "Default";

View File

@ -21,7 +21,8 @@ class CheckPluginStateIndicator extends StatelessWidget {
return state.action.when(
init: () => const _InitPlugin(),
ready: () => const _ReadyToUse(),
reloadRequired: () => const _ReloadButton(),
restart: () => const _ReloadButton(),
loadingPlugin: () => const _InitPlugin(),
);
},
),
@ -78,32 +79,29 @@ class _ReadyToUse extends StatelessWidget {
@override
Widget build(BuildContext context) {
return Padding(
padding: const EdgeInsets.symmetric(horizontal: 6, vertical: 4),
child: DecoratedBox(
decoration: const BoxDecoration(
color: Color(0xFFEDF7ED),
borderRadius: BorderRadius.all(
Radius.circular(4),
),
return DecoratedBox(
decoration: const BoxDecoration(
color: Color(0xFFEDF7ED),
borderRadius: BorderRadius.all(
Radius.circular(4),
),
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 6),
child: Row(
children: [
const HSpace(8),
const FlowySvg(
FlowySvgs.download_success_s,
color: Color(0xFF2E7D32),
),
const HSpace(6),
FlowyText(
LocaleKeys.settings_aiPage_keys_localAILoaded.tr(),
fontSize: 11,
color: const Color(0xFF1E4620),
),
],
),
),
child: Padding(
padding: const EdgeInsets.symmetric(vertical: 8),
child: Row(
children: [
const HSpace(8),
const FlowySvg(
FlowySvgs.download_success_s,
color: Color(0xFF2E7D32),
),
const HSpace(6),
FlowyText(
LocaleKeys.settings_aiPage_keys_localAILoaded.tr(),
fontSize: 11,
color: const Color(0xFF1E4620),
),
],
),
),
);

View File

@ -1,4 +1,3 @@
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/local_ai_config.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/model_selection.dart';
import 'package:flutter/material.dart';
@ -44,10 +43,7 @@ class SettingsAIView extends StatelessWidget {
const AIModelSelection(),
];
if (state.aiSettings != null &&
state.aiSettings!.aiModel == AIModelPB.LocalAIModel) {
children.add(const LocalModelConfig());
}
// children.add(const LocalAISetting());
children.add(const _AISearchToggle(value: false));

View File

@ -172,7 +172,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]]
name = "app-error"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bincode",
@ -192,7 +192,7 @@ dependencies = [
[[package]]
name = "appflowy-ai-client"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bytes",
@ -206,7 +206,7 @@ dependencies = [
[[package]]
name = "appflowy-local-ai"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [
"anyhow",
"appflowy-plugin",
@ -225,7 +225,7 @@ dependencies = [
[[package]]
name = "appflowy-plugin"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [
"anyhow",
"cfg-if",
@ -826,7 +826,7 @@ dependencies = [
[[package]]
name = "client-api"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"again",
"anyhow",
@ -876,7 +876,7 @@ dependencies = [
[[package]]
name = "client-api-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"collab-entity",
"collab-rt-entity",
@ -888,7 +888,7 @@ dependencies = [
[[package]]
name = "client-websocket"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"futures-channel",
"futures-util",
@ -1128,7 +1128,7 @@ dependencies = [
[[package]]
name = "collab-rt-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bincode",
@ -1153,7 +1153,7 @@ dependencies = [
[[package]]
name = "collab-rt-protocol"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"async-trait",
@ -1417,7 +1417,7 @@ dependencies = [
"cssparser-macros",
"dtoa-short",
"itoa 1.0.6",
"phf 0.11.2",
"phf 0.8.0",
"smallvec",
]
@ -1528,7 +1528,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
[[package]]
name = "database-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"app-error",
@ -3028,7 +3028,7 @@ dependencies = [
[[package]]
name = "gotrue"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"futures-util",
@ -3045,7 +3045,7 @@ dependencies = [
[[package]]
name = "gotrue-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"app-error",
@ -3477,7 +3477,7 @@ dependencies = [
[[package]]
name = "infra"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bytes",
@ -6021,7 +6021,7 @@ dependencies = [
[[package]]
name = "shared-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"app-error",

View File

@ -53,7 +53,7 @@ collab-user = { version = "0.2" }
# Run the script:
# scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "eebdbcad79a35b07305affdd36f16d9ce95c5a18" }
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "f73b603dc4569ea3f23c41901564b90691ac1aaa" }
[dependencies]
serde_json.workspace = true
@ -128,5 +128,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
# To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" }
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }

View File

@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]]
name = "app-error"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bincode",
@ -183,7 +183,7 @@ dependencies = [
[[package]]
name = "appflowy-ai-client"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bytes",
@ -197,7 +197,7 @@ dependencies = [
[[package]]
name = "appflowy-local-ai"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [
"anyhow",
"appflowy-plugin",
@ -216,7 +216,7 @@ dependencies = [
[[package]]
name = "appflowy-plugin"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [
"anyhow",
"cfg-if",
@ -800,7 +800,7 @@ dependencies = [
[[package]]
name = "client-api"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"again",
"anyhow",
@ -850,7 +850,7 @@ dependencies = [
[[package]]
name = "client-api-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"collab-entity",
"collab-rt-entity",
@ -862,7 +862,7 @@ dependencies = [
[[package]]
name = "client-websocket"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"futures-channel",
"futures-util",
@ -1111,7 +1111,7 @@ dependencies = [
[[package]]
name = "collab-rt-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bincode",
@ -1136,7 +1136,7 @@ dependencies = [
[[package]]
name = "collab-rt-protocol"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"async-trait",
@ -1407,7 +1407,7 @@ dependencies = [
"cssparser-macros",
"dtoa-short",
"itoa 1.0.10",
"phf 0.11.2",
"phf 0.8.0",
"smallvec",
]
@ -1518,7 +1518,7 @@ checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
[[package]]
name = "database-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"app-error",
@ -3095,7 +3095,7 @@ dependencies = [
[[package]]
name = "gotrue"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"futures-util",
@ -3112,7 +3112,7 @@ dependencies = [
[[package]]
name = "gotrue-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"app-error",
@ -3549,7 +3549,7 @@ dependencies = [
[[package]]
name = "infra"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bytes",
@ -6085,7 +6085,7 @@ dependencies = [
[[package]]
name = "shared-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"app-error",

View File

@ -52,7 +52,7 @@ collab-user = { version = "0.2" }
# Run the script:
# scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "eebdbcad79a35b07305affdd36f16d9ce95c5a18" }
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "f73b603dc4569ea3f23c41901564b90691ac1aaa" }
[dependencies]
serde_json.workspace = true
@ -128,6 +128,6 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
# To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" }
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }

View File

@ -638,10 +638,14 @@
"downloadAIModelButton": "Download AI model",
"downloadingModel": "Downloading",
"localAILoaded": "Local AI Model successfully added and ready to use",
"localAILoading": "Local AI Model is loading...",
"localAIStopped": "Local AI Model stopped",
"localAIStart": "Local AI Chat is starting...",
"localAILoading": "Local AI Chat Model is loading...",
"localAIStopped": "Local AI stopped",
"failToLoadLocalAI": "Failed to start local AI",
"restartLocalAI": "Restart Local AI",
"disableLocalAIDialog": "Do you want to disable local AI?",
"localAIToggleTitle": "Toggle to enable or disable local AI",
"fetchLocalModel": "Fetch local model configuration",
"title": "AI API Keys",
"openAILabel": "OpenAI API key",
"openAITooltip": "You can find your Secret API key on the API key page",

View File

@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]]
name = "app-error"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bincode",
@ -183,7 +183,7 @@ dependencies = [
[[package]]
name = "appflowy-ai-client"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bytes",
@ -197,7 +197,7 @@ dependencies = [
[[package]]
name = "appflowy-local-ai"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [
"anyhow",
"appflowy-plugin",
@ -216,7 +216,7 @@ dependencies = [
[[package]]
name = "appflowy-plugin"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [
"anyhow",
"cfg-if",
@ -718,7 +718,7 @@ dependencies = [
[[package]]
name = "client-api"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"again",
"anyhow",
@ -768,7 +768,7 @@ dependencies = [
[[package]]
name = "client-api-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"collab-entity",
"collab-rt-entity",
@ -780,7 +780,7 @@ dependencies = [
[[package]]
name = "client-websocket"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"futures-channel",
"futures-util",
@ -989,7 +989,7 @@ dependencies = [
[[package]]
name = "collab-rt-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bincode",
@ -1014,7 +1014,7 @@ dependencies = [
[[package]]
name = "collab-rt-protocol"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"async-trait",
@ -1352,7 +1352,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
[[package]]
name = "database-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"app-error",
@ -2695,7 +2695,7 @@ dependencies = [
[[package]]
name = "gotrue"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"futures-util",
@ -2712,7 +2712,7 @@ dependencies = [
[[package]]
name = "gotrue-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"app-error",
@ -3077,7 +3077,7 @@ dependencies = [
[[package]]
name = "infra"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"bytes",
@ -5223,7 +5223,7 @@ dependencies = [
[[package]]
name = "shared-entity"
version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [
"anyhow",
"app-error",

View File

@ -99,8 +99,8 @@ zip = "2.1.3"
# Run the script.add_workspace_members:
# scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "eebdbcad79a35b07305affdd36f16d9ce95c5a18" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "eebdbcad79a35b07305affdd36f16d9ce95c5a18" }
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "f73b603dc4569ea3f23c41901564b90691ac1aaa" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "f73b603dc4569ea3f23c41901564b90691ac1aaa" }
[profile.dev]
opt-level = 1
@ -151,5 +151,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
# To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" }
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }

View File

@ -3,7 +3,7 @@ use crate::entities::{
ChatMessageErrorPB, ChatMessageListPB, ChatMessagePB, RepeatedRelatedQuestionPB,
};
use crate::middleware::chat_service_mw::ChatServiceMiddleware;
use crate::notification::{send_notification, ChatNotification};
use crate::notification::{make_notification, ChatNotification};
use crate::persistence::{insert_chat_messages, select_chat_messages, ChatMessageTable};
use allo_isolate::Isolate;
use flowy_chat_pub::cloud::{ChatCloudService, ChatMessage, ChatMessageType, MessageCursor};
@ -138,7 +138,7 @@ impl Chat {
chat_id: chat_id.clone(),
error_message: err.to_string(),
};
send_notification(&chat_id, ChatNotification::StreamChatMessageError)
make_notification(&chat_id, ChatNotification::StreamChatMessageError)
.payload(pb)
.send();
return Err(err);
@ -153,14 +153,14 @@ impl Chat {
chat_id: chat_id.clone(),
error_message: err.to_string(),
};
send_notification(&chat_id, ChatNotification::StreamChatMessageError)
make_notification(&chat_id, ChatNotification::StreamChatMessageError)
.payload(pb)
.send();
return Err(err);
},
}
send_notification(&chat_id, ChatNotification::FinishStreaming).send();
make_notification(&chat_id, ChatNotification::FinishStreaming).send();
if stream_buffer.lock().await.is_empty() {
return Ok(());
}
@ -193,7 +193,7 @@ impl Chat {
vec![answer.clone()],
)?;
let pb = ChatMessagePB::from(answer);
send_notification(chat_id, ChatNotification::DidReceiveChatMessage)
make_notification(chat_id, ChatNotification::DidReceiveChatMessage)
.payload(pb)
.send();
@ -234,7 +234,7 @@ impl Chat {
has_more: true,
total: 0,
};
send_notification(&self.chat_id, ChatNotification::DidLoadPrevChatMessage)
make_notification(&self.chat_id, ChatNotification::DidLoadPrevChatMessage)
.payload(pb.clone())
.send();
return Ok(pb);
@ -355,11 +355,11 @@ impl Chat {
} else {
*prev_message_state.write().await = PrevMessageState::NoMore;
}
send_notification(&chat_id, ChatNotification::DidLoadPrevChatMessage)
make_notification(&chat_id, ChatNotification::DidLoadPrevChatMessage)
.payload(pb)
.send();
} else {
send_notification(&chat_id, ChatNotification::DidLoadLatestChatMessage)
make_notification(&chat_id, ChatNotification::DidLoadLatestChatMessage)
.payload(pb)
.send();
}

View File

@ -46,8 +46,8 @@ impl ChatManager {
cloud_service.clone(),
));
if local_ai_controller.is_ready() {
if let Err(err) = local_ai_controller.initialize() {
if local_ai_controller.can_init() {
if let Err(err) = local_ai_controller.initialize_chat_plugin(None) {
error!("[AI Plugin] failed to initialize local ai: {:?}", err);
}
}
@ -86,7 +86,7 @@ impl ChatManager {
pub async fn close_chat(&self, chat_id: &str) -> Result<(), FlowyError> {
trace!("close chat: {}", chat_id);
if self.local_ai_controller.is_ready() {
if self.local_ai_controller.is_running() {
info!("[AI Plugin] notify close chat: {}", chat_id);
self.local_ai_controller.close_chat(chat_id);
}
@ -97,7 +97,7 @@ impl ChatManager {
if let Some((_, chat)) = self.chats.remove(chat_id) {
chat.close();
if self.local_ai_controller.is_ready() {
if self.local_ai_controller.is_running() {
info!("[AI Plugin] notify close chat: {}", chat_id);
self.local_ai_controller.close_chat(chat_id);
}

View File

@ -367,7 +367,7 @@ pub struct PendingResourcePB {
}
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct PluginStatePB {
pub struct LocalAIPluginStatePB {
#[pb(index = 1)]
pub state: RunningStatePB,
}
@ -392,3 +392,21 @@ impl From<RunningState> for RunningStatePB {
}
}
}
/// Protobuf payload reporting whether local AI is enabled.
///
/// Returned by `ToggleLocalAI` / `GetLocalAIState` events.
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct LocalAIPB {
// True when the user has switched local AI on.
#[pb(index = 1)]
pub enabled: bool,
}
/// Protobuf payload describing the local AI chat state: the chat toggle,
/// the chat-with-file (RAG) toggle, and the running state of the chat plugin.
///
/// Returned by the local-AI-chat events and broadcast via
/// `ChatNotification::UpdateLocalChatAI`.
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct LocalAIChatPB {
// Whether local AI chat is enabled.
#[pb(index = 1)]
pub enabled: bool,
// Whether chat-with-file (RAG) is enabled.
#[pb(index = 2)]
pub file_enabled: bool,
// Current running state of the underlying chat plugin.
#[pb(index = 3)]
pub plugin_state: LocalAIPluginStatePB,
}

View File

@ -1,4 +1,5 @@
use flowy_chat_pub::cloud::ChatMessageType;
use std::path::PathBuf;
use allo_isolate::Isolate;
@ -9,6 +10,7 @@ use validator::Validate;
use crate::chat_manager::ChatManager;
use crate::entities::*;
use crate::local_ai::local_llm_chat::LLMModelInfo;
use crate::notification::{make_notification, ChatNotification, APPFLOWY_AI_NOTIFICATION_KEY};
use crate::tools::AITools;
use flowy_error::{FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
@ -131,6 +133,17 @@ pub(crate) async fn refresh_local_ai_info_handler(
let (tx, rx) = oneshot::channel::<Result<LLMModelInfo, FlowyError>>();
tokio::spawn(async move {
let model_info = chat_manager.local_ai_controller.refresh().await;
if model_info.is_err() {
if let Some(llm_model) = chat_manager.local_ai_controller.get_current_model() {
let model_info = LLMModelInfo {
selected_model: llm_model.clone(),
models: vec![llm_model],
};
let _ = tx.send(Ok(model_info));
return;
}
}
let _ = tx.send(model_info);
});
@ -147,7 +160,7 @@ pub(crate) async fn update_local_llm_model_handler(
let chat_manager = upgrade_chat_manager(chat_manager)?;
let state = chat_manager
.local_ai_controller
.use_local_llm(data.llm_id)
.select_local_llm(data.llm_id)
.await?;
data_result_ok(state)
}
@ -229,17 +242,99 @@ pub(crate) async fn cancel_download_llm_resource_handler(
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn get_plugin_state_handler(
chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<PluginStatePB, FlowyError> {
) -> DataResult<LocalAIPluginStatePB, FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?;
let state = chat_manager.local_ai_controller.get_plugin_state();
let state = chat_manager.local_ai_controller.get_chat_plugin_state();
data_result_ok(state)
}
/// Flips the local-AI-chat flag, then reports the resulting chat state.
///
/// Broadcasts the new state via `ChatNotification::UpdateLocalChatAI` so any
/// open settings UI stays in sync, and returns the same payload to the caller.
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn toggle_local_ai_chat_handler(
  chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIChatPB, FlowyError> {
  let manager = upgrade_chat_manager(chat_manager)?;
  let controller = &manager.local_ai_controller;
  // Toggle first: the reads below must observe the post-toggle state.
  let enabled = controller.toggle_local_ai_chat().await?;
  let state = LocalAIChatPB {
    enabled,
    file_enabled: controller.is_rag_enabled(),
    plugin_state: controller.get_chat_plugin_state(),
  };
  make_notification(
    APPFLOWY_AI_NOTIFICATION_KEY,
    ChatNotification::UpdateLocalChatAI,
  )
  .payload(state.clone())
  .send();
  data_result_ok(state)
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn restart_local_ai_handler(
/// Flips the chat-with-file (RAG) flag, then reports the resulting chat state.
///
/// Broadcasts the new state via `ChatNotification::UpdateLocalChatAI` and
/// returns the same payload to the caller.
pub(crate) async fn toggle_local_ai_chat_file_handler(
  chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIChatPB, FlowyError> {
  let manager = upgrade_chat_manager(chat_manager)?;
  let controller = &manager.local_ai_controller;
  let enabled = controller.is_chat_enabled();
  // Toggle RAG; the returned flag reflects the post-toggle value.
  let file_enabled = controller.toggle_local_ai_chat_rag().await?;
  let state = LocalAIChatPB {
    enabled,
    file_enabled,
    plugin_state: controller.get_chat_plugin_state(),
  };
  make_notification(
    APPFLOWY_AI_NOTIFICATION_KEY,
    ChatNotification::UpdateLocalChatAI,
  )
  .payload(state.clone())
  .send();
  data_result_ok(state)
}
/// Reads the current local-AI-chat state without modifying anything.
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn get_local_ai_chat_state_handler(
  chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIChatPB, FlowyError> {
  let manager = upgrade_chat_manager(chat_manager)?;
  let controller = &manager.local_ai_controller;
  data_result_ok(LocalAIChatPB {
    enabled: controller.is_chat_enabled(),
    file_enabled: controller.is_rag_enabled(),
    plugin_state: controller.get_chat_plugin_state(),
  })
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn restart_local_ai_chat_handler(
chat_manager: AFPluginState<Weak<ChatManager>>,
) -> Result<(), FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?;
chat_manager.local_ai_controller.restart();
chat_manager.local_ai_controller.restart_chat_plugin();
Ok(())
}
/// Flips the global local-AI flag and returns the new value.
///
/// The controller also brings the chat plugin up or down as a side effect of
/// `toggle_local_ai`.
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn toggle_local_ai_handler(
  chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIPB, FlowyError> {
  let manager = upgrade_chat_manager(chat_manager)?;
  let enabled = manager.local_ai_controller.toggle_local_ai().await?;
  data_result_ok(LocalAIPB { enabled })
}
/// Reads whether local AI is currently enabled, without modifying anything.
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn get_local_ai_state_handler(
  chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIPB, FlowyError> {
  let manager = upgrade_chat_manager(chat_manager)?;
  data_result_ok(LocalAIPB {
    enabled: manager.local_ai_controller.is_enabled(),
  })
}

View File

@ -40,8 +40,19 @@ pub fn init(chat_manager: Weak<ChatManager>) -> AFPlugin {
ChatEvent::CancelDownloadLLMResource,
cancel_download_llm_resource_handler,
)
.event(ChatEvent::GetPluginState, get_plugin_state_handler)
.event(ChatEvent::RestartLocalAI, restart_local_ai_handler)
.event(ChatEvent::GetLocalAIPluginState, get_plugin_state_handler)
.event(ChatEvent::ToggleLocalAIChat, toggle_local_ai_chat_handler)
.event(
ChatEvent::GetLocalAIChatState,
get_local_ai_chat_state_handler,
)
.event(ChatEvent::RestartLocalAIChat, restart_local_ai_chat_handler)
.event(ChatEvent::ToggleLocalAI, toggle_local_ai_handler)
.event(ChatEvent::GetLocalAIState, get_local_ai_state_handler)
.event(
ChatEvent::ToggleChatWithFile,
toggle_local_ai_chat_file_handler,
)
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
@ -90,9 +101,29 @@ pub enum ChatEvent {
#[event()]
CancelDownloadLLMResource = 13,
#[event(output = "PluginStatePB")]
GetPluginState = 14,
#[event(output = "LocalAIPluginStatePB")]
GetLocalAIPluginState = 14,
#[event(output = "LocalAIChatPB")]
ToggleLocalAIChat = 15,
/// Return Local AI Chat State
#[event(output = "LocalAIChatPB")]
GetLocalAIChatState = 16,
/// Restart the local AI chat plugin. If the plugin quits, or the user terminates it via the
/// task manager or activity monitor, it must be restarted before chat can continue.
#[event()]
RestartLocalAIChat = 17,
/// Enable or disable local AI
#[event(output = "LocalAIPB")]
ToggleLocalAI = 18,
/// Return LocalAIPB that contains the current state of the local AI
#[event(output = "LocalAIPB")]
GetLocalAIState = 19,
#[event()]
RestartLocalAI = 15,
ToggleChatWithFile = 20,
}

View File

@ -1,15 +1,15 @@
use crate::chat_manager::ChatUserService;
use crate::entities::{
ChatStatePB, LocalModelResourcePB, ModelTypePB, PluginStatePB, RunningStatePB,
ChatStatePB, LocalAIPluginStatePB, LocalModelResourcePB, ModelTypePB, RunningStatePB,
};
use crate::local_ai::llm_resource::{LLMResourceController, LLMResourceService};
use crate::notification::{send_notification, ChatNotification};
use crate::local_ai::local_llm_resource::{LLMResourceController, LLMResourceService};
use crate::notification::{make_notification, ChatNotification, APPFLOWY_AI_NOTIFICATION_KEY};
use anyhow::Error;
use appflowy_local_ai::chat_plugin::{AIPluginConfig, LocalChatLLMChat};
use appflowy_plugin::manager::PluginManager;
use appflowy_plugin::util::is_apple_silicon;
use flowy_chat_pub::cloud::{AppFlowyAIPlugin, ChatCloudService, LLMModel, LocalAIConfig};
use flowy_error::FlowyResult;
use flowy_error::{FlowyError, FlowyResult};
use flowy_sqlite::kv::KVStorePreferences;
use futures::Sink;
use lib_infra::async_trait::async_trait;
@ -33,11 +33,16 @@ pub struct LLMModelInfo {
pub models: Vec<LLMModel>,
}
const LOCAL_AI_SETTING_KEY: &str = "local_ai_setting";
const APPFLOWY_LOCAL_AI_ENABLED: &str = "appflowy_local_ai_enabled";
const APPFLOWY_LOCAL_AI_CHAT_ENABLED: &str = "appflowy_local_ai_chat_enabled";
const APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED: &str = "appflowy_local_ai_chat_rag_enabled";
const LOCAL_AI_SETTING_KEY: &str = "appflowy_local_ai_setting:v0";
pub struct LocalAIController {
llm_chat: Arc<LocalChatLLMChat>,
llm_res: Arc<LLMResourceController>,
current_chat_id: Mutex<Option<String>>,
store_preferences: Arc<KVStorePreferences>,
}
impl Deref for LocalAIController {
@ -57,15 +62,17 @@ impl LocalAIController {
) -> Self {
let llm_chat = Arc::new(LocalChatLLMChat::new(plugin_manager));
let mut rx = llm_chat.subscribe_running_state();
let _weak_store_preferences = Arc::downgrade(&store_preferences);
tokio::spawn(async move {
while let Some(state) = rx.next().await {
info!("[AI Plugin] state: {:?}", state);
let new_state = RunningStatePB::from(state);
send_notification(
"appflowy_chat_plugin",
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(PluginStatePB { state: new_state })
.payload(LocalAIPluginStatePB { state: new_state })
.send();
}
});
@ -73,17 +80,26 @@ impl LocalAIController {
let res_impl = LLMResourceServiceImpl {
user_service: user_service.clone(),
cloud_service,
store_preferences,
store_preferences: store_preferences.clone(),
};
let (tx, mut rx) = tokio::sync::mpsc::channel(1);
let llm_res = Arc::new(LLMResourceController::new(user_service, res_impl, tx));
let current_chat_id = Mutex::new(None);
let cloned_llm_chat = llm_chat.clone();
let cloned_llm_res = llm_res.clone();
let this = Self {
llm_chat,
llm_res,
current_chat_id,
store_preferences,
};
let rag_enabled = this.is_rag_enabled();
let cloned_llm_chat = this.llm_chat.clone();
let cloned_llm_res = this.llm_res.clone();
tokio::spawn(async move {
while rx.recv().await.is_some() {
if let Ok(chat_config) = cloned_llm_res.get_ai_plugin_config() {
if let Ok(chat_config) = cloned_llm_res.get_chat_config(rag_enabled) {
if let Err(err) = initialize_chat_plugin(&cloned_llm_chat, chat_config) {
error!("[AI Plugin] failed to setup plugin: {:?}", err);
}
@ -91,30 +107,82 @@ impl LocalAIController {
}
});
Self {
llm_chat,
llm_res,
current_chat_id: Default::default(),
}
this
}
pub async fn refresh(&self) -> FlowyResult<LLMModelInfo> {
self.llm_res.refresh_llm_resource().await
}
pub fn initialize(&self) -> FlowyResult<()> {
let chat_config = self.llm_res.get_ai_plugin_config()?;
pub fn initialize_chat_plugin(
&self,
ret: Option<tokio::sync::oneshot::Sender<()>>,
) -> FlowyResult<()> {
let mut chat_config = self.llm_res.get_chat_config(self.is_rag_enabled())?;
let llm_chat = self.llm_chat.clone();
initialize_chat_plugin(&llm_chat, chat_config)?;
tokio::spawn(async move {
trace!("[AI Plugin] config: {:?}", chat_config);
if is_apple_silicon().await.unwrap_or(false) {
chat_config = chat_config.with_device("gpu");
}
match llm_chat.init_chat_plugin(chat_config).await {
Ok(_) => {
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(ChatStatePB {
model_type: ModelTypePB::LocalAI,
available: true,
})
.send();
},
Err(err) => {
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(ChatStatePB {
model_type: ModelTypePB::LocalAI,
available: false,
})
.send();
error!("[AI Plugin] failed to setup plugin: {:?}", err);
},
}
if let Some(ret) = ret {
let _ = ret.send(());
}
});
Ok(())
}
/// Returns true if the local AI is enabled and ready to use.
pub fn is_ready(&self) -> bool {
self.llm_res.is_resource_ready()
pub fn can_init(&self) -> bool {
self.is_enabled() && self.llm_res.is_resource_ready()
}
/// Whether the local chat plugin process is currently up and ready to serve.
pub fn is_running(&self) -> bool {
self.llm_chat.get_plugin_running_state().is_ready()
}
/// Whether the user has switched local AI on.
// NOTE(review): relies on `get_bool` returning false for an unset key — confirm in KVStorePreferences.
pub fn is_enabled(&self) -> bool {
self.store_preferences.get_bool(APPFLOWY_LOCAL_AI_ENABLED)
}
/// Whether the user has switched local AI chat on.
pub fn is_chat_enabled(&self) -> bool {
self
.store_preferences
.get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED)
}
/// Whether chat-with-file (RAG) is switched on.
pub fn is_rag_enabled(&self) -> bool {
self
.store_preferences
.get_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED)
}
pub fn open_chat(&self, chat_id: &str) {
if !self.is_ready() {
if !self.is_running() {
return;
}
@ -149,7 +217,11 @@ impl LocalAIController {
});
}
pub async fn use_local_llm(&self, llm_id: i64) -> FlowyResult<LocalModelResourcePB> {
pub async fn select_local_llm(&self, llm_id: i64) -> FlowyResult<LocalModelResourcePB> {
if !self.is_enabled() {
return Err(FlowyError::local_ai_unavailable());
}
let llm_chat = self.llm_chat.clone();
match llm_chat.destroy_chat_plugin().await {
Ok(_) => info!("[AI Plugin] destroy plugin successfully"),
@ -158,7 +230,7 @@ impl LocalAIController {
let state = self.llm_res.use_local_llm(llm_id)?;
// Re-initialize the plugin if the setting is updated and ready to use
if self.llm_res.is_resource_ready() {
self.initialize()?;
self.initialize_chat_plugin(None)?;
}
Ok(state)
}
@ -167,6 +239,10 @@ impl LocalAIController {
self.llm_res.get_local_llm_state()
}
/// Returns the previously selected local model, if the user has chosen one.
pub fn get_current_model(&self) -> Option<LLMModel> {
self.llm_res.get_selected_model()
}
pub async fn start_downloading<T>(&self, progress_sink: T) -> FlowyResult<String>
where
T: Sink<String, Error = anyhow::Error> + Unpin + Sync + Send + 'static,
@ -180,20 +256,77 @@ impl LocalAIController {
Ok(())
}
pub fn get_plugin_state(&self) -> PluginStatePB {
pub fn get_chat_plugin_state(&self) -> LocalAIPluginStatePB {
let state = self.llm_chat.get_plugin_running_state();
PluginStatePB {
LocalAIPluginStatePB {
state: RunningStatePB::from(state),
}
}
pub fn restart(&self) {
if let Ok(chat_config) = self.llm_res.get_ai_plugin_config() {
pub fn restart_chat_plugin(&self) {
let rag_enabled = self.is_rag_enabled();
if let Ok(chat_config) = self.llm_res.get_chat_config(rag_enabled) {
if let Err(err) = initialize_chat_plugin(&self.llm_chat, chat_config) {
error!("[AI Plugin] failed to setup plugin: {:?}", err);
}
}
}
/// Flips the persisted local-AI flag and returns the new value.
///
/// When local AI turns on, the chat plugin comes up only if the chat flag is
/// also set; when local AI turns off, the chat plugin is always torn down.
pub async fn toggle_local_ai(&self) -> FlowyResult<bool> {
  let next = !self.store_preferences.get_bool(APPFLOWY_LOCAL_AI_ENABLED);
  self
    .store_preferences
    .set_bool(APPFLOWY_LOCAL_AI_ENABLED, next)?;
  // Short-circuit: when disabling, skip the chat-flag read and force the
  // plugin down (chat_enabled == false), matching the explicit branch logic.
  let chat_enabled = next
    && self
      .store_preferences
      .get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED);
  self.enable_chat_plugin(chat_enabled).await?;
  Ok(next)
}
/// Flips the persisted local-AI-chat flag, brings the chat plugin up or down
/// accordingly, and returns the new flag value.
pub async fn toggle_local_ai_chat(&self) -> FlowyResult<bool> {
  let next = !self
    .store_preferences
    .get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED);
  self
    .store_preferences
    .set_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED, next)?;
  // Plugin lifecycle tracks the flag: enable starts it, disable destroys it.
  self.enable_chat_plugin(next).await?;
  Ok(next)
}
/// Flips the persisted chat-with-file (RAG) flag and returns the new value.
///
/// Only the preference changes here; the running plugin is not reconfigured.
pub async fn toggle_local_ai_chat_rag(&self) -> FlowyResult<bool> {
  let next = !self
    .store_preferences
    .get_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED);
  self
    .store_preferences
    .set_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED, next)?;
  Ok(next)
}
/// Brings the chat plugin up (`enabled == true`) or tears it down.
///
/// On enable, waits until initialization has settled before returning, so
/// callers observe a stable plugin state. Failures are logged, not
/// propagated — this is deliberately best-effort.
async fn enable_chat_plugin(&self, enabled: bool) -> FlowyResult<()> {
  if enabled {
    let (tx, rx) = tokio::sync::oneshot::channel();
    if let Err(err) = self.initialize_chat_plugin(Some(tx)) {
      error!("[AI Plugin] failed to initialize local ai: {:?}", err);
    }
    // If init failed before spawning, `tx` is dropped and this resolves
    // immediately with RecvError — intentionally ignored.
    let _ = rx.await;
  } else if let Err(err) = self.llm_chat.destroy_chat_plugin().await {
    // Collapsed `else { if let … }` into `else if let …` (clippy:
    // collapsible_else_if); behavior is unchanged.
    error!("[AI Plugin] failed to destroy plugin: {:?}", err);
  }
  Ok(())
}
}
fn initialize_chat_plugin(
@ -208,24 +341,26 @@ fn initialize_chat_plugin(
}
match llm_chat.init_chat_plugin(chat_config).await {
Ok(_) => {
send_notification(
"appflowy_chat_plugin",
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(ChatStatePB {
model_type: ModelTypePB::LocalAI,
available: true,
});
})
.send();
},
Err(err) => {
send_notification(
"appflowy_chat_plugin",
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(ChatStatePB {
model_type: ModelTypePB::LocalAI,
available: false,
});
})
.send();
error!("[AI Plugin] failed to setup plugin: {:?}", err);
},
}
@ -240,7 +375,7 @@ pub struct LLMResourceServiceImpl {
}
#[async_trait]
impl LLMResourceService for LLMResourceServiceImpl {
async fn get_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error> {
async fn fetch_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error> {
let workspace_id = self.user_service.workspace_id()?;
let config = self
.cloud_service
@ -249,16 +384,22 @@ impl LLMResourceService for LLMResourceServiceImpl {
Ok(config)
}
fn store(&self, setting: LLMSetting) -> Result<(), Error> {
fn store_setting(&self, setting: LLMSetting) -> Result<(), Error> {
self
.store_preferences
.set_object(LOCAL_AI_SETTING_KEY, setting)?;
Ok(())
}
fn retrieve(&self) -> Option<LLMSetting> {
fn retrieve_setting(&self) -> Option<LLMSetting> {
self
.store_preferences
.get_object::<LLMSetting>(LOCAL_AI_SETTING_KEY)
}
// Reads the persisted chat-with-file (RAG) toggle for the resource layer;
// same key as LocalAIController::is_rag_enabled.
fn is_rag_enabled(&self) -> bool {
self
.store_preferences
.get_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED)
}
}

View File

@ -14,16 +14,19 @@ use parking_lot::RwLock;
use appflowy_local_ai::plugin_request::download_plugin;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use tokio::fs::{self};
use tokio_util::sync::CancellationToken;
use tracing::{debug, error, info, instrument, trace};
use tracing::{debug, error, info, instrument, trace, warn};
use zip_extensions::zip_extract;
#[async_trait]
pub trait LLMResourceService: Send + Sync + 'static {
async fn get_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error>;
fn store(&self, setting: LLMSetting) -> Result<(), anyhow::Error>;
fn retrieve(&self) -> Option<LLMSetting>;
/// Fetches the local AI configuration from the remote server.
async fn fetch_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error>;
fn store_setting(&self, setting: LLMSetting) -> Result<(), anyhow::Error>;
fn retrieve_setting(&self) -> Option<LLMSetting>;
fn is_rag_enabled(&self) -> bool;
}
const PLUGIN_DIR: &str = "plugin";
@ -41,7 +44,7 @@ pub struct DownloadTask {
}
impl DownloadTask {
pub fn new() -> Self {
let (tx, _) = tokio::sync::broadcast::channel(5);
let (tx, _) = tokio::sync::broadcast::channel(100);
let cancel_token = CancellationToken::new();
Self { cancel_token, tx }
}
@ -67,7 +70,7 @@ impl LLMResourceController {
resource_service: impl LLMResourceService,
resource_notify: tokio::sync::mpsc::Sender<()>,
) -> Self {
let llm_setting = RwLock::new(resource_service.retrieve());
let llm_setting = RwLock::new(resource_service.retrieve_setting());
Self {
user_service,
resource_service: Arc::new(resource_service),
@ -102,7 +105,7 @@ impl LLMResourceController {
llm_model: selected_model.clone(),
};
self.llm_setting.write().replace(llm_setting.clone());
self.resource_service.store(llm_setting)?;
self.resource_service.store_setting(llm_setting)?;
Ok(LLMModelInfo {
selected_model,
@ -133,7 +136,7 @@ impl LLMResourceController {
trace!("[LLM Resource] Selected AI setting: {:?}", llm_setting);
*self.llm_setting.write() = Some(llm_setting.clone());
self.resource_service.store(llm_setting)?;
self.resource_service.store_setting(llm_setting)?;
self.get_local_llm_state()
}
@ -302,6 +305,7 @@ impl LLMResourceController {
let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0);
let _ = plugin_progress_tx.send(format!("plugin:progress:{}", progress));
})),
Some(Duration::from_millis(100)),
)
.await?;
@ -342,7 +346,11 @@ impl LLMResourceController {
let cloned_model_name = model_name.clone();
let progress = Arc::new(move |downloaded, total_size| {
let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0);
let _ = plugin_progress_tx.send(format!("{}:progress:{}", cloned_model_name, progress));
if let Err(err) =
plugin_progress_tx.send(format!("{}:progress:{}", cloned_model_name, progress))
{
warn!("Failed to send progress: {:?}", err);
}
});
match download_model(
&url,
@ -384,7 +392,7 @@ impl LLMResourceController {
}
#[instrument(level = "debug", skip_all, err)]
pub fn get_ai_plugin_config(&self) -> FlowyResult<AIPluginConfig> {
pub fn get_chat_config(&self, rag_enabled: bool) -> FlowyResult<AIPluginConfig> {
if !self.is_resource_ready() {
return Err(FlowyError::local_ai().with_context("Local AI resources are not ready"));
}
@ -397,27 +405,26 @@ impl LLMResourceController {
.ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?;
let model_dir = self.user_model_folder()?;
let resource_dir = self.resource_dir()?;
let bin_path = self
.plugin_path(&llm_setting.plugin.etag)?
.join(llm_setting.plugin.name);
let chat_model_path = model_dir.join(&llm_setting.llm_model.chat_model.file_name);
let embedding_model_path = model_dir.join(&llm_setting.llm_model.embedding_model.file_name);
let mut config = AIPluginConfig::new(bin_path, chat_model_path)?;
//
let persist_directory = resource_dir.join("rag");
if !persist_directory.exists() {
std::fs::create_dir_all(&persist_directory)?;
if rag_enabled {
let resource_dir = self.resource_dir()?;
let embedding_model_path = model_dir.join(&llm_setting.llm_model.embedding_model.file_name);
let persist_directory = resource_dir.join("vectorstore");
if !persist_directory.exists() {
std::fs::create_dir_all(&persist_directory)?;
}
config.set_rag_enabled(&embedding_model_path, &persist_directory)?;
}
// Enable RAG when the embedding model path is set
config.set_rag_enabled(&embedding_model_path, &persist_directory)?;
if cfg!(debug_assertions) {
config = config.with_verbose(true);
}
trace!("[AI Chat] use config: {:?}", config);
Ok(config)
}
@ -425,7 +432,7 @@ impl LLMResourceController {
async fn fetch_ai_config(&self) -> FlowyResult<LocalAIConfig> {
self
.resource_service
.get_local_ai_config()
.fetch_local_ai_config()
.await
.map_err(|err| {
error!("[LLM Resource] Failed to fetch local ai config: {:?}", err);
@ -434,6 +441,14 @@ impl LLMResourceController {
})
}
/// Returns a clone of the currently selected LLM model, if a local-AI
/// setting has been stored; `None` when no model has been chosen yet.
pub fn get_selected_model(&self) -> Option<LLMModel> {
  // Hold the read guard only long enough to clone the model out of the
  // optional setting, then release the lock.
  let setting_guard = self.llm_setting.read();
  setting_guard
    .as_ref()
    .map(|setting| setting.llm_model.clone())
}
/// Selects the appropriate model based on the current settings or defaults to the first model.
fn select_model(&self, ai_config: &LocalAIConfig) -> FlowyResult<LLMModel> {
let selected_model = match self.llm_setting.read().as_ref() {
@ -477,7 +492,7 @@ impl LLMResourceController {
.map(|dir| dir.join(model_file_name))
}
fn resource_dir(&self) -> FlowyResult<PathBuf> {
pub(crate) fn resource_dir(&self) -> FlowyResult<PathBuf> {
let user_data_dir = self.user_service.user_data_dir()?;
Ok(user_data_dir.join("llm"))
}

View File

@ -1,3 +1,3 @@
pub mod llm_resource;
pub mod local_llm_chat;
pub mod local_llm_resource;
mod model_request;

View File

@ -5,6 +5,7 @@ use reqwest::{Client, Response, StatusCode};
use sha2::{Digest, Sha256};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::fs::{self, File};
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
@ -29,6 +30,11 @@ pub async fn download_model(
let mut part_file = File::create(&partial_path).await?;
let mut downloaded: u64 = 0;
let debounce_duration = Duration::from_millis(100);
let mut last_update = Instant::now()
.checked_sub(debounce_duration)
.unwrap_or(Instant::now());
while let Some(chunk) = response.chunk().await? {
if let Some(cancel_token) = &cancel_token {
if cancel_token.is_cancelled() {
@ -42,7 +48,11 @@ pub async fn download_model(
downloaded += chunk.len() as u64;
if let Some(progress_callback) = &progress_callback {
progress_callback(downloaded, total_size_in_bytes);
let now = Instant::now();
if now.duration_since(last_update) >= debounce_duration {
progress_callback(downloaded, total_size_in_bytes);
last_update = now;
}
}
}

View File

@ -1,7 +1,7 @@
use crate::chat_manager::ChatUserService;
use crate::entities::{ChatStatePB, ModelTypePB};
use crate::local_ai::local_llm_chat::LocalAIController;
use crate::notification::{send_notification, ChatNotification};
use crate::notification::{make_notification, ChatNotification, APPFLOWY_AI_NOTIFICATION_KEY};
use crate::persistence::select_single_message;
use appflowy_plugin::error::PluginError;
@ -53,14 +53,15 @@ impl ChatServiceMiddleware {
err,
PluginError::PluginNotConnected | PluginError::PeerDisconnect
) {
send_notification(
"appflowy_chat_plugin",
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(ChatStatePB {
model_type: ModelTypePB::LocalAI,
available: false,
});
})
.send();
}
}
}
@ -106,7 +107,7 @@ impl ChatCloudService for ChatServiceMiddleware {
chat_id: &str,
message_id: i64,
) -> Result<StreamAnswer, FlowyError> {
if self.local_llm_controller.is_ready() {
if self.local_llm_controller.is_running() {
let content = self.get_message_content(message_id)?;
match self
.local_llm_controller
@ -137,7 +138,7 @@ impl ChatCloudService for ChatServiceMiddleware {
chat_id: &str,
question_message_id: i64,
) -> Result<ChatMessage, FlowyError> {
if self.local_llm_controller.is_ready() {
if self.local_llm_controller.is_running() {
let content = self.get_message_content(question_message_id)?;
match self
.local_llm_controller
@ -182,7 +183,7 @@ impl ChatCloudService for ChatServiceMiddleware {
chat_id: &str,
message_id: i64,
) -> FutureResult<RepeatedRelatedQuestion, FlowyError> {
if self.local_llm_controller.is_ready() {
if self.local_llm_controller.is_running() {
FutureResult::new(async move {
Ok(RepeatedRelatedQuestion {
message_id,
@ -202,7 +203,7 @@ impl ChatCloudService for ChatServiceMiddleware {
text: &str,
complete_type: CompletionType,
) -> Result<StreamComplete, FlowyError> {
if self.local_llm_controller.is_ready() {
if self.local_llm_controller.is_running() {
return Err(
FlowyError::not_support().with_context("completion with local ai is not supported yet"),
);
@ -220,7 +221,7 @@ impl ChatCloudService for ChatServiceMiddleware {
file_path: PathBuf,
chat_id: &str,
) -> Result<(), FlowyError> {
if self.local_llm_controller.is_ready() {
if self.local_llm_controller.is_running() {
self
.local_llm_controller
.index_file(chat_id, file_path)

View File

@ -2,7 +2,7 @@ use flowy_derive::ProtoBuf_Enum;
use flowy_notification::NotificationBuilder;
const CHAT_OBSERVABLE_SOURCE: &str = "Chat";
pub const APPFLOWY_AI_NOTIFICATION_KEY: &str = "appflowy_ai_plugin";
#[derive(ProtoBuf_Enum, Debug, Default)]
pub enum ChatNotification {
#[default]
@ -13,7 +13,7 @@ pub enum ChatNotification {
StreamChatMessageError = 4,
FinishStreaming = 5,
UpdateChatPluginState = 6,
LocalAIResourceNeeded = 7,
UpdateLocalChatAI = 7,
}
impl std::convert::From<ChatNotification> for i32 {
@ -30,13 +30,13 @@ impl std::convert::From<i32> for ChatNotification {
4 => ChatNotification::StreamChatMessageError,
5 => ChatNotification::FinishStreaming,
6 => ChatNotification::UpdateChatPluginState,
7 => ChatNotification::LocalAIResourceNeeded,
7 => ChatNotification::UpdateLocalChatAI,
_ => ChatNotification::Unknown,
}
}
}
#[tracing::instrument(level = "trace")]
pub(crate) fn send_notification(id: &str, ty: ChatNotification) -> NotificationBuilder {
pub(crate) fn make_notification(id: &str, ty: ChatNotification) -> NotificationBuilder {
NotificationBuilder::new(id, ty, CHAT_OBSERVABLE_SOURCE)
}

View File

@ -3,6 +3,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::time::Duration;
use crate::af_cloud::define::ServerUser;
use anyhow::Error;
use client_api::collab_sync::ServerCollabMessage;
use client_api::entity::ai_dto::AIModel;
@ -12,8 +13,18 @@ use client_api::ws::{
ConnectState, WSClient, WSClientConfig, WSConnectStateReceiver, WebSocketChannel,
};
use client_api::{Client, ClientConfiguration};
use flowy_chat_pub::cloud::ChatCloudService;
use flowy_database_pub::cloud::DatabaseCloudService;
use flowy_document_pub::cloud::DocumentCloudService;
use flowy_error::{ErrorCode, FlowyError};
use flowy_folder_pub::cloud::FolderCloudService;
use flowy_search_pub::cloud::SearchCloudService;
use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_storage_pub::cloud::StorageCloudService;
use flowy_user_pub::cloud::{UserCloudService, UserUpdate};
use flowy_user_pub::entities::UserTokenState;
use lib_dispatch::prelude::af_spawn;
use rand::Rng;
use semver::Version;
use tokio::select;
@ -23,17 +34,6 @@ use tokio_util::sync::CancellationToken;
use tracing::{error, event, info, warn};
use uuid::Uuid;
use crate::af_cloud::define::ServerUser;
use flowy_database_pub::cloud::DatabaseCloudService;
use flowy_document_pub::cloud::DocumentCloudService;
use flowy_error::{ErrorCode, FlowyError};
use flowy_folder_pub::cloud::FolderCloudService;
use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_storage_pub::cloud::StorageCloudService;
use flowy_user_pub::cloud::{UserCloudService, UserUpdate};
use flowy_user_pub::entities::UserTokenState;
use lib_dispatch::prelude::af_spawn;
use crate::af_cloud::impls::{
AFCloudChatCloudServiceImpl, AFCloudDatabaseCloudServiceImpl, AFCloudDocumentCloudServiceImpl,
AFCloudFileStorageServiceImpl, AFCloudFolderCloudServiceImpl, AFCloudUserAuthServiceImpl,

View File

@ -399,7 +399,6 @@ pub enum AIModelPB {
GPT4o = 2,
Claude3Sonnet = 3,
Claude3Opus = 4,
LocalAIModel = 5,
}
impl AIModelPB {
@ -410,7 +409,6 @@ impl AIModelPB {
AIModelPB::GPT4o => "gpt-4o",
AIModelPB::Claude3Sonnet => "claude-3-sonnet",
AIModelPB::Claude3Opus => "claude-3-opus",
AIModelPB::LocalAIModel => "local",
}
}
}
@ -424,7 +422,6 @@ impl FromStr for AIModelPB {
"gpt-4o" => Ok(AIModelPB::GPT4o),
"claude-3-sonnet" => Ok(AIModelPB::Claude3Sonnet),
"claude-3-opus" => Ok(AIModelPB::Claude3Opus),
"local" => Ok(AIModelPB::LocalAIModel),
_ => Ok(AIModelPB::DefaultModel),
}
}

View File

@ -489,10 +489,7 @@ impl UserManager {
&self,
updated_settings: UpdateUserWorkspaceSettingPB,
) -> FlowyResult<()> {
let ai_model = updated_settings
.ai_model
.as_ref()
.map(|model| model.to_str().to_string());
let ai_model = updated_settings.ai_model.clone();
let workspace_id = updated_settings.workspace_id.clone();
let cloud_service = self.cloud_services.get_user_service()?;
let settings = cloud_service
@ -505,13 +502,13 @@ impl UserManager {
.payload(pb)
.send();
if let Some(ai_model) = ai_model {
if let Err(err) = self.cloud_services.set_ai_model(&ai_model) {
if let Some(ai_model) = &ai_model {
if let Err(err) = self.cloud_services.set_ai_model(ai_model.to_str()) {
error!("Set ai model failed: {}", err);
}
let conn = self.db_connection(uid)?;
let params = UpdateUserProfileParams::new(uid).with_ai_model(&ai_model);
let params = UpdateUserProfileParams::new(uid).with_ai_model(ai_model.to_str());
upsert_user_profile_change(uid, conn, UserTableChangeset::new(params))?;
}
Ok(())
@ -520,7 +517,6 @@ impl UserManager {
pub async fn get_workspace_settings(&self, workspace_id: &str) -> FlowyResult<UseAISettingPB> {
let cloud_service = self.cloud_services.get_user_service()?;
let settings = cloud_service.get_workspace_setting(workspace_id).await?;
let uid = self.user_id()?;
let conn = self.db_connection(uid)?;
let params = UpdateUserProfileParams::new(uid).with_ai_model(&settings.ai_model);