chore: enable local ai and local ai chat (#5755)

* chore: enable local ai and local ai chat

* chore: config for chat with file

* chore: flutter analyzer
Nathan.fooo 2024-07-18 20:54:35 +08:00 committed by GitHub
parent f36e3ae378
commit 5bbf174ffd
38 changed files with 1515 additions and 647 deletions

View File

@@ -13,7 +13,6 @@
         "type": "dart",
         "env": {
           "RUST_LOG": "debug",
-          "RUST_BACKTRACE": "1"
         },
         // uncomment the following line to testing performance.
         // "flutterMode": "profile",

View File

@@ -1,5 +1,6 @@
 import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
 import 'package:appflowy_backend/dispatch/dispatch.dart';
+import 'package:appflowy_backend/log.dart';
 import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
 import 'package:flutter_bloc/flutter_bloc.dart';
 import 'package:freezed_annotation/freezed_annotation.dart';
@@ -9,13 +10,12 @@ part 'chat_file_bloc.freezed.dart';
 class ChatFileBloc extends Bloc<ChatFileEvent, ChatFileState> {
   ChatFileBloc({
     required String chatId,
-    dynamic message,
   }) : listener = LocalLLMListener(),
-       super(ChatFileState.initial(message)) {
+       super(const ChatFileState()) {
     listener.start(
-      stateCallback: (pluginState) {
+      chatStateCallback: (chatState) {
         if (!isClosed) {
-          add(ChatFileEvent.updateLocalAIState(pluginState));
+          add(ChatFileEvent.updateChatState(chatState));
         }
       },
     );
@@ -24,26 +24,30 @@ class ChatFileBloc extends Bloc<ChatFileEvent, ChatFileState> {
       (event, emit) async {
         await event.when(
           initial: () async {
-            final result = await ChatEventGetPluginState().send();
+            final result = await ChatEventGetLocalAIChatState().send();
             result.fold(
-              (pluginState) {
+              (chatState) {
                 if (!isClosed) {
-                  add(ChatFileEvent.updateLocalAIState(pluginState));
+                  add(
+                    ChatFileEvent.updateChatState(chatState),
+                  );
                 }
               },
-              (err) {},
+              (err) {
+                Log.error(err.toString());
+              },
             );
           },
           newFile: (String filePath) {
            final payload = ChatFilePB(filePath: filePath, chatId: chatId);
            ChatEventChatWithFile(payload).send();
           },
-          updateLocalAIState: (PluginStatePB pluginState) {
+          updateChatState: (LocalAIChatPB chatState) {
+            // Chat with file is supported only when the user has enabled it
+            // and the plugin is already running.
+            final supportChatWithFile = chatState.fileEnabled &&
+                chatState.pluginState.state == RunningStatePB.Running;
             emit(
-              state.copyWith(
-                supportChatWithFile:
-                    pluginState.state == RunningStatePB.Running,
-              ),
+              state.copyWith(supportChatWithFile: supportChatWithFile),
             );
           },
         );
@@ -64,20 +68,19 @@ class ChatFileBloc extends Bloc<ChatFileEvent, ChatFileState> {
 class ChatFileEvent with _$ChatFileEvent {
   const factory ChatFileEvent.initial() = Initial;
   const factory ChatFileEvent.newFile(String filePath) = _NewFile;
-  const factory ChatFileEvent.updateLocalAIState(PluginStatePB pluginState) =
-      _UpdateLocalAIState;
+  const factory ChatFileEvent.updateChatState(LocalAIChatPB chatState) =
+      _UpdateChatState;
 }
 
 @freezed
 class ChatFileState with _$ChatFileState {
   const factory ChatFileState({
-    required String text,
     @Default(false) bool supportChatWithFile,
   }) = _ChatFileState;
+}
 
-  factory ChatFileState.initial(dynamic text) {
-    return ChatFileState(
-      text: text is String ? text : "",
-    );
-  }
+@freezed
+class LocalAIChatFileIndicator with _$LocalAIChatFileIndicator {
+  const factory LocalAIChatFileIndicator.ready(bool isEnabled) = _Ready;
+  const factory LocalAIChatFileIndicator.loading() = _Loading;
 }

View File

@@ -0,0 +1,73 @@
import 'dart:async';

import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';

part 'chat_input_bloc.freezed.dart';

class ChatInputBloc extends Bloc<ChatInputEvent, ChatInputState> {
  ChatInputBloc()
      : listener = LocalLLMListener(),
        super(const ChatInputState(aiType: _AppFlowyAI())) {
    listener.start(
      stateCallback: (pluginState) {
        if (!isClosed) {
          add(ChatInputEvent.updateState(pluginState));
        }
      },
    );

    on<ChatInputEvent>(_handleEvent);
  }

  final LocalLLMListener listener;

  @override
  Future<void> close() async {
    await listener.stop();
    return super.close();
  }

  Future<void> _handleEvent(
    ChatInputEvent event,
    Emitter<ChatInputState> emit,
  ) async {
    await event.when(
      started: () async {
        final result = await ChatEventGetLocalAIPluginState().send();
        result.fold(
          (pluginState) {
            if (!isClosed) {
              add(ChatInputEvent.updateState(pluginState));
            }
          },
          (err) => Log.error(err.toString()),
        );
      },
      updateState: (LocalAIPluginStatePB aiPluginState) {
        emit(const ChatInputState(aiType: _AppFlowyAI()));
      },
    );
  }
}

@freezed
class ChatInputEvent with _$ChatInputEvent {
  const factory ChatInputEvent.started() = _Started;
  const factory ChatInputEvent.updateState(LocalAIPluginStatePB aiPluginState) =
      _UpdatePluginState;
}

@freezed
class ChatInputState with _$ChatInputState {
  const factory ChatInputState({required AIType aiType}) = _ChatInputState;
}

@freezed
class AIType with _$AIType {
  const factory AIType.appflowyAI() = _AppFlowyAI;
  const factory AIType.localAI() = _LocalAI;
}
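
Note that the `updateState` handler above ignores `aiPluginState` and always emits `AIType.appflowyAI()`, so the chat input never actually switches to the local-AI branch yet. For illustration only, a state-driven mapping could replace that handler roughly like this (an assumption about future behavior, not what this commit ships):

    // Hypothetical variant of the updateState handler; not part of this commit.
    updateState: (LocalAIPluginStatePB aiPluginState) {
      final aiType = aiPluginState.state == RunningStatePB.Running
          ? const AIType.localAI()
          : const AIType.appflowyAI();
      emit(ChatInputState(aiType: aiType));
    },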

View File

@@ -1,4 +1,5 @@
 import 'package:appflowy/plugins/ai_chat/application/chat_file_bloc.dart';
+import 'package:appflowy/plugins/ai_chat/application/chat_input_bloc.dart';
 import 'package:desktop_drop/desktop_drop.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter/services.dart';
@@ -67,28 +68,40 @@ class AIChatPage extends StatelessWidget {
   @override
   Widget build(BuildContext context) {
     if (userProfile.authenticator == AuthenticatorPB.AppFlowyCloud) {
-      return BlocProvider(
-        create: (context) => ChatFileBloc(chatId: view.id.toString()),
+      return MultiBlocProvider(
+        providers: [
+          BlocProvider(
+            create: (_) => ChatFileBloc(chatId: view.id.toString()),
+          ),
+          BlocProvider(
+            create: (_) => ChatBloc(
+              view: view,
+              userProfile: userProfile,
+            )..add(const ChatEvent.initialLoad()),
+          ),
+          BlocProvider(create: (_) => ChatInputBloc()),
+        ],
         child: BlocBuilder<ChatFileBloc, ChatFileState>(
           builder: (context, state) {
-            return state.supportChatWithFile
-                ? DropTarget(
-                    onDragDone: (DropDoneDetails detail) async {
-                      for (final file in detail.files) {
-                        context
-                            .read<ChatFileBloc>()
-                            .add(ChatFileEvent.newFile(file.path));
-                      }
-                    },
-                    child: _ChatContentPage(
-                      view: view,
-                      userProfile: userProfile,
-                    ),
-                  )
-                : _ChatContentPage(
-                    view: view,
-                    userProfile: userProfile,
-                  );
+            Widget child = _ChatContentPage(
+              view: view,
+              userProfile: userProfile,
+            );
+
+            // If the chat supports file upload, wrap the chat content with a drop target.
+            if (state.supportChatWithFile) {
+              child = DropTarget(
+                onDragDone: (DropDoneDetails detail) async {
+                  for (final file in detail.files) {
+                    context
+                        .read<ChatFileBloc>()
+                        .add(ChatFileEvent.newFile(file.path));
+                  }
+                },
+                child: child,
+              );
+            }
+            return child;
           },
         ),
       );
@@ -146,67 +159,61 @@ class _ChatContentPageState extends State<_ChatContentPage> {
           Flexible(
             child: ConstrainedBox(
               constraints: const BoxConstraints(maxWidth: 784),
-              child: BlocProvider(
-                create: (_) => ChatBloc(
-                  view: widget.view,
-                  userProfile: widget.userProfile,
-                )..add(const ChatEvent.initialLoad()),
-                child: BlocBuilder<ChatBloc, ChatState>(
-                  builder: (blocContext, state) => Chat(
-                    messages: state.messages,
-                    onSendPressed: (_) {
-                      // We use custom bottom widget for chat input, so
-                      // do not need to handle this event.
-                    },
-                    customBottomWidget: buildChatInput(blocContext),
-                    user: _user,
-                    theme: buildTheme(context),
-                    onEndReached: () async {
-                      if (state.hasMorePrevMessage &&
-                          state.loadingPreviousStatus !=
-                              const LoadingState.loading()) {
-                        blocContext
-                            .read<ChatBloc>()
-                            .add(const ChatEvent.startLoadingPrevMessage());
-                      }
-                    },
-                    emptyState: BlocBuilder<ChatBloc, ChatState>(
-                      builder: (_, state) => state.initialLoadingStatus ==
-                              const LoadingState.finish()
-                          ? Padding(
-                              padding: AIChatUILayout.welcomePagePadding,
-                              child: ChatWelcomePage(
-                                onSelectedQuestion: (question) => blocContext
-                                    .read<ChatBloc>()
-                                    .add(ChatEvent.sendMessage(question)),
-                              ),
-                            )
-                          : const Center(
-                              child: CircularProgressIndicator.adaptive(),
-                            ),
-                    ),
-                    messageWidthRatio: AIChatUILayout.messageWidthRatio,
-                    textMessageBuilder: (
-                      textMessage, {
-                      required messageWidth,
-                      required showName,
-                    }) =>
-                        _buildAITextMessage(blocContext, textMessage),
-                    bubbleBuilder: (
-                      child, {
-                      required message,
-                      required nextMessageInGroup,
-                    }) {
-                      if (message.author.id == _user.id) {
-                        return ChatUserMessageBubble(
-                          message: message,
-                          child: child,
-                        );
-                      }
-                      return _buildAIBubble(message, blocContext, state, child);
-                    },
-                  ),
+              child: BlocBuilder<ChatBloc, ChatState>(
+                builder: (blocContext, state) => Chat(
+                  messages: state.messages,
+                  onSendPressed: (_) {
+                    // We use custom bottom widget for chat input, so
+                    // do not need to handle this event.
+                  },
+                  customBottomWidget: buildChatInput(blocContext),
+                  user: _user,
+                  theme: buildTheme(context),
+                  onEndReached: () async {
+                    if (state.hasMorePrevMessage &&
+                        state.loadingPreviousStatus !=
+                            const LoadingState.loading()) {
+                      blocContext
+                          .read<ChatBloc>()
+                          .add(const ChatEvent.startLoadingPrevMessage());
+                    }
+                  },
+                  emptyState: BlocBuilder<ChatBloc, ChatState>(
+                    builder: (_, state) =>
+                        state.initialLoadingStatus == const LoadingState.finish()
+                            ? Padding(
+                                padding: AIChatUILayout.welcomePagePadding,
+                                child: ChatWelcomePage(
+                                  onSelectedQuestion: (question) => blocContext
+                                      .read<ChatBloc>()
+                                      .add(ChatEvent.sendMessage(question)),
+                                ),
+                              )
+                            : const Center(
+                                child: CircularProgressIndicator.adaptive(),
+                              ),
+                  ),
+                  messageWidthRatio: AIChatUILayout.messageWidthRatio,
+                  textMessageBuilder: (
+                    textMessage, {
+                    required messageWidth,
+                    required showName,
+                  }) =>
+                      _buildAITextMessage(blocContext, textMessage),
+                  bubbleBuilder: (
+                    child, {
+                    required message,
+                    required nextMessageInGroup,
+                  }) {
+                    if (message.author.id == _user.id) {
+                      return ChatUserMessageBubble(
+                        message: message,
+                        child: child,
+                      );
+                    }
+                    return _buildAIBubble(message, blocContext, state, child);
+                  },
                 ),
               ),
             ),
           ),
@@ -338,31 +345,40 @@ class _ChatContentPageState extends State<_ChatContentPage> {
     return ClipRect(
       child: Padding(
         padding: AIChatUILayout.safeAreaInsets(context),
-        child: Column(
-          children: [
-            BlocSelector<ChatBloc, ChatState, LoadingState>(
-              selector: (state) => state.streamingStatus,
-              builder: (context, state) {
-                return ChatInput(
-                  chatId: widget.view.id,
-                  onSendPressed: (message) =>
-                      onSendPressed(context, message.text),
-                  isStreaming: state != const LoadingState.finish(),
-                  onStopStreaming: () {
-                    context.read<ChatBloc>().add(const ChatEvent.stopStream());
-                  },
-                );
-              },
-            ),
-            const VSpace(6),
-            Opacity(
-              opacity: 0.6,
-              child: FlowyText(
-                LocaleKeys.chat_aiMistakePrompt.tr(),
-                fontSize: 12,
-              ),
-            ),
-          ],
+        child: BlocBuilder<ChatInputBloc, ChatInputState>(
+          builder: (context, state) {
+            return state.aiType.when(
+              appflowyAI: () => Column(
+                children: [
+                  BlocSelector<ChatBloc, ChatState, LoadingState>(
+                    selector: (state) => state.streamingStatus,
+                    builder: (context, state) {
+                      return ChatInput(
+                        chatId: widget.view.id,
+                        onSendPressed: (message) =>
+                            onSendPressed(context, message.text),
+                        isStreaming: state != const LoadingState.finish(),
+                        onStopStreaming: () {
+                          context
+                              .read<ChatBloc>()
+                              .add(const ChatEvent.stopStream());
+                        },
+                      );
+                    },
+                  ),
+                  const VSpace(6),
+                  Opacity(
+                    opacity: 0.6,
+                    child: FlowyText(
+                      LocaleKeys.chat_aiMistakePrompt.tr(),
+                      fontSize: 12,
+                    ),
+                  ),
+                ],
+              ),
+              localAI: () => const SizedBox.shrink(),
+            );
+          },
         ),
       ),
     );

View File

@@ -49,7 +49,9 @@ class _ChatInputState extends State<ChatInput> {
       return KeyEventResult.ignored;
     }
     if (event is KeyDownEvent) {
-      _handleSendPressed();
+      if (!widget.isStreaming) {
+        _handleSendPressed();
+      }
     }
     return KeyEventResult.handled;
   } else {
@@ -78,17 +80,13 @@ class _ChatInputState extends State<ChatInput> {
   }
 
   void _handleSendPressed() {
-    if (widget.isStreaming) {
-      widget.onStopStreaming();
-    } else {
-      final trimmedText = _textController.text.trim();
-      if (trimmedText != '') {
-        final partialText = types.PartialText(text: trimmedText);
-        widget.onSendPressed(partialText);
-        if (widget.options.inputClearMode == InputClearMode.always) {
-          _textController.clear();
-        }
+    final trimmedText = _textController.text.trim();
+    if (trimmedText != '') {
+      final partialText = types.PartialText(text: trimmedText);
+      widget.onSendPressed(partialText);
+      if (widget.options.inputClearMode == InputClearMode.always) {
+        _textController.clear();
       }
     }
   }
@@ -139,7 +137,6 @@ class _ChatInputState extends State<ChatInput> {
             padding: textPadding,
             child: TextField(
               controller: _textController,
-              readOnly: widget.isStreaming,
               focusNode: _inputFocusNode,
               decoration: InputDecoration(
                 border: InputBorder.none,
@@ -151,7 +148,6 @@ class _ChatInputState extends State<ChatInput> {
               style: TextStyle(
                 color: AFThemeExtension.of(context).textColor,
               ),
-              enabled: widget.options.enabled,
               autocorrect: widget.options.autocorrect,
               autofocus: widget.options.autofocus,
               enableSuggestions: widget.options.enableSuggestions,
@@ -176,7 +172,10 @@ class _ChatInputState extends State<ChatInput> {
             padding: buttonPadding,
             child: AccessoryButton(
               onSendPressed: () {
-                _handleSendPressed();
+                if (!widget.isStreaming) {
+                  widget.onStopStreaming();
+                  _handleSendPressed();
+                }
               },
               onStopStreaming: () {
                 widget.onStopStreaming();

View File

@@ -4,6 +4,7 @@ import 'dart:isolate';
 
 import 'package:appflowy/plugins/ai_chat/application/chat_bloc.dart';
 import 'package:appflowy_backend/dispatch/dispatch.dart';
+import 'package:appflowy_backend/log.dart';
 import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
 import 'package:bloc/bloc.dart';
 import 'package:freezed_annotation/freezed_annotation.dart';
@@ -40,7 +41,7 @@ class DownloadModelBloc extends Bloc<DownloadModelEvent, DownloadModelState> {
         add(const DownloadModelEvent.downloadFinish());
       },
       onError: (err) {
-        // emit(state.copyWith(downloadError: err));
+        Log.error(err);
       },
     );
@@ -67,6 +68,12 @@ class DownloadModelBloc extends Bloc<DownloadModelEvent, DownloadModelState> {
       },
     );
   }
+
+  @override
+  Future<void> close() async {
+    await state.downloadStream?.dispose();
+    return super.close();
+  }
 }
 
 @freezed
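
The new `close()` override disposes `state.downloadStream`, a field this hunk never declares; presumably it is kept on the freezed state so the bloc can tear the download down. A minimal sketch of that assumed shape (the type and the other field are guesses, not taken from the diff):

    // Sketch only: the actual DownloadModelState in the repository may differ.
    @freezed
    class DownloadModelState with _$DownloadModelState {
      const factory DownloadModelState({
        // Held on the state so close() can dispose it and stop further
        // progress events from the download isolate.
        DownloadingStream? downloadStream,
        @Default(0) double percent,
      }) = _DownloadModelState;
    }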

View File

@@ -1,253 +1,93 @@
 import 'dart:async';
 
-import 'package:appflowy/plugins/ai_chat/application/chat_bloc.dart';
-import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
 import 'package:appflowy_backend/dispatch/dispatch.dart';
-import 'package:appflowy_backend/log.dart';
 import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
 import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
 import 'package:appflowy_result/appflowy_result.dart';
 import 'package:bloc/bloc.dart';
 import 'package:freezed_annotation/freezed_annotation.dart';
 
 part 'local_ai_bloc.freezed.dart';
 
-class LocalAISettingBloc
-    extends Bloc<LocalAISettingEvent, LocalAISettingState> {
-  LocalAISettingBloc()
-      : listener = LocalLLMListener(),
-        super(const LocalAISettingState()) {
-    listener.start(
-      stateCallback: (newState) {
-        if (!isClosed) {
-          add(LocalAISettingEvent.updateLLMRunningState(newState.state));
-        }
-      },
-    );
-    on<LocalAISettingEvent>(_handleEvent);
-  }
-
-  final LocalLLMListener listener;
-
-  /// Handles incoming events and dispatches them to the appropriate handler.
-  Future<void> _handleEvent(
-    LocalAISettingEvent event,
-    Emitter<LocalAISettingState> emit,
-  ) async {
-    await event.when(
-      started: _handleStarted,
-      didLoadModelInfo: (FlowyResult<LLMModelInfoPB, FlowyError> result) {
-        result.fold(
-          (modelInfo) {
-            _fetchCurremtLLMState();
-            emit(
-              state.copyWith(
-                modelInfo: modelInfo,
-                models: modelInfo.models,
-                selectedLLMModel: modelInfo.selectedModel,
-                fetchModelInfoState: const LoadingState.finish(),
-              ),
-            );
-          },
-          (err) {
-            emit(
-              state.copyWith(
-                fetchModelInfoState: LoadingState.finish(error: err),
-              ),
-            );
-          },
-        );
-      },
-      selectLLMConfig: (LLMModelPB llmModel) async {
-        final result = await ChatEventUpdateLocalLLM(llmModel).send();
-        result.fold(
-          (llmResource) {
-            // If all resources are downloaded, show reload plugin
-            if (llmResource.pendingResources.isNotEmpty) {
-              emit(
-                state.copyWith(
-                  selectedLLMModel: llmModel,
-                  localAIInfo: LocalAIProgress.showDownload(
-                    llmResource,
-                    llmModel,
-                  ),
-                  selectLLMState: const LoadingState.finish(),
-                ),
-              );
-            } else {
-              emit(
-                state.copyWith(
-                  selectedLLMModel: llmModel,
-                  selectLLMState: const LoadingState.finish(),
-                  localAIInfo: const LocalAIProgress.checkPluginState(),
-                ),
-              );
-            }
-          },
-          (err) {
-            emit(
-              state.copyWith(
-                selectLLMState: LoadingState.finish(error: err),
-              ),
-            );
-          },
-        );
-      },
-      refreshLLMState: (LocalModelResourcePB llmResource) {
-        if (state.selectedLLMModel == null) {
-          Log.error(
-            'Unexpected null selected config. It should be set already',
-          );
-          return;
-        }
-        // reload plugin if all resources are downloaded
-        if (llmResource.pendingResources.isEmpty) {
-          emit(
-            state.copyWith(
-              localAIInfo: const LocalAIProgress.checkPluginState(),
-            ),
-          );
-        } else {
-          if (state.selectedLLMModel != null) {
-            // Go to download page if the selected model is downloading
-            if (llmResource.isDownloading) {
-              emit(
-                state.copyWith(
-                  localAIInfo:
-                      LocalAIProgress.startDownloading(state.selectedLLMModel!),
-                  selectLLMState: const LoadingState.finish(),
-                ),
-              );
-              return;
-            } else {
-              emit(
-                state.copyWith(
-                  localAIInfo: LocalAIProgress.showDownload(
-                    llmResource,
-                    state.selectedLLMModel!,
-                  ),
-                  selectLLMState: const LoadingState.finish(),
-                ),
-              );
-            }
-          }
-        }
-      },
-      startDownloadModel: (LLMModelPB llmModel) {
-        emit(
-          state.copyWith(
-            localAIInfo: LocalAIProgress.startDownloading(llmModel),
-            selectLLMState: const LoadingState.finish(),
-          ),
-        );
-      },
-      cancelDownload: () async {
-        final _ = await ChatEventCancelDownloadLLMResource().send();
-        _fetchCurremtLLMState();
-      },
-      finishDownload: () async {
-        emit(
-          state.copyWith(localAIInfo: const LocalAIProgress.finishDownload()),
-        );
-      },
-      updateLLMRunningState: (RunningStatePB newRunningState) {
-        if (newRunningState == RunningStatePB.Stopped) {
-          emit(
-            state.copyWith(
-              runningState: newRunningState,
-              localAIInfo: const LocalAIProgress.checkPluginState(),
-            ),
-          );
-        } else {
-          emit(state.copyWith(runningState: newRunningState));
-        }
-      },
-    );
-  }
-
-  void _fetchCurremtLLMState() async {
-    final result = await ChatEventGetLocalLLMState().send();
-    result.fold(
-      (llmResource) {
-        if (!isClosed) {
-          add(LocalAISettingEvent.refreshLLMState(llmResource));
-        }
-      },
-      (err) {
-        Log.error(err);
-      },
-    );
-  }
-
-  /// Handles the event to fetch local AI settings when the application starts.
-  Future<void> _handleStarted() async {
-    final result = await ChatEventRefreshLocalAIModelInfo().send();
-    if (!isClosed) {
-      add(LocalAISettingEvent.didLoadModelInfo(result));
-    }
-  }
-
-  @override
-  Future<void> close() async {
-    await listener.stop();
-    return super.close();
-  }
-}
-
-@freezed
-class LocalAISettingEvent with _$LocalAISettingEvent {
-  const factory LocalAISettingEvent.started() = _Started;
-  const factory LocalAISettingEvent.didLoadModelInfo(
-    FlowyResult<LLMModelInfoPB, FlowyError> result,
-  ) = _ModelInfo;
-  const factory LocalAISettingEvent.selectLLMConfig(LLMModelPB config) =
-      _SelectLLMConfig;
-  const factory LocalAISettingEvent.refreshLLMState(
-    LocalModelResourcePB llmResource,
-  ) = _RefreshLLMResource;
-  const factory LocalAISettingEvent.startDownloadModel(LLMModelPB llmModel) =
-      _StartDownloadModel;
-  const factory LocalAISettingEvent.cancelDownload() = _CancelDownload;
-  const factory LocalAISettingEvent.finishDownload() = _FinishDownload;
-  const factory LocalAISettingEvent.updateLLMRunningState(
-    RunningStatePB newRunningState,
-  ) = _RunningState;
-}
-
-@freezed
-class LocalAISettingState with _$LocalAISettingState {
-  const factory LocalAISettingState({
-    LLMModelInfoPB? modelInfo,
-    LLMModelPB? selectedLLMModel,
-    LocalAIProgress? localAIInfo,
-    @Default(LoadingState.loading()) LoadingState fetchModelInfoState,
-    @Default(LoadingState.loading()) LoadingState selectLLMState,
-    @Default([]) List<LLMModelPB> models,
-    @Default(RunningStatePB.Connecting) RunningStatePB runningState,
-  }) = _LocalAISettingState;
-}
-
-@freezed
-class LocalAIProgress with _$LocalAIProgress {
-  // when user select a new model, it will call requestDownload
-  const factory LocalAIProgress.requestDownloadInfo(
-    LocalModelResourcePB llmResource,
-    LLMModelPB llmModel,
-  ) = _RequestDownload;
-
-  // when user comes back to the setting page, it will auto detect current llm state
-  const factory LocalAIProgress.showDownload(
-    LocalModelResourcePB llmResource,
-    LLMModelPB llmModel,
-  ) = _DownloadNeeded;
-
-  // when start downloading the model
-  const factory LocalAIProgress.startDownloading(LLMModelPB llmModel) =
-      _Downloading;
-  const factory LocalAIProgress.finishDownload() = _Finish;
-  const factory LocalAIProgress.checkPluginState() = _PluginState;
-}
+class LocalAIToggleBloc extends Bloc<LocalAIToggleEvent, LocalAIToggleState> {
+  LocalAIToggleBloc() : super(const LocalAIToggleState()) {
+    on<LocalAIToggleEvent>(_handleEvent);
+  }
+
+  Future<void> _handleEvent(
+    LocalAIToggleEvent event,
+    Emitter<LocalAIToggleState> emit,
+  ) async {
+    await event.when(
+      started: () async {
+        final result = await ChatEventGetLocalAIState().send();
+        _handleResult(emit, result);
+      },
+      toggle: () async {
+        emit(
+          state.copyWith(
+            pageIndicator: const LocalAIToggleStateIndicator.loading(),
+          ),
+        );
+        unawaited(
+          ChatEventToggleLocalAI().send().then(
+            (result) {
+              if (!isClosed) {
+                add(LocalAIToggleEvent.handleResult(result));
+              }
+            },
+          ),
+        );
+      },
+      handleResult: (result) {
+        _handleResult(emit, result);
+      },
+    );
+  }
+
+  void _handleResult(
+    Emitter<LocalAIToggleState> emit,
+    FlowyResult<LocalAIPB, FlowyError> result,
+  ) {
+    result.fold(
+      (localAI) {
+        emit(
+          state.copyWith(
+            pageIndicator: LocalAIToggleStateIndicator.ready(localAI.enabled),
+          ),
+        );
+      },
+      (err) {
+        emit(
+          state.copyWith(
+            pageIndicator: LocalAIToggleStateIndicator.error(err),
+          ),
+        );
+      },
+    );
+  }
+}
+
+@freezed
+class LocalAIToggleEvent with _$LocalAIToggleEvent {
+  const factory LocalAIToggleEvent.started() = _Started;
+  const factory LocalAIToggleEvent.toggle() = _Toggle;
+  const factory LocalAIToggleEvent.handleResult(
+    FlowyResult<LocalAIPB, FlowyError> result,
+  ) = _HandleResult;
+}
+
+@freezed
+class LocalAIToggleState with _$LocalAIToggleState {
+  const factory LocalAIToggleState({
+    @Default(LocalAIToggleStateIndicator.loading())
+    LocalAIToggleStateIndicator pageIndicator,
+  }) = _LocalAIToggleState;
+}
+
+@freezed
+class LocalAIToggleStateIndicator with _$LocalAIToggleStateIndicator {
+  const factory LocalAIToggleStateIndicator.error(FlowyError error) = _OnError;
+  const factory LocalAIToggleStateIndicator.ready(bool isEnabled) = _Ready;
+  const factory LocalAIToggleStateIndicator.loading() = _Loading;
+}

View File

@@ -0,0 +1,261 @@
import 'dart:async';

import 'package:appflowy/plugins/ai_chat/application/chat_bloc.dart';
import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';

part 'local_ai_chat_bloc.freezed.dart';

class LocalAIChatSettingBloc
    extends Bloc<LocalAIChatSettingEvent, LocalAIChatSettingState> {
  LocalAIChatSettingBloc()
      : listener = LocalLLMListener(),
        super(const LocalAIChatSettingState()) {
    listener.start(
      stateCallback: (newState) {
        if (!isClosed) {
          add(LocalAIChatSettingEvent.updateLLMRunningState(newState.state));
        }
      },
    );
    on<LocalAIChatSettingEvent>(_handleEvent);
  }

  final LocalLLMListener listener;

  /// Handles incoming events and dispatches them to the appropriate handler.
  Future<void> _handleEvent(
    LocalAIChatSettingEvent event,
    Emitter<LocalAIChatSettingState> emit,
  ) async {
    await event.when(
      refreshAISetting: _handleStarted,
      didLoadModelInfo: (FlowyResult<LLMModelInfoPB, FlowyError> result) {
        result.fold(
          (modelInfo) {
            _fetchCurremtLLMState();
            emit(
              state.copyWith(
                modelInfo: modelInfo,
                models: modelInfo.models,
                selectedLLMModel: modelInfo.selectedModel,
                fetchModelInfoState: const LoadingState.finish(),
              ),
            );
          },
          (err) {
            emit(
              state.copyWith(
                fetchModelInfoState: LoadingState.finish(error: err),
              ),
            );
          },
        );
      },
      selectLLMConfig: (LLMModelPB llmModel) async {
        final result = await ChatEventUpdateLocalLLM(llmModel).send();
        result.fold(
          (llmResource) {
            // If all resources are downloaded, show reload plugin
            if (llmResource.pendingResources.isNotEmpty) {
              emit(
                state.copyWith(
                  selectedLLMModel: llmModel,
                  localAIInfo: LocalAIProgress.showDownload(
                    llmResource,
                    llmModel,
                  ),
                  selectLLMState: const LoadingState.finish(),
                ),
              );
            } else {
              emit(
                state.copyWith(
                  selectedLLMModel: llmModel,
                  selectLLMState: const LoadingState.finish(),
                  localAIInfo: const LocalAIProgress.checkPluginState(),
                ),
              );
            }
          },
          (err) {
            emit(
              state.copyWith(
                selectLLMState: LoadingState.finish(error: err),
              ),
            );
          },
        );
      },
      refreshLLMState: (LocalModelResourcePB llmResource) {
        if (state.selectedLLMModel == null) {
          Log.error(
            'Unexpected null selected config. It should be set already',
          );
          return;
        }
        // reload plugin if all resources are downloaded
        if (llmResource.pendingResources.isEmpty) {
          emit(
            state.copyWith(
              localAIInfo: const LocalAIProgress.checkPluginState(),
            ),
          );
        } else {
          if (state.selectedLLMModel != null) {
            // Go to download page if the selected model is downloading
            if (llmResource.isDownloading) {
              emit(
                state.copyWith(
                  localAIInfo:
                      LocalAIProgress.startDownloading(state.selectedLLMModel!),
                  selectLLMState: const LoadingState.finish(),
                ),
              );
              return;
            } else {
              emit(
                state.copyWith(
                  localAIInfo: LocalAIProgress.showDownload(
                    llmResource,
                    state.selectedLLMModel!,
                  ),
                  selectLLMState: const LoadingState.finish(),
                ),
              );
            }
          }
        }
      },
      startDownloadModel: (LLMModelPB llmModel) {
        emit(
          state.copyWith(
            localAIInfo: LocalAIProgress.startDownloading(llmModel),
            selectLLMState: const LoadingState.finish(),
          ),
        );
      },
      cancelDownload: () async {
        final _ = await ChatEventCancelDownloadLLMResource().send();
        _fetchCurremtLLMState();
      },
      finishDownload: () async {
        emit(
          state.copyWith(localAIInfo: const LocalAIProgress.finishDownload()),
        );
      },
      updateLLMRunningState: (RunningStatePB newRunningState) {
        if (newRunningState == RunningStatePB.Stopped) {
          emit(
            state.copyWith(
              runningState: newRunningState,
              localAIInfo: const LocalAIProgress.checkPluginState(),
            ),
          );
        } else {
          emit(state.copyWith(runningState: newRunningState));
        }
      },
    );
  }

  void _fetchCurremtLLMState() async {
    final result = await ChatEventGetLocalLLMState().send();
    result.fold(
      (llmResource) {
        if (!isClosed) {
          add(LocalAIChatSettingEvent.refreshLLMState(llmResource));
        }
      },
      (err) {
        Log.error(err);
      },
    );
  }

  /// Handles the event to fetch local AI settings when the application starts.
  Future<void> _handleStarted() async {
    final result = await ChatEventRefreshLocalAIModelInfo().send();
    if (!isClosed) {
      add(LocalAIChatSettingEvent.didLoadModelInfo(result));
    }
  }

  @override
  Future<void> close() async {
    await listener.stop();
    return super.close();
  }
}

@freezed
class LocalAIChatSettingEvent with _$LocalAIChatSettingEvent {
  const factory LocalAIChatSettingEvent.refreshAISetting() = _RefreshAISetting;
  const factory LocalAIChatSettingEvent.didLoadModelInfo(
    FlowyResult<LLMModelInfoPB, FlowyError> result,
  ) = _ModelInfo;
  const factory LocalAIChatSettingEvent.selectLLMConfig(LLMModelPB config) =
      _SelectLLMConfig;
  const factory LocalAIChatSettingEvent.refreshLLMState(
    LocalModelResourcePB llmResource,
  ) = _RefreshLLMResource;
  const factory LocalAIChatSettingEvent.startDownloadModel(
    LLMModelPB llmModel,
  ) = _StartDownloadModel;
  const factory LocalAIChatSettingEvent.cancelDownload() = _CancelDownload;
  const factory LocalAIChatSettingEvent.finishDownload() = _FinishDownload;
  const factory LocalAIChatSettingEvent.updateLLMRunningState(
    RunningStatePB newRunningState,
  ) = _RunningState;
}

@freezed
class LocalAIChatSettingState with _$LocalAIChatSettingState {
  const factory LocalAIChatSettingState({
    LLMModelInfoPB? modelInfo,
    LLMModelPB? selectedLLMModel,
    LocalAIProgress? localAIInfo,
    @Default(LoadingState.loading()) LoadingState fetchModelInfoState,
    @Default(LoadingState.loading()) LoadingState selectLLMState,
    @Default([]) List<LLMModelPB> models,
    @Default(RunningStatePB.Connecting) RunningStatePB runningState,
  }) = _LocalAIChatSettingState;
}

// @freezed
// class LocalChatAIStateIndicator with _$LocalChatAIStateIndicator {
//   // when start downloading the model
//   const factory LocalChatAIStateIndicator.error(FlowyError error) = _OnError;
//   const factory LocalChatAIStateIndicator.ready(bool isEnabled) = _Ready;
// }

@freezed
class LocalAIProgress with _$LocalAIProgress {
  // when user select a new model, it will call requestDownload
  const factory LocalAIProgress.requestDownloadInfo(
    LocalModelResourcePB llmResource,
    LLMModelPB llmModel,
  ) = _RequestDownload;

  // when user comes back to the setting page, it will auto detect current llm state
  const factory LocalAIProgress.showDownload(
    LocalModelResourcePB llmResource,
    LLMModelPB llmModel,
  ) = _DownloadNeeded;

  // when start downloading the model
  const factory LocalAIProgress.startDownloading(LLMModelPB llmModel) =
      _Downloading;
  const factory LocalAIProgress.finishDownload() = _Finish;
  const factory LocalAIProgress.checkPluginState() = _PluginState;
}
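
This file is essentially the old `LocalAISettingBloc` carried over under a chat-specific name, with `started` renamed to `refreshAISetting`. A minimal sketch of how a settings pane might drive it (hypothetical wiring; the real UI lives in `local_ai_chat_setting.dart` further down):

    final bloc = LocalAIChatSettingBloc()
      ..add(const LocalAIChatSettingEvent.refreshAISetting());

    // Forward a dropdown selection; the bloc replies through state.localAIInfo
    // with download progress or a plugin-state check.
    void onModelSelected(LLMModelPB model) {
      bloc.add(LocalAIChatSettingEvent.selectLLMConfig(model));
    }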

View File

@@ -0,0 +1,95 @@
import 'dart:async';

import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';

part 'local_ai_chat_toggle_bloc.freezed.dart';

class LocalAIChatToggleBloc
    extends Bloc<LocalAIChatToggleEvent, LocalAIChatToggleState> {
  LocalAIChatToggleBloc() : super(const LocalAIChatToggleState()) {
    on<LocalAIChatToggleEvent>(_handleEvent);
  }

  Future<void> _handleEvent(
    LocalAIChatToggleEvent event,
    Emitter<LocalAIChatToggleState> emit,
  ) async {
    await event.when(
      started: () async {
        final result = await ChatEventGetLocalAIChatState().send();
        _handleResult(emit, result);
      },
      toggle: () async {
        emit(
          state.copyWith(
            pageIndicator: const LocalAIChatToggleStateIndicator.loading(),
          ),
        );
        unawaited(
          ChatEventToggleLocalAIChat().send().then(
            (result) {
              if (!isClosed) {
                add(LocalAIChatToggleEvent.handleResult(result));
              }
            },
          ),
        );
      },
      handleResult: (result) {
        _handleResult(emit, result);
      },
    );
  }

  void _handleResult(
    Emitter<LocalAIChatToggleState> emit,
    FlowyResult<LocalAIChatPB, FlowyError> result,
  ) {
    result.fold(
      (localAI) {
        emit(
          state.copyWith(
            pageIndicator:
                LocalAIChatToggleStateIndicator.ready(localAI.enabled),
          ),
        );
      },
      (err) {
        emit(
          state.copyWith(
            pageIndicator: LocalAIChatToggleStateIndicator.error(err),
          ),
        );
      },
    );
  }
}

@freezed
class LocalAIChatToggleEvent with _$LocalAIChatToggleEvent {
  const factory LocalAIChatToggleEvent.started() = _Started;
  const factory LocalAIChatToggleEvent.toggle() = _Toggle;
  const factory LocalAIChatToggleEvent.handleResult(
    FlowyResult<LocalAIChatPB, FlowyError> result,
  ) = _HandleResult;
}

@freezed
class LocalAIChatToggleState with _$LocalAIChatToggleState {
  const factory LocalAIChatToggleState({
    @Default(LocalAIChatToggleStateIndicator.loading())
    LocalAIChatToggleStateIndicator pageIndicator,
  }) = _LocalAIChatToggleState;
}

@freezed
class LocalAIChatToggleStateIndicator with _$LocalAIChatToggleStateIndicator {
  const factory LocalAIChatToggleStateIndicator.error(FlowyError error) =
      _OnError;
  const factory LocalAIChatToggleStateIndicator.ready(bool isEnabled) = _Ready;
  const factory LocalAIChatToggleStateIndicator.loading() = _Loading;
}

View File

@@ -2,19 +2,19 @@ import 'dart:async';
 import 'dart:typed_data';
 
 import 'package:appflowy/plugins/ai_chat/application/chat_notification.dart';
-import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
-import 'package:appflowy_backend/protobuf/flowy-chat/notification.pb.dart';
+import 'package:appflowy_backend/protobuf/flowy-chat/protobuf.dart';
 import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
 import 'package:appflowy_backend/protobuf/flowy-notification/subject.pb.dart';
 import 'package:appflowy_backend/rust_stream.dart';
 import 'package:appflowy_result/appflowy_result.dart';
 
-typedef PluginStateCallback = void Function(PluginStatePB state);
+typedef PluginStateCallback = void Function(LocalAIPluginStatePB state);
+typedef LocalAIChatCallback = void Function(LocalAIChatPB chatState);
 
 class LocalLLMListener {
   LocalLLMListener() {
     _parser =
-        ChatNotificationParser(id: "appflowy_chat_plugin", callback: _callback);
+        ChatNotificationParser(id: "appflowy_ai_plugin", callback: _callback);
     _subscription = RustStreamReceiver.listen(
       (observable) => _parser?.parse(observable),
     );
@@ -24,12 +24,15 @@ class LocalLLMListener {
   ChatNotificationParser? _parser;
 
   PluginStateCallback? stateCallback;
+  LocalAIChatCallback? chatStateCallback;
   void Function()? finishStreamingCallback;
 
   void start({
     PluginStateCallback? stateCallback,
+    LocalAIChatCallback? chatStateCallback,
   }) {
     this.stateCallback = stateCallback;
+    this.chatStateCallback = chatStateCallback;
   }
 
   void _callback(
@@ -39,7 +42,10 @@ class LocalLLMListener {
     result.map((r) {
       switch (ty) {
         case ChatNotification.UpdateChatPluginState:
-          stateCallback?.call(PluginStatePB.fromBuffer(r));
+          stateCallback?.call(LocalAIPluginStatePB.fromBuffer(r));
+          break;
+        case ChatNotification.UpdateLocalChatAI:
+          chatStateCallback?.call(LocalAIChatPB.fromBuffer(r));
           break;
         default:
           break;
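
Both callbacks are optional, so existing call sites that only watch the plugin state keep working unchanged. A minimal subscription sketch (the print bodies are placeholders):

    final listener = LocalLLMListener();
    listener.start(
      // Fired for ChatNotification.UpdateChatPluginState.
      stateCallback: (pluginState) => print('plugin: ${pluginState.state}'),
      // Fired for ChatNotification.UpdateLocalChatAI.
      chatStateCallback: (chatState) =>
          print('file enabled: ${chatState.fileEnabled}'),
    );
    // Later, e.g. in a bloc's close():
    // await listener.stop();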

View File

@@ -11,7 +11,11 @@ part 'plugin_state_bloc.freezed.dart';
 class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
   PluginStateBloc()
       : listener = LocalLLMListener(),
-        super(const PluginStateState(action: PluginStateAction.init())) {
+        super(
+          const PluginStateState(
+            action: PluginStateAction.init(),
+          ),
+        ) {
     listener.start(
       stateCallback: (pluginState) {
         if (!isClosed) {
@@ -37,7 +41,7 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
   ) async {
     await event.when(
       started: () async {
-        final result = await ChatEventGetPluginState().send();
+        final result = await ChatEventGetLocalAIPluginState().send();
         result.fold(
           (pluginState) {
             if (!isClosed) {
@@ -47,20 +51,24 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
           (err) => Log.error(err.toString()),
         );
       },
-      updateState: (PluginStatePB pluginState) {
+      updateState: (LocalAIPluginStatePB pluginState) {
         switch (pluginState.state) {
+          case RunningStatePB.Connecting:
+            emit(
+              const PluginStateState(action: PluginStateAction.loadingPlugin()),
+            );
           case RunningStatePB.Running:
             emit(const PluginStateState(action: PluginStateAction.ready()));
             break;
           default:
             emit(
-              state.copyWith(action: const PluginStateAction.reloadRequired()),
+              state.copyWith(action: const PluginStateAction.restart()),
             );
             break;
         }
       },
       restartLocalAI: () {
-        ChatEventRestartLocalAI().send();
+        ChatEventRestartLocalAIChat().send();
       },
     );
   }
@@ -69,7 +77,7 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
 @freezed
 class PluginStateEvent with _$PluginStateEvent {
   const factory PluginStateEvent.started() = _Started;
-  const factory PluginStateEvent.updateState(PluginStatePB pluginState) =
+  const factory PluginStateEvent.updateState(LocalAIPluginStatePB pluginState) =
       _UpdatePluginState;
   const factory PluginStateEvent.restartLocalAI() = _RestartLocalAI;
 }
@@ -83,6 +91,7 @@ class PluginStateState with _$PluginStateState {
 @freezed
 class PluginStateAction with _$PluginStateAction {
   const factory PluginStateAction.init() = _Init;
+  const factory PluginStateAction.loadingPlugin() = _LoadingPlugin;
   const factory PluginStateAction.ready() = _Ready;
-  const factory PluginStateAction.reloadRequired() = _ReloadRequired;
+  const factory PluginStateAction.restart() = _Restart;
 }

View File

@@ -1,6 +1,6 @@
 import 'package:appflowy/generated/flowy_svgs.g.dart';
 import 'package:appflowy/generated/locale_keys.g.dart';
-import 'package:appflowy/workspace/application/settings/ai/local_ai_bloc.dart';
+import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_bloc.dart';
 import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
 import 'package:easy_localization/easy_localization.dart';
 import 'package:flowy_infra_ui/style_widget/text.dart';
@@ -22,7 +22,7 @@ class InitLocalAIIndicator extends StatelessWidget {
         ),
         child: Padding(
           padding: const EdgeInsets.symmetric(horizontal: 6, vertical: 4),
-          child: BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
+          child: BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
             builder: (context, state) {
               switch (state.runningState) {
                 case RunningStatePB.Connecting:

View File

@@ -1,73 +1,168 @@
 import 'package:appflowy/generated/flowy_svgs.g.dart';
-import 'package:appflowy/workspace/application/settings/ai/local_ai_bloc.dart';
+import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_bloc.dart';
+import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_toggle_bloc.dart';
 import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/downloading.dart';
 import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/init_local_ai.dart';
 import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/plugin_state.dart';
 import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
+import 'package:appflowy/workspace/presentation/widgets/toggle/toggle.dart';
 import 'package:appflowy_backend/protobuf/flowy-chat/entities.pb.dart';
 import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
+import 'package:expandable/expandable.dart';
 import 'package:flowy_infra_ui/flowy_infra_ui.dart';
 import 'package:flutter/material.dart';
 import 'package:appflowy/generated/locale_keys.g.dart';
-import 'package:appflowy/workspace/application/settings/ai/settings_ai_bloc.dart';
 import 'package:appflowy/workspace/presentation/settings/shared/af_dropdown_menu_entry.dart';
 import 'package:appflowy/workspace/presentation/settings/shared/settings_dropdown.dart';
-import 'package:appflowy_backend/protobuf/flowy-user/protobuf.dart';
 import 'package:easy_localization/easy_localization.dart';
 import 'package:flutter_bloc/flutter_bloc.dart';
 
-class LocalModelConfig extends StatelessWidget {
-  const LocalModelConfig({super.key});
+class LocalAIChatSetting extends StatelessWidget {
+  const LocalAIChatSetting({super.key});
 
   @override
   Widget build(BuildContext context) {
-    return BlocBuilder<SettingsAIBloc, SettingsAIState>(
-      builder: (context, state) {
-        if (state.aiSettings == null) {
-          return const SizedBox.shrink();
-        }
-
-        if (state.aiSettings!.aiModel != AIModelPB.LocalAIModel) {
-          return const SizedBox.shrink();
-        }
-
-        return BlocProvider(
-          create: (context) =>
-              LocalAISettingBloc()..add(const LocalAISettingEvent.started()),
-          child: Padding(
-            padding: const EdgeInsets.symmetric(vertical: 6),
-            child: Column(
-              children: [
-                Row(
-                  mainAxisAlignment: MainAxisAlignment.spaceBetween,
-                  children: [
-                    Flexible(
-                      child: FlowyText.medium(
-                        LocaleKeys.settings_aiPage_keys_llmModel.tr(),
-                        fontSize: 14,
-                      ),
-                    ),
-                    const Spacer(),
-                    BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
-                      builder: (context, state) {
-                        return state.fetchModelInfoState.when(
-                          loading: () =>
-                              const CircularProgressIndicator.adaptive(),
-                          finish: (err) {
-                            return (err == null)
-                                ? const _SelectLocalModelDropdownMenu()
-                                : const SizedBox.shrink();
-                          },
-                        );
-                      },
-                    ),
-                  ],
-                ),
-                const IntrinsicHeight(child: _LocalLLMInfoWidget()),
-              ],
-            ),
-          ),
-        );
-      },
-    );
+    return MultiBlocProvider(
+      providers: [
+        BlocProvider(create: (context) => LocalAIChatSettingBloc()),
+        BlocProvider(
+          create: (context) => LocalAIChatToggleBloc()
+            ..add(const LocalAIChatToggleEvent.started()),
+        ),
+      ],
+      child: ExpandableNotifier(
+        child: BlocListener<LocalAIChatToggleBloc, LocalAIChatToggleState>(
+          listener: (context, state) {
+            // Listen to the toggle state and expand the panel if the state is ready.
+            final controller = ExpandableController.of(
+              context,
+              required: true,
+            )!;
+
+            // Need to wrap this in WidgetsBinding.instance.addPostFrameCallback,
+            // otherwise the ExpandablePanel is sometimes not expanded. Maybe
+            // because the ExpandablePanel is not built yet when the listener
+            // is called.
+            WidgetsBinding.instance.addPostFrameCallback(
+              (_) {
+                state.pageIndicator.when(
+                  error: (_) => controller.expanded = false,
+                  ready: (enabled) {
+                    controller.expanded = enabled;
+                    context.read<LocalAIChatSettingBloc>().add(
+                          const LocalAIChatSettingEvent.refreshAISetting(),
+                        );
+                  },
+                  loading: () => controller.expanded = false,
+                );
+              },
+              debugLabel: 'LocalAI.showLocalAIChatSetting',
+            );
+          },
+          child: ExpandablePanel(
+            theme: const ExpandableThemeData(
+              headerAlignment: ExpandablePanelHeaderAlignment.center,
+              tapBodyToCollapse: false,
+              hasIcon: false,
+              tapBodyToExpand: false,
+              tapHeaderToExpand: false,
+            ),
+            header: const LocalAIChatSettingHeader(),
+            collapsed: const SizedBox.shrink(),
+            expanded: Padding(
+              padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 6),
+              child: Column(
+                crossAxisAlignment: CrossAxisAlignment.start,
+                children: [
+                  Row(
+                    mainAxisAlignment: MainAxisAlignment.spaceBetween,
+                    children: [
+                      Flexible(
+                        child: FlowyText.medium(
+                          LocaleKeys.settings_aiPage_keys_llmModel.tr(),
+                          fontSize: 14,
+                        ),
+                      ),
+                      const Spacer(),
+                      BlocBuilder<LocalAIChatSettingBloc,
+                          LocalAIChatSettingState>(
+                        builder: (context, state) {
+                          return state.fetchModelInfoState.when(
+                            loading: () => Expanded(
+                              child: Row(
+                                children: [
+                                  Flexible(
+                                    child: FlowyText(
+                                      LocaleKeys
+                                          .settings_aiPage_keys_fetchLocalModel
+                                          .tr(),
+                                    ),
+                                  ),
+                                  const Spacer(),
+                                  const CircularProgressIndicator.adaptive(),
+                                ],
+                              ),
+                            ),
+                            finish: (err) {
+                              return (err == null)
+                                  ? const _SelectLocalModelDropdownMenu()
+                                  : const SizedBox.shrink();
+                            },
+                          );
+                        },
+                      ),
+                    ],
+                  ),
+                  const IntrinsicHeight(child: _LocalLLMInfoWidget()),
+                ],
+              ),
+            ),
+          ),
+        ),
+      ),
+    );
   }
 }
+
+class LocalAIChatSettingHeader extends StatelessWidget {
+  const LocalAIChatSettingHeader({super.key});
+
+  @override
+  Widget build(BuildContext context) {
+    return BlocBuilder<LocalAIChatToggleBloc, LocalAIChatToggleState>(
+      builder: (context, state) {
+        return state.pageIndicator.when(
+          error: (error) {
+            return const SizedBox.shrink();
+          },
+          loading: () {
+            return Row(
+              children: [
+                FlowyText(
+                  LocaleKeys.settings_aiPage_keys_localAIStart.tr(),
+                ),
+                const Spacer(),
+                const CircularProgressIndicator.adaptive(),
+                const HSpace(8),
+              ],
+            );
+          },
+          ready: (isEnabled) {
+            return Row(
+              children: [
+                const FlowyText('Enable Local AI Chat'),
+                const Spacer(),
+                Toggle(
+                  value: isEnabled,
+                  onChanged: (value) {
+                    context
+                        .read<LocalAIChatToggleBloc>()
+                        .add(const LocalAIChatToggleEvent.toggle());
+                  },
+                ),
+              ],
+            );
+          },
+        );
+      },
+    );
+  }
+}
@@ -79,13 +174,13 @@ class _SelectLocalModelDropdownMenu extends StatelessWidget {
   @override
   Widget build(BuildContext context) {
-    return BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
+    return BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
       builder: (context, state) {
         return Flexible(
           child: SettingsDropdown<LLMModelPB>(
             key: const Key('_SelectLocalModelDropdownMenu'),
-            onChanged: (model) => context.read<LocalAISettingBloc>().add(
-                  LocalAISettingEvent.selectLLMConfig(model),
+            onChanged: (model) => context.read<LocalAIChatSettingBloc>().add(
+                  LocalAIChatSettingEvent.selectLLMConfig(model),
                 ),
             selectedOption: state.selectedLLMModel!,
             options: state.models
@@ -110,7 +205,7 @@ class _LocalLLMInfoWidget extends StatelessWidget {
   @override
   Widget build(BuildContext context) {
-    return BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
+    return BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
       builder: (context, state) {
         final error = errorFromState(state);
         if (error == null) {
@@ -137,11 +232,11 @@ class _LocalLLMInfoWidget extends StatelessWidget {
                 key: UniqueKey(),
                 llmModel: llmModel,
                 onFinish: () => context
-                    .read<LocalAISettingBloc>()
-                    .add(const LocalAISettingEvent.finishDownload()),
+                    .read<LocalAIChatSettingBloc>()
+                    .add(const LocalAIChatSettingEvent.finishDownload()),
                 onCancel: () => context
-                    .read<LocalAISettingBloc>()
-                    .add(const LocalAISettingEvent.cancelDownload()),
+                    .read<LocalAIChatSettingBloc>()
+                    .add(const LocalAIChatSettingEvent.cancelDownload()),
               );
             },
             finishDownload: () => const InitLocalAIIndicator(),
@@ -149,16 +244,19 @@ class _LocalLLMInfoWidget extends StatelessWidget {
           );
 
           return Padding(
-            padding: const EdgeInsets.only(top: 14),
+            padding: const EdgeInsets.only(top: 8),
             child: child,
           );
         } else {
           return const SizedBox.shrink();
         }
       } else {
-        return FlowyText(
-          error.msg,
-          maxLines: 10,
+        return Opacity(
+          opacity: 0.5,
+          child: FlowyText(
+            error.msg,
+            maxLines: 10,
+          ),
         );
       }
     },
@@ -180,15 +278,15 @@ class _LocalLLMInfoWidget extends StatelessWidget {
               return _LLMModelDownloadDialog(
                 llmResource: llmResource,
                 onOkPressed: () {
-                  context.read<LocalAISettingBloc>().add(
-                        LocalAISettingEvent.startDownloadModel(
+                  context.read<LocalAIChatSettingBloc>().add(
+                        LocalAIChatSettingEvent.startDownloadModel(
                           llmModel,
                         ),
                       );
                 },
                 onCancelPressed: () {
-                  context.read<LocalAISettingBloc>().add(
-                        const LocalAISettingEvent.cancelDownload(),
+                  context.read<LocalAIChatSettingBloc>().add(
+                        const LocalAIChatSettingEvent.cancelDownload(),
                       );
                 },
               );
@@ -199,7 +297,7 @@ class _LocalLLMInfoWidget extends StatelessWidget {
     );
   }
 
-  FlowyError? errorFromState(LocalAISettingState state) {
+  FlowyError? errorFromState(LocalAIChatSettingState state) {
     final err = state.fetchModelInfoState.when(
       loading: () => null,
       finish: (err) => err,
@@ -261,7 +359,7 @@ class _ShowDownloadIndicator extends StatelessWidget {
   @override
   Widget build(BuildContext context) {
-    return BlocBuilder<LocalAISettingBloc, LocalAISettingState>(
+    return BlocBuilder<LocalAIChatSettingBloc, LocalAIChatSettingState>(
       builder: (context, state) {
         return Row(
           children: [
@@ -288,15 +386,16 @@ class _ShowDownloadIndicator extends StatelessWidget {
                 return _LLMModelDownloadDialog(
                   llmResource: llmResource,
                   onOkPressed: () {
-                    context.read<LocalAISettingBloc>().add(
-                          LocalAISettingEvent.startDownloadModel(
+                    context.read<LocalAIChatSettingBloc>().add(
+                          LocalAIChatSettingEvent.startDownloadModel(
                             llmModel,
                           ),
                         );
                   },
                   onCancelPressed: () {
-                    context.read<LocalAISettingBloc>().add(
-                          const LocalAISettingEvent.cancelDownload(),
+                    context.read<LocalAIChatSettingBloc>().add(
+                          const LocalAIChatSettingEvent
+                              .cancelDownload(),
                         );
                   },
                 );
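
The post-frame wrapper in the `BlocListener` above is the usual fix when a listener fires before the widget owning the `ExpandableController` has been laid out. Stripped to its core, the pattern is just (names here are generic placeholders, not from the diff):

    WidgetsBinding.instance.addPostFrameCallback((_) {
      // Safe to touch the controller now: the current build pass is done.
      controller.expanded = shouldExpand;
    });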

View File

@@ -0,0 +1,162 @@
import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/workspace/application/settings/ai/local_ai_bloc.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/local_ai_chat_setting.dart';
import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
import 'package:appflowy/workspace/presentation/widgets/toggle/toggle.dart';
import 'package:easy_localization/easy_localization.dart';
import 'package:expandable/expandable.dart';
import 'package:flowy_infra_ui/style_widget/text.dart';
import 'package:flutter/material.dart';
import 'package:appflowy/workspace/application/settings/ai/settings_ai_bloc.dart';
import 'package:flutter_bloc/flutter_bloc.dart';

class LocalAISetting extends StatefulWidget {
  const LocalAISetting({super.key});

  @override
  State<LocalAISetting> createState() => _LocalAISettingState();
}

class _LocalAISettingState extends State<LocalAISetting> {
  @override
  Widget build(BuildContext context) {
    return BlocBuilder<SettingsAIBloc, SettingsAIState>(
      builder: (context, state) {
        if (state.aiSettings == null) {
          return const SizedBox.shrink();
        }

        return BlocProvider(
          create: (context) =>
              LocalAIToggleBloc()..add(const LocalAIToggleEvent.started()),
          child: Padding(
            padding: const EdgeInsets.symmetric(vertical: 6),
            child: ExpandableNotifier(
              child: BlocListener<LocalAIToggleBloc, LocalAIToggleState>(
                listener: (context, state) {
                  final controller =
                      ExpandableController.of(context, required: true)!;
                  state.pageIndicator.when(
                    error: (_) => controller.expanded = false,
                    ready: (enabled) => controller.expanded = enabled,
                    loading: () => controller.expanded = false,
                  );
                },
                child: ExpandablePanel(
                  theme: const ExpandableThemeData(
                    headerAlignment: ExpandablePanelHeaderAlignment.center,
                    tapBodyToCollapse: false,
                    hasIcon: false,
                    tapBodyToExpand: false,
                    tapHeaderToExpand: false,
                  ),
                  header: const LocalAISettingHeader(),
                  collapsed: const SizedBox.shrink(),
                  expanded: Column(
                    children: [
                      DecoratedBox(
                        decoration: BoxDecoration(
                          color: Theme.of(context)
                              .colorScheme
                              .surfaceContainerHighest,
                          borderRadius:
                              const BorderRadius.all(Radius.circular(4)),
                        ),
                        child: const Padding(
                          padding: EdgeInsets.only(
                            left: 12.0,
                            top: 6,
                            bottom: 6,
                          ),
                          child: LocalAIChatSetting(),
                        ),
                      ),
                    ],
                  ),
                ),
              ),
            ),
          ),
        );
      },
    );
  }
}

class LocalAISettingHeader extends StatelessWidget {
  const LocalAISettingHeader({super.key});

  @override
  Widget build(BuildContext context) {
    return BlocBuilder<LocalAIToggleBloc, LocalAIToggleState>(
      builder: (context, state) {
        return state.pageIndicator.when(
          error: (error) {
            return const SizedBox.shrink();
          },
          loading: () {
            return const CircularProgressIndicator.adaptive();
          },
          ready: (isEnabled) {
            return Row(
              children: [
                FlowyText(
                  LocaleKeys.settings_aiPage_keys_localAIToggleTitle.tr(),
                ),
                const Spacer(),
                Toggle(
                  value: isEnabled,
                  onChanged: (value) {
                    if (isEnabled) {
                      showDialog(
                        context: context,
                        barrierDismissible: false,
                        useRootNavigator: false,
                        builder: (dialogContext) {
                          return _ToggleLocalAIDialog(
                            onOkPressed: () {
                              context
                                  .read<LocalAIToggleBloc>()
                                  .add(const LocalAIToggleEvent.toggle());
                            },
                            onCancelPressed: () {},
                          );
                        },
                      );
                    } else {
                      context
                          .read<LocalAIToggleBloc>()
                          .add(const LocalAIToggleEvent.toggle());
                    }
                  },
                ),
              ],
            );
          },
        );
      },
    );
  }
}

class _ToggleLocalAIDialog extends StatelessWidget {
  const _ToggleLocalAIDialog({
    required this.onOkPressed,
    required this.onCancelPressed,
  });

  final VoidCallback onOkPressed;
  final VoidCallback onCancelPressed;

  @override
  Widget build(BuildContext context) {
    return NavigatorOkCancelDialog(
      title: LocaleKeys.settings_aiPage_keys_disableLocalAIDialog.tr(),
      okTitle: LocaleKeys.button_confirm.tr(),
      cancelTitle: LocaleKeys.button_cancel.tr(),
      onOkPressed: onOkPressed,
      onCancelPressed: onCancelPressed,
      titleUpperCase: false,
    );
  }
}

View File

@@ -61,7 +61,6 @@ List<AIModelPB> _availableModels = [
   AIModelPB.Claude3Sonnet,
   AIModelPB.GPT35,
   AIModelPB.GPT4o,
-  // AIModelPB.LocalAIModel,
 ];
 
 String _titleForAIModel(AIModelPB model) {
@@ -76,8 +75,6 @@ String _titleForAIModel(AIModelPB model) {
       return "GPT-3.5";
     case AIModelPB.GPT4o:
       return "GPT-4o";
-    case AIModelPB.LocalAIModel:
-      return "Local";
     default:
       Log.error("Unknown AI model: $model, fallback to default");
       return "Default";

View File

@ -21,7 +21,8 @@ class CheckPluginStateIndicator extends StatelessWidget {
return state.action.when( return state.action.when(
init: () => const _InitPlugin(), init: () => const _InitPlugin(),
ready: () => const _ReadyToUse(), ready: () => const _ReadyToUse(),
reloadRequired: () => const _ReloadButton(), restart: () => const _ReloadButton(),
loadingPlugin: () => const _InitPlugin(),
); );
}, },
), ),
@ -78,32 +79,29 @@ class _ReadyToUse extends StatelessWidget {
@override @override
Widget build(BuildContext context) { Widget build(BuildContext context) {
return Padding( return DecoratedBox(
padding: const EdgeInsets.symmetric(horizontal: 6, vertical: 4), decoration: const BoxDecoration(
child: DecoratedBox( color: Color(0xFFEDF7ED),
decoration: const BoxDecoration( borderRadius: BorderRadius.all(
color: Color(0xFFEDF7ED), Radius.circular(4),
borderRadius: BorderRadius.all(
Radius.circular(4),
),
), ),
child: Padding( ),
padding: const EdgeInsets.symmetric(vertical: 6), child: Padding(
child: Row( padding: const EdgeInsets.symmetric(vertical: 8),
children: [ child: Row(
const HSpace(8), children: [
const FlowySvg( const HSpace(8),
FlowySvgs.download_success_s, const FlowySvg(
color: Color(0xFF2E7D32), FlowySvgs.download_success_s,
), color: Color(0xFF2E7D32),
const HSpace(6), ),
FlowyText( const HSpace(6),
LocaleKeys.settings_aiPage_keys_localAILoaded.tr(), FlowyText(
fontSize: 11, LocaleKeys.settings_aiPage_keys_localAILoaded.tr(),
color: const Color(0xFF1E4620), fontSize: 11,
), color: const Color(0xFF1E4620),
], ),
), ],
), ),
), ),
); );

View File

@ -1,4 +1,3 @@
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/local_ai_config.dart';
import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/model_selection.dart'; import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/model_selection.dart';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
@ -44,10 +43,7 @@ class SettingsAIView extends StatelessWidget {
const AIModelSelection(), const AIModelSelection(),
]; ];
if (state.aiSettings != null && // children.add(const LocalAISetting());
state.aiSettings!.aiModel == AIModelPB.LocalAIModel) {
children.add(const LocalModelConfig());
}
children.add(const _AISearchToggle(value: false)); children.add(const _AISearchToggle(value: false));

View File

@ -172,7 +172,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]] [[package]]
name = "app-error" name = "app-error"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -192,7 +192,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-ai-client" name = "appflowy-ai-client"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -206,7 +206,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-local-ai" name = "appflowy-local-ai"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"appflowy-plugin", "appflowy-plugin",
@ -225,7 +225,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-plugin" name = "appflowy-plugin"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cfg-if", "cfg-if",
@ -826,7 +826,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api" name = "client-api"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"again", "again",
"anyhow", "anyhow",
@ -876,7 +876,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api-entity" name = "client-api-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"collab-entity", "collab-entity",
"collab-rt-entity", "collab-rt-entity",
@ -888,7 +888,7 @@ dependencies = [
[[package]] [[package]]
name = "client-websocket" name = "client-websocket"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-util", "futures-util",
@ -1128,7 +1128,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-entity" name = "collab-rt-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -1153,7 +1153,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-protocol" name = "collab-rt-protocol"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1417,7 +1417,7 @@ dependencies = [
"cssparser-macros", "cssparser-macros",
"dtoa-short", "dtoa-short",
"itoa 1.0.6", "itoa 1.0.6",
"phf 0.11.2", "phf 0.8.0",
"smallvec", "smallvec",
] ]
@ -1528,7 +1528,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
[[package]] [[package]]
name = "database-entity" name = "database-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3028,7 +3028,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue" name = "gotrue"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"futures-util", "futures-util",
@ -3045,7 +3045,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue-entity" name = "gotrue-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3477,7 +3477,7 @@ dependencies = [
[[package]] [[package]]
name = "infra" name = "infra"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -6021,7 +6021,7 @@ dependencies = [
[[package]] [[package]]
name = "shared-entity" name = "shared-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",

View File

@ -53,7 +53,7 @@ collab-user = { version = "0.2" }
# Run the script: # Run the script:
# scripts/tool/update_client_api_rev.sh new_rev_id # scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "eebdbcad79a35b07305affdd36f16d9ce95c5a18" } client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "f73b603dc4569ea3f23c41901564b90691ac1aaa" }
[dependencies] [dependencies]
serde_json.workspace = true serde_json.workspace = true
@ -128,5 +128,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
# To update the commit ID, run: # To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id # scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" } appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" } appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }

View File

@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]] [[package]]
name = "app-error" name = "app-error"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -183,7 +183,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-ai-client" name = "appflowy-ai-client"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -197,7 +197,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-local-ai" name = "appflowy-local-ai"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"appflowy-plugin", "appflowy-plugin",
@ -216,7 +216,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-plugin" name = "appflowy-plugin"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cfg-if", "cfg-if",
@ -800,7 +800,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api" name = "client-api"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"again", "again",
"anyhow", "anyhow",
@ -850,7 +850,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api-entity" name = "client-api-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"collab-entity", "collab-entity",
"collab-rt-entity", "collab-rt-entity",
@ -862,7 +862,7 @@ dependencies = [
[[package]] [[package]]
name = "client-websocket" name = "client-websocket"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-util", "futures-util",
@ -1111,7 +1111,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-entity" name = "collab-rt-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -1136,7 +1136,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-protocol" name = "collab-rt-protocol"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1407,7 +1407,7 @@ dependencies = [
"cssparser-macros", "cssparser-macros",
"dtoa-short", "dtoa-short",
"itoa 1.0.10", "itoa 1.0.10",
"phf 0.11.2", "phf 0.8.0",
"smallvec", "smallvec",
] ]
@ -1518,7 +1518,7 @@ checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5"
[[package]] [[package]]
name = "database-entity" name = "database-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3095,7 +3095,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue" name = "gotrue"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"futures-util", "futures-util",
@ -3112,7 +3112,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue-entity" name = "gotrue-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3549,7 +3549,7 @@ dependencies = [
[[package]] [[package]]
name = "infra" name = "infra"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -6085,7 +6085,7 @@ dependencies = [
[[package]] [[package]]
name = "shared-entity" name = "shared-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",

View File

@ -52,7 +52,7 @@ collab-user = { version = "0.2" }
# Run the script: # Run the script:
# scripts/tool/update_client_api_rev.sh new_rev_id # scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "eebdbcad79a35b07305affdd36f16d9ce95c5a18" } client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "f73b603dc4569ea3f23c41901564b90691ac1aaa" }
[dependencies] [dependencies]
serde_json.workspace = true serde_json.workspace = true
@ -128,6 +128,6 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
# To update the commit ID, run: # To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id # scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" } appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" } appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }

View File

@ -638,10 +638,14 @@
"downloadAIModelButton": "Download AI model", "downloadAIModelButton": "Download AI model",
"downloadingModel": "Downloading", "downloadingModel": "Downloading",
"localAILoaded": "Local AI Model successfully added and ready to use", "localAILoaded": "Local AI Model successfully added and ready to use",
"localAILoading": "Local AI Model is loading...", "localAIStart": "Local AI Chat is starting...",
"localAIStopped": "Local AI Model stopped", "localAILoading": "Local AI Chat Model is loading...",
"localAIStopped": "Local AI stopped",
"failToLoadLocalAI": "Failed to start local AI", "failToLoadLocalAI": "Failed to start local AI",
"restartLocalAI": "Restart Local AI", "restartLocalAI": "Restart Local AI",
"disableLocalAIDialog": "Do you want to disable local AI?",
"localAIToggleTitle": "Toggle to enable or disable local AI",
"fetchLocalModel": "Fetch local model configuration",
"title": "AI API Keys", "title": "AI API Keys",
"openAILabel": "OpenAI API key", "openAILabel": "OpenAI API key",
"openAITooltip": "You can find your Secret API key on the API key page", "openAITooltip": "You can find your Secret API key on the API key page",

View File

@ -163,7 +163,7 @@ checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
[[package]] [[package]]
name = "app-error" name = "app-error"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -183,7 +183,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-ai-client" name = "appflowy-ai-client"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -197,7 +197,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-local-ai" name = "appflowy-local-ai"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"appflowy-plugin", "appflowy-plugin",
@ -216,7 +216,7 @@ dependencies = [
[[package]] [[package]]
name = "appflowy-plugin" name = "appflowy-plugin"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=346020270a0a3d3c82a60b545cd3f52144d56beb#346020270a0a3d3c82a60b545cd3f52144d56beb" source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cfg-if", "cfg-if",
@ -718,7 +718,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api" name = "client-api"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"again", "again",
"anyhow", "anyhow",
@ -768,7 +768,7 @@ dependencies = [
[[package]] [[package]]
name = "client-api-entity" name = "client-api-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"collab-entity", "collab-entity",
"collab-rt-entity", "collab-rt-entity",
@ -780,7 +780,7 @@ dependencies = [
[[package]] [[package]]
name = "client-websocket" name = "client-websocket"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"futures-channel", "futures-channel",
"futures-util", "futures-util",
@ -989,7 +989,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-entity" name = "collab-rt-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bincode", "bincode",
@ -1014,7 +1014,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-rt-protocol" name = "collab-rt-protocol"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -1352,7 +1352,7 @@ checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
[[package]] [[package]]
name = "database-entity" name = "database-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -2695,7 +2695,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue" name = "gotrue"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"futures-util", "futures-util",
@ -2712,7 +2712,7 @@ dependencies = [
[[package]] [[package]]
name = "gotrue-entity" name = "gotrue-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",
@ -3077,7 +3077,7 @@ dependencies = [
[[package]] [[package]]
name = "infra" name = "infra"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -5223,7 +5223,7 @@ dependencies = [
[[package]] [[package]]
name = "shared-entity" name = "shared-entity"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=eebdbcad79a35b07305affdd36f16d9ce95c5a18#eebdbcad79a35b07305affdd36f16d9ce95c5a18" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Cloud?rev=f73b603dc4569ea3f23c41901564b90691ac1aaa#f73b603dc4569ea3f23c41901564b90691ac1aaa"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"app-error", "app-error",

View File

@ -99,8 +99,8 @@ zip = "2.1.3"
# Run the script.add_workspace_members: # Run the script.add_workspace_members:
# scripts/tool/update_client_api_rev.sh new_rev_id # scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "eebdbcad79a35b07305affdd36f16d9ce95c5a18" } client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "f73b603dc4569ea3f23c41901564b90691ac1aaa" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "eebdbcad79a35b07305affdd36f16d9ce95c5a18" } client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "f73b603dc4569ea3f23c41901564b90691ac1aaa" }
[profile.dev] [profile.dev]
opt-level = 1 opt-level = 1
@ -151,5 +151,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
# To update the commit ID, run: # To update the commit ID, run:
# scripts/tool/update_local_ai_rev.sh new_rev_id # scripts/tool/update_local_ai_rev.sh new_rev_id
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" } appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "346020270a0a3d3c82a60b545cd3f52144d56beb" } appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }

View File

@ -3,7 +3,7 @@ use crate::entities::{
ChatMessageErrorPB, ChatMessageListPB, ChatMessagePB, RepeatedRelatedQuestionPB, ChatMessageErrorPB, ChatMessageListPB, ChatMessagePB, RepeatedRelatedQuestionPB,
}; };
use crate::middleware::chat_service_mw::ChatServiceMiddleware; use crate::middleware::chat_service_mw::ChatServiceMiddleware;
use crate::notification::{send_notification, ChatNotification}; use crate::notification::{make_notification, ChatNotification};
use crate::persistence::{insert_chat_messages, select_chat_messages, ChatMessageTable}; use crate::persistence::{insert_chat_messages, select_chat_messages, ChatMessageTable};
use allo_isolate::Isolate; use allo_isolate::Isolate;
use flowy_chat_pub::cloud::{ChatCloudService, ChatMessage, ChatMessageType, MessageCursor}; use flowy_chat_pub::cloud::{ChatCloudService, ChatMessage, ChatMessageType, MessageCursor};
@ -138,7 +138,7 @@ impl Chat {
chat_id: chat_id.clone(), chat_id: chat_id.clone(),
error_message: err.to_string(), error_message: err.to_string(),
}; };
send_notification(&chat_id, ChatNotification::StreamChatMessageError) make_notification(&chat_id, ChatNotification::StreamChatMessageError)
.payload(pb) .payload(pb)
.send(); .send();
return Err(err); return Err(err);
@ -153,14 +153,14 @@ impl Chat {
chat_id: chat_id.clone(), chat_id: chat_id.clone(),
error_message: err.to_string(), error_message: err.to_string(),
}; };
send_notification(&chat_id, ChatNotification::StreamChatMessageError) make_notification(&chat_id, ChatNotification::StreamChatMessageError)
.payload(pb) .payload(pb)
.send(); .send();
return Err(err); return Err(err);
}, },
} }
send_notification(&chat_id, ChatNotification::FinishStreaming).send(); make_notification(&chat_id, ChatNotification::FinishStreaming).send();
if stream_buffer.lock().await.is_empty() { if stream_buffer.lock().await.is_empty() {
return Ok(()); return Ok(());
} }
@ -193,7 +193,7 @@ impl Chat {
vec![answer.clone()], vec![answer.clone()],
)?; )?;
let pb = ChatMessagePB::from(answer); let pb = ChatMessagePB::from(answer);
send_notification(chat_id, ChatNotification::DidReceiveChatMessage) make_notification(chat_id, ChatNotification::DidReceiveChatMessage)
.payload(pb) .payload(pb)
.send(); .send();
@ -234,7 +234,7 @@ impl Chat {
has_more: true, has_more: true,
total: 0, total: 0,
}; };
send_notification(&self.chat_id, ChatNotification::DidLoadPrevChatMessage) make_notification(&self.chat_id, ChatNotification::DidLoadPrevChatMessage)
.payload(pb.clone()) .payload(pb.clone())
.send(); .send();
return Ok(pb); return Ok(pb);
@ -355,11 +355,11 @@ impl Chat {
} else { } else {
*prev_message_state.write().await = PrevMessageState::NoMore; *prev_message_state.write().await = PrevMessageState::NoMore;
} }
send_notification(&chat_id, ChatNotification::DidLoadPrevChatMessage) make_notification(&chat_id, ChatNotification::DidLoadPrevChatMessage)
.payload(pb) .payload(pb)
.send(); .send();
} else { } else {
send_notification(&chat_id, ChatNotification::DidLoadLatestChatMessage) make_notification(&chat_id, ChatNotification::DidLoadLatestChatMessage)
.payload(pb) .payload(pb)
.send(); .send();
} }

View File

@ -46,8 +46,8 @@ impl ChatManager {
cloud_service.clone(), cloud_service.clone(),
)); ));
if local_ai_controller.is_ready() { if local_ai_controller.can_init() {
if let Err(err) = local_ai_controller.initialize() { if let Err(err) = local_ai_controller.initialize_chat_plugin(None) {
error!("[AI Plugin] failed to initialize local ai: {:?}", err); error!("[AI Plugin] failed to initialize local ai: {:?}", err);
} }
} }
@ -86,7 +86,7 @@ impl ChatManager {
pub async fn close_chat(&self, chat_id: &str) -> Result<(), FlowyError> { pub async fn close_chat(&self, chat_id: &str) -> Result<(), FlowyError> {
trace!("close chat: {}", chat_id); trace!("close chat: {}", chat_id);
if self.local_ai_controller.is_ready() { if self.local_ai_controller.is_running() {
info!("[AI Plugin] notify close chat: {}", chat_id); info!("[AI Plugin] notify close chat: {}", chat_id);
self.local_ai_controller.close_chat(chat_id); self.local_ai_controller.close_chat(chat_id);
} }
@ -97,7 +97,7 @@ impl ChatManager {
if let Some((_, chat)) = self.chats.remove(chat_id) { if let Some((_, chat)) = self.chats.remove(chat_id) {
chat.close(); chat.close();
if self.local_ai_controller.is_ready() { if self.local_ai_controller.is_running() {
info!("[AI Plugin] notify close chat: {}", chat_id); info!("[AI Plugin] notify close chat: {}", chat_id);
self.local_ai_controller.close_chat(chat_id); self.local_ai_controller.close_chat(chat_id);
} }

View File

@ -367,7 +367,7 @@ pub struct PendingResourcePB {
} }
#[derive(Default, ProtoBuf, Clone, Debug)] #[derive(Default, ProtoBuf, Clone, Debug)]
pub struct PluginStatePB { pub struct LocalAIPluginStatePB {
#[pb(index = 1)] #[pb(index = 1)]
pub state: RunningStatePB, pub state: RunningStatePB,
} }
@ -392,3 +392,21 @@ impl From<RunningState> for RunningStatePB {
} }
} }
} }
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct LocalAIPB {
#[pb(index = 1)]
pub enabled: bool,
}
#[derive(Default, ProtoBuf, Clone, Debug)]
pub struct LocalAIChatPB {
#[pb(index = 1)]
pub enabled: bool,
#[pb(index = 2)]
pub file_enabled: bool,
#[pb(index = 3)]
pub plugin_state: LocalAIPluginStatePB,
}
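On the Dart side these messages arrive through the generated protobuf bindings (same class names, camel-cased fields). A small illustrative helper, not part of this diff:

// Illustrative only: reading the generated Dart binding of LocalAIChatPB.
// `enabled` is the chat switch, `fileEnabled` the RAG/file switch, and
// `pluginState` reports whether the sidecar plugin is actually running.
String describeChatState(LocalAIChatPB chatState) {
  final running = chatState.pluginState.state == RunningStatePB.Running;
  return 'chat enabled: ${chatState.enabled}, '
      'file enabled: ${chatState.fileEnabled}, '
      'plugin running: $running';
}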

View File

@ -1,4 +1,5 @@
use flowy_chat_pub::cloud::ChatMessageType; use flowy_chat_pub::cloud::ChatMessageType;
use std::path::PathBuf; use std::path::PathBuf;
use allo_isolate::Isolate; use allo_isolate::Isolate;
@ -9,6 +10,7 @@ use validator::Validate;
use crate::chat_manager::ChatManager; use crate::chat_manager::ChatManager;
use crate::entities::*; use crate::entities::*;
use crate::local_ai::local_llm_chat::LLMModelInfo; use crate::local_ai::local_llm_chat::LLMModelInfo;
use crate::notification::{make_notification, ChatNotification, APPFLOWY_AI_NOTIFICATION_KEY};
use crate::tools::AITools; use crate::tools::AITools;
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult}; use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
@ -131,6 +133,17 @@ pub(crate) async fn refresh_local_ai_info_handler(
let (tx, rx) = oneshot::channel::<Result<LLMModelInfo, FlowyError>>(); let (tx, rx) = oneshot::channel::<Result<LLMModelInfo, FlowyError>>();
tokio::spawn(async move { tokio::spawn(async move {
let model_info = chat_manager.local_ai_controller.refresh().await; let model_info = chat_manager.local_ai_controller.refresh().await;
if model_info.is_err() {
if let Some(llm_model) = chat_manager.local_ai_controller.get_current_model() {
let model_info = LLMModelInfo {
selected_model: llm_model.clone(),
models: vec![llm_model],
};
let _ = tx.send(Ok(model_info));
return;
}
}
let _ = tx.send(model_info); let _ = tx.send(model_info);
}); });
@ -147,7 +160,7 @@ pub(crate) async fn update_local_llm_model_handler(
let chat_manager = upgrade_chat_manager(chat_manager)?; let chat_manager = upgrade_chat_manager(chat_manager)?;
let state = chat_manager let state = chat_manager
.local_ai_controller .local_ai_controller
.use_local_llm(data.llm_id) .select_local_llm(data.llm_id)
.await?; .await?;
data_result_ok(state) data_result_ok(state)
} }
@ -229,17 +242,99 @@ pub(crate) async fn cancel_download_llm_resource_handler(
#[tracing::instrument(level = "debug", skip_all, err)] #[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn get_plugin_state_handler( pub(crate) async fn get_plugin_state_handler(
chat_manager: AFPluginState<Weak<ChatManager>>, chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<PluginStatePB, FlowyError> { ) -> DataResult<LocalAIPluginStatePB, FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?; let chat_manager = upgrade_chat_manager(chat_manager)?;
let state = chat_manager.local_ai_controller.get_plugin_state(); let state = chat_manager.local_ai_controller.get_chat_plugin_state();
data_result_ok(state) data_result_ok(state)
} }
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn toggle_local_ai_chat_handler(
chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIChatPB, FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?;
let enabled = chat_manager
.local_ai_controller
.toggle_local_ai_chat()
.await?;
let file_enabled = chat_manager.local_ai_controller.is_rag_enabled();
let plugin_state = chat_manager.local_ai_controller.get_chat_plugin_state();
let pb = LocalAIChatPB {
enabled,
file_enabled,
plugin_state,
};
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateLocalChatAI,
)
.payload(pb.clone())
.send();
data_result_ok(pb)
}
#[tracing::instrument(level = "debug", skip_all, err)] #[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn restart_local_ai_handler( pub(crate) async fn toggle_local_ai_chat_file_handler(
chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIChatPB, FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?;
let enabled = chat_manager.local_ai_controller.is_chat_enabled();
let file_enabled = chat_manager
.local_ai_controller
.toggle_local_ai_chat_rag()
.await?;
let plugin_state = chat_manager.local_ai_controller.get_chat_plugin_state();
let pb = LocalAIChatPB {
enabled,
file_enabled,
plugin_state,
};
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateLocalChatAI,
)
.payload(pb.clone())
.send();
data_result_ok(pb)
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn get_local_ai_chat_state_handler(
chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIChatPB, FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?;
let enabled = chat_manager.local_ai_controller.is_chat_enabled();
let file_enabled = chat_manager.local_ai_controller.is_rag_enabled();
let plugin_state = chat_manager.local_ai_controller.get_chat_plugin_state();
data_result_ok(LocalAIChatPB {
enabled,
file_enabled,
plugin_state,
})
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn restart_local_ai_chat_handler(
chat_manager: AFPluginState<Weak<ChatManager>>, chat_manager: AFPluginState<Weak<ChatManager>>,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?; let chat_manager = upgrade_chat_manager(chat_manager)?;
chat_manager.local_ai_controller.restart(); chat_manager.local_ai_controller.restart_chat_plugin();
Ok(()) Ok(())
} }
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn toggle_local_ai_handler(
chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIPB, FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?;
let enabled = chat_manager.local_ai_controller.toggle_local_ai().await?;
data_result_ok(LocalAIPB { enabled })
}
#[tracing::instrument(level = "debug", skip_all, err)]
pub(crate) async fn get_local_ai_state_handler(
chat_manager: AFPluginState<Weak<ChatManager>>,
) -> DataResult<LocalAIPB, FlowyError> {
let chat_manager = upgrade_chat_manager(chat_manager)?;
let enabled = chat_manager.local_ai_controller.is_enabled();
data_result_ok(LocalAIPB { enabled })
}
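These handlers surface in Dart through generated ChatEvent wrappers (wrapper names assumed from the usual event codegen pattern). A hedged caller sketch:

// Sketch: flip the master local-AI switch, then read it back.
// ToggleLocalAI also starts or destroys the chat plugin as a side effect
// (see the LocalAIController changes below).
Future<bool> toggleAndReadLocalAI() async {
  await ChatEventToggleLocalAI().send();
  final result = await ChatEventGetLocalAIState().send();
  return result.fold(
    (localAI) => localAI.enabled,
    (err) {
      Log.error(err.toString());
      return false;
    },
  );
}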

View File

@ -40,8 +40,19 @@ pub fn init(chat_manager: Weak<ChatManager>) -> AFPlugin {
ChatEvent::CancelDownloadLLMResource, ChatEvent::CancelDownloadLLMResource,
cancel_download_llm_resource_handler, cancel_download_llm_resource_handler,
) )
.event(ChatEvent::GetPluginState, get_plugin_state_handler) .event(ChatEvent::GetLocalAIPluginState, get_plugin_state_handler)
.event(ChatEvent::RestartLocalAI, restart_local_ai_handler) .event(ChatEvent::ToggleLocalAIChat, toggle_local_ai_chat_handler)
.event(
ChatEvent::GetLocalAIChatState,
get_local_ai_chat_state_handler,
)
.event(ChatEvent::RestartLocalAIChat, restart_local_ai_chat_handler)
.event(ChatEvent::ToggleLocalAI, toggle_local_ai_handler)
.event(ChatEvent::GetLocalAIState, get_local_ai_state_handler)
.event(
ChatEvent::ToggleChatWithFile,
toggle_local_ai_chat_file_handler,
)
} }
#[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)] #[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
@ -90,9 +101,29 @@ pub enum ChatEvent {
#[event()] #[event()]
CancelDownloadLLMResource = 13, CancelDownloadLLMResource = 13,
#[event(output = "PluginStatePB")] #[event(output = "LocalAIPluginStatePB")]
GetPluginState = 14, GetLocalAIPluginState = 14,
#[event(output = "LocalAIChatPB")]
ToggleLocalAIChat = 15,
/// Returns the current local AI chat state
#[event(output = "LocalAIChatPB")]
GetLocalAIChatState = 16,
/// Restarts the local AI chat plugin. If the plugin quits, or the user terminates it
/// in Task Manager or Activity Monitor, it must be restarted before chat can resume.
#[event()]
RestartLocalAIChat = 17,
/// Enable or disable local AI
#[event(output = "LocalAIPB")]
ToggleLocalAI = 18,
/// Returns a LocalAIPB describing the current local AI state
#[event(output = "LocalAIPB")]
GetLocalAIState = 19,
#[event()] #[event()]
RestartLocalAI = 15, ToggleChatWithFile = 20,
} }
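The renumbered events map one-to-one onto Dart wrappers; for example, a restart button and the chat-with-file toggle would call through as below (wrapper names assumed from codegen):

// Sketch, wrapper names assumed from codegen.
Future<void> restartChatPlugin() async {
  // ChatEvent::RestartLocalAIChat (17): relaunch the sidecar after it quits
  // or is killed from Task Manager / Activity Monitor.
  await ChatEventRestartLocalAIChat().send();
}

Future<void> toggleChatWithFile() async {
  // ChatEvent::ToggleChatWithFile (20): returns the updated LocalAIChatPB.
  final result = await ChatEventToggleChatWithFile().send();
  result.fold(
    (chatState) => Log.debug('file chat enabled: ${chatState.fileEnabled}'),
    (err) => Log.error(err.toString()),
  );
}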

View File

@ -1,15 +1,15 @@
use crate::chat_manager::ChatUserService; use crate::chat_manager::ChatUserService;
use crate::entities::{ use crate::entities::{
ChatStatePB, LocalModelResourcePB, ModelTypePB, PluginStatePB, RunningStatePB, ChatStatePB, LocalAIPluginStatePB, LocalModelResourcePB, ModelTypePB, RunningStatePB,
}; };
use crate::local_ai::llm_resource::{LLMResourceController, LLMResourceService}; use crate::local_ai::local_llm_resource::{LLMResourceController, LLMResourceService};
use crate::notification::{send_notification, ChatNotification}; use crate::notification::{make_notification, ChatNotification, APPFLOWY_AI_NOTIFICATION_KEY};
use anyhow::Error; use anyhow::Error;
use appflowy_local_ai::chat_plugin::{AIPluginConfig, LocalChatLLMChat}; use appflowy_local_ai::chat_plugin::{AIPluginConfig, LocalChatLLMChat};
use appflowy_plugin::manager::PluginManager; use appflowy_plugin::manager::PluginManager;
use appflowy_plugin::util::is_apple_silicon; use appflowy_plugin::util::is_apple_silicon;
use flowy_chat_pub::cloud::{AppFlowyAIPlugin, ChatCloudService, LLMModel, LocalAIConfig}; use flowy_chat_pub::cloud::{AppFlowyAIPlugin, ChatCloudService, LLMModel, LocalAIConfig};
use flowy_error::FlowyResult; use flowy_error::{FlowyError, FlowyResult};
use flowy_sqlite::kv::KVStorePreferences; use flowy_sqlite::kv::KVStorePreferences;
use futures::Sink; use futures::Sink;
use lib_infra::async_trait::async_trait; use lib_infra::async_trait::async_trait;
@ -33,11 +33,16 @@ pub struct LLMModelInfo {
pub models: Vec<LLMModel>, pub models: Vec<LLMModel>,
} }
const LOCAL_AI_SETTING_KEY: &str = "local_ai_setting"; const APPFLOWY_LOCAL_AI_ENABLED: &str = "appflowy_local_ai_enabled";
const APPFLOWY_LOCAL_AI_CHAT_ENABLED: &str = "appflowy_local_ai_chat_enabled";
const APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED: &str = "appflowy_local_ai_chat_rag_enabled";
const LOCAL_AI_SETTING_KEY: &str = "appflowy_local_ai_setting:v0";
pub struct LocalAIController { pub struct LocalAIController {
llm_chat: Arc<LocalChatLLMChat>, llm_chat: Arc<LocalChatLLMChat>,
llm_res: Arc<LLMResourceController>, llm_res: Arc<LLMResourceController>,
current_chat_id: Mutex<Option<String>>, current_chat_id: Mutex<Option<String>>,
store_preferences: Arc<KVStorePreferences>,
} }
impl Deref for LocalAIController { impl Deref for LocalAIController {
@ -57,15 +62,17 @@ impl LocalAIController {
) -> Self { ) -> Self {
let llm_chat = Arc::new(LocalChatLLMChat::new(plugin_manager)); let llm_chat = Arc::new(LocalChatLLMChat::new(plugin_manager));
let mut rx = llm_chat.subscribe_running_state(); let mut rx = llm_chat.subscribe_running_state();
let _weak_store_preferences = Arc::downgrade(&store_preferences);
tokio::spawn(async move { tokio::spawn(async move {
while let Some(state) = rx.next().await { while let Some(state) = rx.next().await {
info!("[AI Plugin] state: {:?}", state); info!("[AI Plugin] state: {:?}", state);
let new_state = RunningStatePB::from(state); let new_state = RunningStatePB::from(state);
send_notification( make_notification(
"appflowy_chat_plugin", APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState, ChatNotification::UpdateChatPluginState,
) )
.payload(PluginStatePB { state: new_state }) .payload(LocalAIPluginStatePB { state: new_state })
.send(); .send();
} }
}); });
@ -73,17 +80,26 @@ impl LocalAIController {
let res_impl = LLMResourceServiceImpl { let res_impl = LLMResourceServiceImpl {
user_service: user_service.clone(), user_service: user_service.clone(),
cloud_service, cloud_service,
store_preferences, store_preferences: store_preferences.clone(),
}; };
let (tx, mut rx) = tokio::sync::mpsc::channel(1); let (tx, mut rx) = tokio::sync::mpsc::channel(1);
let llm_res = Arc::new(LLMResourceController::new(user_service, res_impl, tx)); let llm_res = Arc::new(LLMResourceController::new(user_service, res_impl, tx));
let current_chat_id = Mutex::new(None);
let cloned_llm_chat = llm_chat.clone(); let this = Self {
let cloned_llm_res = llm_res.clone(); llm_chat,
llm_res,
current_chat_id,
store_preferences,
};
let rag_enabled = this.is_rag_enabled();
let cloned_llm_chat = this.llm_chat.clone();
let cloned_llm_res = this.llm_res.clone();
tokio::spawn(async move { tokio::spawn(async move {
while rx.recv().await.is_some() { while rx.recv().await.is_some() {
if let Ok(chat_config) = cloned_llm_res.get_ai_plugin_config() { if let Ok(chat_config) = cloned_llm_res.get_chat_config(rag_enabled) {
if let Err(err) = initialize_chat_plugin(&cloned_llm_chat, chat_config) { if let Err(err) = initialize_chat_plugin(&cloned_llm_chat, chat_config) {
error!("[AI Plugin] failed to setup plugin: {:?}", err); error!("[AI Plugin] failed to setup plugin: {:?}", err);
} }
@ -91,30 +107,82 @@ impl LocalAIController {
} }
}); });
Self { this
llm_chat,
llm_res,
current_chat_id: Default::default(),
}
} }
pub async fn refresh(&self) -> FlowyResult<LLMModelInfo> { pub async fn refresh(&self) -> FlowyResult<LLMModelInfo> {
self.llm_res.refresh_llm_resource().await self.llm_res.refresh_llm_resource().await
} }
pub fn initialize(&self) -> FlowyResult<()> { pub fn initialize_chat_plugin(
let chat_config = self.llm_res.get_ai_plugin_config()?; &self,
ret: Option<tokio::sync::oneshot::Sender<()>>,
) -> FlowyResult<()> {
let mut chat_config = self.llm_res.get_chat_config(self.is_rag_enabled())?;
let llm_chat = self.llm_chat.clone(); let llm_chat = self.llm_chat.clone();
initialize_chat_plugin(&llm_chat, chat_config)?; tokio::spawn(async move {
trace!("[AI Plugin] config: {:?}", chat_config);
if is_apple_silicon().await.unwrap_or(false) {
chat_config = chat_config.with_device("gpu");
}
match llm_chat.init_chat_plugin(chat_config).await {
Ok(_) => {
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(ChatStatePB {
model_type: ModelTypePB::LocalAI,
available: true,
})
.send();
},
Err(err) => {
make_notification(
APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState,
)
.payload(ChatStatePB {
model_type: ModelTypePB::LocalAI,
available: false,
})
.send();
error!("[AI Plugin] failed to setup plugin: {:?}", err);
},
}
if let Some(ret) = ret {
let _ = ret.send(());
}
});
Ok(()) Ok(())
} }
/// Returns true if the local AI is enabled and ready to use. /// Returns true if the local AI is enabled and ready to use.
pub fn is_ready(&self) -> bool { pub fn can_init(&self) -> bool {
self.llm_res.is_resource_ready() self.is_enabled() && self.llm_res.is_resource_ready()
}
pub fn is_running(&self) -> bool {
self.llm_chat.get_plugin_running_state().is_ready()
}
pub fn is_enabled(&self) -> bool {
self.store_preferences.get_bool(APPFLOWY_LOCAL_AI_ENABLED)
}
pub fn is_chat_enabled(&self) -> bool {
self
.store_preferences
.get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED)
}
pub fn is_rag_enabled(&self) -> bool {
self
.store_preferences
.get_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED)
} }
pub fn open_chat(&self, chat_id: &str) { pub fn open_chat(&self, chat_id: &str) {
if !self.is_ready() { if !self.is_running() {
return; return;
} }
@ -149,7 +217,11 @@ impl LocalAIController {
}); });
} }
pub async fn use_local_llm(&self, llm_id: i64) -> FlowyResult<LocalModelResourcePB> { pub async fn select_local_llm(&self, llm_id: i64) -> FlowyResult<LocalModelResourcePB> {
if !self.is_enabled() {
return Err(FlowyError::local_ai_unavailable());
}
let llm_chat = self.llm_chat.clone(); let llm_chat = self.llm_chat.clone();
match llm_chat.destroy_chat_plugin().await { match llm_chat.destroy_chat_plugin().await {
Ok(_) => info!("[AI Plugin] destroy plugin successfully"), Ok(_) => info!("[AI Plugin] destroy plugin successfully"),
@ -158,7 +230,7 @@ impl LocalAIController {
let state = self.llm_res.use_local_llm(llm_id)?; let state = self.llm_res.use_local_llm(llm_id)?;
// Re-initialize the plugin if the setting is updated and ready to use // Re-initialize the plugin if the setting is updated and ready to use
if self.llm_res.is_resource_ready() { if self.llm_res.is_resource_ready() {
self.initialize()?; self.initialize_chat_plugin(None)?;
} }
Ok(state) Ok(state)
} }
@ -167,6 +239,10 @@ impl LocalAIController {
self.llm_res.get_local_llm_state() self.llm_res.get_local_llm_state()
} }
pub fn get_current_model(&self) -> Option<LLMModel> {
self.llm_res.get_selected_model()
}
pub async fn start_downloading<T>(&self, progress_sink: T) -> FlowyResult<String> pub async fn start_downloading<T>(&self, progress_sink: T) -> FlowyResult<String>
where where
T: Sink<String, Error = anyhow::Error> + Unpin + Sync + Send + 'static, T: Sink<String, Error = anyhow::Error> + Unpin + Sync + Send + 'static,
@ -180,20 +256,77 @@ impl LocalAIController {
Ok(()) Ok(())
} }
pub fn get_plugin_state(&self) -> PluginStatePB { pub fn get_chat_plugin_state(&self) -> LocalAIPluginStatePB {
let state = self.llm_chat.get_plugin_running_state(); let state = self.llm_chat.get_plugin_running_state();
PluginStatePB { LocalAIPluginStatePB {
state: RunningStatePB::from(state), state: RunningStatePB::from(state),
} }
} }
pub fn restart(&self) { pub fn restart_chat_plugin(&self) {
if let Ok(chat_config) = self.llm_res.get_ai_plugin_config() { let rag_enabled = self.is_rag_enabled();
if let Ok(chat_config) = self.llm_res.get_chat_config(rag_enabled) {
if let Err(err) = initialize_chat_plugin(&self.llm_chat, chat_config) { if let Err(err) = initialize_chat_plugin(&self.llm_chat, chat_config) {
error!("[AI Plugin] failed to setup plugin: {:?}", err); error!("[AI Plugin] failed to setup plugin: {:?}", err);
} }
} }
} }
pub async fn toggle_local_ai(&self) -> FlowyResult<bool> {
let enabled = !self.store_preferences.get_bool(APPFLOWY_LOCAL_AI_ENABLED);
self
.store_preferences
.set_bool(APPFLOWY_LOCAL_AI_ENABLED, enabled)?;
// When enabling local AI, start the chat plugin only if chat is also enabled;
// when disabling local AI, always tear the plugin down.
if enabled {
let chat_enabled = self
.store_preferences
.get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED);
self.enable_chat_plugin(chat_enabled).await?;
} else {
self.enable_chat_plugin(false).await?;
}
Ok(enabled)
}
pub async fn toggle_local_ai_chat(&self) -> FlowyResult<bool> {
let enabled = !self
.store_preferences
.get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED);
self
.store_preferences
.set_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED, enabled)?;
self.enable_chat_plugin(enabled).await?;
Ok(enabled)
}
pub async fn toggle_local_ai_chat_rag(&self) -> FlowyResult<bool> {
let enabled = !self
.store_preferences
.get_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED);
self
.store_preferences
.set_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED, enabled)?;
Ok(enabled)
}
async fn enable_chat_plugin(&self, enabled: bool) -> FlowyResult<()> {
if enabled {
let (tx, rx) = tokio::sync::oneshot::channel();
if let Err(err) = self.initialize_chat_plugin(Some(tx)) {
error!("[AI Plugin] failed to initialize local ai: {:?}", err);
}
let _ = rx.await;
} else {
if let Err(err) = self.llm_chat.destroy_chat_plugin().await {
error!("[AI Plugin] failed to destroy plugin: {:?}", err);
}
}
Ok(())
}
} }
fn initialize_chat_plugin( fn initialize_chat_plugin(
@ -208,24 +341,26 @@ fn initialize_chat_plugin(
} }
match llm_chat.init_chat_plugin(chat_config).await { match llm_chat.init_chat_plugin(chat_config).await {
Ok(_) => { Ok(_) => {
send_notification( make_notification(
"appflowy_chat_plugin", APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState, ChatNotification::UpdateChatPluginState,
) )
.payload(ChatStatePB { .payload(ChatStatePB {
model_type: ModelTypePB::LocalAI, model_type: ModelTypePB::LocalAI,
available: true, available: true,
}); })
.send();
}, },
Err(err) => { Err(err) => {
send_notification( make_notification(
"appflowy_chat_plugin", APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState, ChatNotification::UpdateChatPluginState,
) )
.payload(ChatStatePB { .payload(ChatStatePB {
model_type: ModelTypePB::LocalAI, model_type: ModelTypePB::LocalAI,
available: false, available: false,
}); })
.send();
error!("[AI Plugin] failed to setup plugin: {:?}", err); error!("[AI Plugin] failed to setup plugin: {:?}", err);
}, },
} }
@ -240,7 +375,7 @@ pub struct LLMResourceServiceImpl {
} }
#[async_trait] #[async_trait]
impl LLMResourceService for LLMResourceServiceImpl { impl LLMResourceService for LLMResourceServiceImpl {
async fn get_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error> { async fn fetch_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error> {
let workspace_id = self.user_service.workspace_id()?; let workspace_id = self.user_service.workspace_id()?;
let config = self let config = self
.cloud_service .cloud_service
@ -249,16 +384,22 @@ impl LLMResourceService for LLMResourceServiceImpl {
Ok(config) Ok(config)
} }
fn store(&self, setting: LLMSetting) -> Result<(), Error> { fn store_setting(&self, setting: LLMSetting) -> Result<(), Error> {
self self
.store_preferences .store_preferences
.set_object(LOCAL_AI_SETTING_KEY, setting)?; .set_object(LOCAL_AI_SETTING_KEY, setting)?;
Ok(()) Ok(())
} }
fn retrieve(&self) -> Option<LLMSetting> { fn retrieve_setting(&self) -> Option<LLMSetting> {
self self
.store_preferences .store_preferences
.get_object::<LLMSetting>(LOCAL_AI_SETTING_KEY) .get_object::<LLMSetting>(LOCAL_AI_SETTING_KEY)
} }
fn is_rag_enabled(&self) -> bool {
self
.store_preferences
.get_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED)
}
} }
View File
@ -14,16 +14,19 @@ use parking_lot::RwLock;
use appflowy_local_ai::plugin_request::download_plugin; use appflowy_local_ai::plugin_request::download_plugin;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration;
use tokio::fs::{self}; use tokio::fs::{self};
use tokio_util::sync::CancellationToken; use tokio_util::sync::CancellationToken;
use tracing::{debug, error, info, instrument, trace}; use tracing::{debug, error, info, instrument, trace, warn};
use zip_extensions::zip_extract; use zip_extensions::zip_extract;
#[async_trait] #[async_trait]
pub trait LLMResourceService: Send + Sync + 'static { pub trait LLMResourceService: Send + Sync + 'static {
async fn get_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error>; /// Fetch the local AI configuration from the remote server
fn store(&self, setting: LLMSetting) -> Result<(), anyhow::Error>; async fn fetch_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error>;
fn retrieve(&self) -> Option<LLMSetting>; fn store_setting(&self, setting: LLMSetting) -> Result<(), anyhow::Error>;
fn retrieve_setting(&self) -> Option<LLMSetting>;
fn is_rag_enabled(&self) -> bool;
} }
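The renamed trait methods (fetch_local_ai_config, store_setting, retrieve_setting) plus the new is_rag_enabled make the resource service easy to fake. A minimal in-memory double, with LocalAIConfig and LLMSetting reduced to unit structs since the real protobuf-backed types aren't shown here:

use async_trait::async_trait;
use parking_lot::RwLock;

// Simplified stand-ins; the real LocalAIConfig/LLMSetting are richer types.
#[derive(Clone, Debug, Default)]
struct LocalAIConfig;
#[derive(Clone, Debug)]
struct LLMSetting;

#[async_trait]
trait LLMResourceService: Send + Sync + 'static {
    async fn fetch_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error>;
    fn store_setting(&self, setting: LLMSetting) -> Result<(), anyhow::Error>;
    fn retrieve_setting(&self) -> Option<LLMSetting>;
    fn is_rag_enabled(&self) -> bool;
}

// In-memory double for exercising LLMResourceController without a server.
#[derive(Default)]
struct MockResourceService {
    setting: RwLock<Option<LLMSetting>>,
    rag_enabled: bool,
}

#[async_trait]
impl LLMResourceService for MockResourceService {
    async fn fetch_local_ai_config(&self) -> Result<LocalAIConfig, anyhow::Error> {
        Ok(LocalAIConfig) // no network round-trip needed in tests
    }
    fn store_setting(&self, setting: LLMSetting) -> Result<(), anyhow::Error> {
        *self.setting.write() = Some(setting);
        Ok(())
    }
    fn retrieve_setting(&self) -> Option<LLMSetting> {
        self.setting.read().clone()
    }
    fn is_rag_enabled(&self) -> bool {
        self.rag_enabled
    }
}

#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    let svc = MockResourceService::default();
    svc.store_setting(LLMSetting)?;
    assert!(svc.retrieve_setting().is_some());
    let _config = svc.fetch_local_ai_config().await?;
    Ok(())
}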
const PLUGIN_DIR: &str = "plugin"; const PLUGIN_DIR: &str = "plugin";
@ -41,7 +44,7 @@ pub struct DownloadTask {
} }
impl DownloadTask { impl DownloadTask {
pub fn new() -> Self { pub fn new() -> Self {
let (tx, _) = tokio::sync::broadcast::channel(5); let (tx, _) = tokio::sync::broadcast::channel(100);
let cancel_token = CancellationToken::new(); let cancel_token = CancellationToken::new();
Self { cancel_token, tx } Self { cancel_token, tx }
} }
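Growing the broadcast buffer from 5 to 100 matters because tokio's broadcast channel evicts the oldest messages when full, and a slow subscriber then sees RecvError::Lagged and loses progress ticks. A runnable illustration of the failure mode the larger buffer avoids:

use tokio::sync::broadcast;

#[tokio::main]
async fn main() {
    // Tiny buffer, like the old channel(5): bursts overwrite history.
    let (tx, mut rx) = broadcast::channel::<String>(5);

    // Emit a burst of progress updates before the receiver runs.
    for i in 0..20 {
        let _ = tx.send(format!("plugin:progress:{:.2}", i as f64 / 20.0));
    }
    drop(tx); // close the channel so recv() eventually returns Closed

    loop {
        match rx.recv().await {
            Ok(msg) => println!("got {msg}"),
            // The receiver learns how many updates were discarded.
            Err(broadcast::error::RecvError::Lagged(skipped)) => {
                println!("lagged, skipped {skipped} updates")
            },
            Err(broadcast::error::RecvError::Closed) => break,
        }
    }
}

Together with the 100 ms debounce added elsewhere in this commit, a capacity of 100 means a download would have to outrun the UI by roughly ten seconds of updates before anything is dropped.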
@ -67,7 +70,7 @@ impl LLMResourceController {
resource_service: impl LLMResourceService, resource_service: impl LLMResourceService,
resource_notify: tokio::sync::mpsc::Sender<()>, resource_notify: tokio::sync::mpsc::Sender<()>,
) -> Self { ) -> Self {
let llm_setting = RwLock::new(resource_service.retrieve()); let llm_setting = RwLock::new(resource_service.retrieve_setting());
Self { Self {
user_service, user_service,
resource_service: Arc::new(resource_service), resource_service: Arc::new(resource_service),
@ -102,7 +105,7 @@ impl LLMResourceController {
llm_model: selected_model.clone(), llm_model: selected_model.clone(),
}; };
self.llm_setting.write().replace(llm_setting.clone()); self.llm_setting.write().replace(llm_setting.clone());
self.resource_service.store(llm_setting)?; self.resource_service.store_setting(llm_setting)?;
Ok(LLMModelInfo { Ok(LLMModelInfo {
selected_model, selected_model,
@ -133,7 +136,7 @@ impl LLMResourceController {
trace!("[LLM Resource] Selected AI setting: {:?}", llm_setting); trace!("[LLM Resource] Selected AI setting: {:?}", llm_setting);
*self.llm_setting.write() = Some(llm_setting.clone()); *self.llm_setting.write() = Some(llm_setting.clone());
self.resource_service.store(llm_setting)?; self.resource_service.store_setting(llm_setting)?;
self.get_local_llm_state() self.get_local_llm_state()
} }
@ -302,6 +305,7 @@ impl LLMResourceController {
let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0); let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0);
let _ = plugin_progress_tx.send(format!("plugin:progress:{}", progress)); let _ = plugin_progress_tx.send(format!("plugin:progress:{}", progress));
})), })),
Some(Duration::from_millis(100)),
) )
.await?; .await?;
@ -342,7 +346,11 @@ impl LLMResourceController {
let cloned_model_name = model_name.clone(); let cloned_model_name = model_name.clone();
let progress = Arc::new(move |downloaded, total_size| { let progress = Arc::new(move |downloaded, total_size| {
let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0); let progress = (downloaded as f64 / total_size as f64).clamp(0.0, 1.0);
let _ = plugin_progress_tx.send(format!("{}:progress:{}", cloned_model_name, progress)); if let Err(err) =
plugin_progress_tx.send(format!("{}:progress:{}", cloned_model_name, progress))
{
warn!("Failed to send progress: {:?}", err);
}
}); });
match download_model( match download_model(
&url, &url,
@ -384,7 +392,7 @@ impl LLMResourceController {
} }
#[instrument(level = "debug", skip_all, err)] #[instrument(level = "debug", skip_all, err)]
pub fn get_ai_plugin_config(&self) -> FlowyResult<AIPluginConfig> { pub fn get_chat_config(&self, rag_enabled: bool) -> FlowyResult<AIPluginConfig> {
if !self.is_resource_ready() { if !self.is_resource_ready() {
return Err(FlowyError::local_ai().with_context("Local AI resources are not ready")); return Err(FlowyError::local_ai().with_context("Local AI resources are not ready"));
} }
@ -397,27 +405,26 @@ impl LLMResourceController {
.ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?; .ok_or_else(|| FlowyError::local_ai().with_context("No local llm setting found"))?;
let model_dir = self.user_model_folder()?; let model_dir = self.user_model_folder()?;
let resource_dir = self.resource_dir()?;
let bin_path = self let bin_path = self
.plugin_path(&llm_setting.plugin.etag)? .plugin_path(&llm_setting.plugin.etag)?
.join(llm_setting.plugin.name); .join(llm_setting.plugin.name);
let chat_model_path = model_dir.join(&llm_setting.llm_model.chat_model.file_name); let chat_model_path = model_dir.join(&llm_setting.llm_model.chat_model.file_name);
let embedding_model_path = model_dir.join(&llm_setting.llm_model.embedding_model.file_name);
let mut config = AIPluginConfig::new(bin_path, chat_model_path)?; let mut config = AIPluginConfig::new(bin_path, chat_model_path)?;
// if rag_enabled {
let persist_directory = resource_dir.join("rag"); let resource_dir = self.resource_dir()?;
if !persist_directory.exists() { let embedding_model_path = model_dir.join(&llm_setting.llm_model.embedding_model.file_name);
std::fs::create_dir_all(&persist_directory)?; let persist_directory = resource_dir.join("vectorstore");
if !persist_directory.exists() {
std::fs::create_dir_all(&persist_directory)?;
}
config.set_rag_enabled(&embedding_model_path, &persist_directory)?;
} }
// Enable RAG when the embedding model path is set
config.set_rag_enabled(&embedding_model_path, &persist_directory)?;
if cfg!(debug_assertions) { if cfg!(debug_assertions) {
config = config.with_verbose(true); config = config.with_verbose(true);
} }
trace!("[AI Chat] use config: {:?}", config);
Ok(config) Ok(config)
} }
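get_chat_config now creates the vectorstore directory and wires in the embedding model only when RAG is enabled, instead of unconditionally as before. The same conditional setup in a self-contained sketch; PluginConfig and the .gguf file names are hypothetical stand-ins, since only the builder surface of AIPluginConfig appears in this diff:

use std::path::{Path, PathBuf};

// Hypothetical mirror of the config handed to the plugin.
#[derive(Debug)]
struct PluginConfig {
    chat_model: PathBuf,
    rag: Option<(PathBuf, PathBuf)>, // (embedding model, persist dir)
    verbose: bool,
}

fn build_config(model_dir: &Path, resource_dir: &Path, rag_enabled: bool) -> std::io::Result<PluginConfig> {
    let mut config = PluginConfig {
        chat_model: model_dir.join("chat.gguf"), // illustrative file name
        rag: None,
        verbose: cfg!(debug_assertions),
    };
    if rag_enabled {
        let persist_directory = resource_dir.join("vectorstore");
        // Create the store lazily, on first use, exactly as the diff does.
        if !persist_directory.exists() {
            std::fs::create_dir_all(&persist_directory)?;
        }
        config.rag = Some((model_dir.join("embedding.gguf"), persist_directory));
    }
    Ok(config)
}

fn main() -> std::io::Result<()> {
    let tmp = std::env::temp_dir();
    let config = build_config(&tmp, &tmp, true)?;
    println!("{config:?}");
    Ok(())
}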
@ -425,7 +432,7 @@ impl LLMResourceController {
async fn fetch_ai_config(&self) -> FlowyResult<LocalAIConfig> { async fn fetch_ai_config(&self) -> FlowyResult<LocalAIConfig> {
self self
.resource_service .resource_service
.get_local_ai_config() .fetch_local_ai_config()
.await .await
.map_err(|err| { .map_err(|err| {
error!("[LLM Resource] Failed to fetch local ai config: {:?}", err); error!("[LLM Resource] Failed to fetch local ai config: {:?}", err);
@ -434,6 +441,14 @@ impl LLMResourceController {
}) })
} }
pub fn get_selected_model(&self) -> Option<LLMModel> {
self
.llm_setting
.read()
.as_ref()
.map(|setting| setting.llm_model.clone())
}
/// Selects the appropriate model based on the current settings or defaults to the first model. /// Selects the appropriate model based on the current settings or defaults to the first model.
fn select_model(&self, ai_config: &LocalAIConfig) -> FlowyResult<LLMModel> { fn select_model(&self, ai_config: &LocalAIConfig) -> FlowyResult<LLMModel> {
let selected_model = match self.llm_setting.read().as_ref() { let selected_model = match self.llm_setting.read().as_ref() {
@ -477,7 +492,7 @@ impl LLMResourceController {
.map(|dir| dir.join(model_file_name)) .map(|dir| dir.join(model_file_name))
} }
fn resource_dir(&self) -> FlowyResult<PathBuf> { pub(crate) fn resource_dir(&self) -> FlowyResult<PathBuf> {
let user_data_dir = self.user_service.user_data_dir()?; let user_data_dir = self.user_service.user_data_dir()?;
Ok(user_data_dir.join("llm")) Ok(user_data_dir.join("llm"))
} }
View File
@ -1,3 +1,3 @@
pub mod llm_resource;
pub mod local_llm_chat; pub mod local_llm_chat;
pub mod local_llm_resource;
mod model_request; mod model_request;
View File
@ -5,6 +5,7 @@ use reqwest::{Client, Response, StatusCode};
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::Arc; use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::fs::{self, File}; use tokio::fs::{self, File};
use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt}; use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
@ -29,6 +30,11 @@ pub async fn download_model(
let mut part_file = File::create(&partial_path).await?; let mut part_file = File::create(&partial_path).await?;
let mut downloaded: u64 = 0; let mut downloaded: u64 = 0;
let debounce_duration = Duration::from_millis(100);
let mut last_update = Instant::now()
.checked_sub(debounce_duration)
.unwrap_or(Instant::now());
while let Some(chunk) = response.chunk().await? { while let Some(chunk) = response.chunk().await? {
if let Some(cancel_token) = &cancel_token { if let Some(cancel_token) = &cancel_token {
if cancel_token.is_cancelled() { if cancel_token.is_cancelled() {
@ -42,7 +48,11 @@ pub async fn download_model(
downloaded += chunk.len() as u64; downloaded += chunk.len() as u64;
if let Some(progress_callback) = &progress_callback { if let Some(progress_callback) = &progress_callback {
progress_callback(downloaded, total_size_in_bytes); let now = Instant::now();
if now.duration_since(last_update) >= debounce_duration {
progress_callback(downloaded, total_size_in_bytes);
last_update = now;
}
} }
} }
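The download loop now emits at most one progress callback per 100 ms. The checked_sub initialization backdates last_update so the very first chunk still reports immediately (falling back to now if the subtraction would underflow near boot). The same debounce in isolation:

use std::time::{Duration, Instant};

struct Debouncer {
    interval: Duration,
    last_update: Instant,
}

impl Debouncer {
    fn new(interval: Duration) -> Self {
        // Backdate last_update so the first event always passes;
        // checked_sub guards against Instant underflow.
        let last_update = Instant::now()
            .checked_sub(interval)
            .unwrap_or_else(Instant::now);
        Self { interval, last_update }
    }

    fn should_fire(&mut self) -> bool {
        let now = Instant::now();
        if now.duration_since(self.last_update) >= self.interval {
            self.last_update = now;
            true
        } else {
            false
        }
    }
}

fn main() {
    let mut debounce = Debouncer::new(Duration::from_millis(100));
    let mut fired = 0;
    for _ in 0..1_000_000 {
        if debounce.should_fire() {
            fired += 1; // at most ~10 per second regardless of chunk rate
        }
    }
    println!("callbacks fired: {fired}");
}

One caveat with any debounce of this shape: a final 100% tick arriving inside the window is swallowed, so callers typically emit a separate completion event.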
View File
@ -1,7 +1,7 @@
use crate::chat_manager::ChatUserService; use crate::chat_manager::ChatUserService;
use crate::entities::{ChatStatePB, ModelTypePB}; use crate::entities::{ChatStatePB, ModelTypePB};
use crate::local_ai::local_llm_chat::LocalAIController; use crate::local_ai::local_llm_chat::LocalAIController;
use crate::notification::{send_notification, ChatNotification}; use crate::notification::{make_notification, ChatNotification, APPFLOWY_AI_NOTIFICATION_KEY};
use crate::persistence::select_single_message; use crate::persistence::select_single_message;
use appflowy_plugin::error::PluginError; use appflowy_plugin::error::PluginError;
@ -53,14 +53,15 @@ impl ChatServiceMiddleware {
err, err,
PluginError::PluginNotConnected | PluginError::PeerDisconnect PluginError::PluginNotConnected | PluginError::PeerDisconnect
) { ) {
send_notification( make_notification(
"appflowy_chat_plugin", APPFLOWY_AI_NOTIFICATION_KEY,
ChatNotification::UpdateChatPluginState, ChatNotification::UpdateChatPluginState,
) )
.payload(ChatStatePB { .payload(ChatStatePB {
model_type: ModelTypePB::LocalAI, model_type: ModelTypePB::LocalAI,
available: false, available: false,
}); })
.send();
} }
} }
} }
@ -106,7 +107,7 @@ impl ChatCloudService for ChatServiceMiddleware {
chat_id: &str, chat_id: &str,
message_id: i64, message_id: i64,
) -> Result<StreamAnswer, FlowyError> { ) -> Result<StreamAnswer, FlowyError> {
if self.local_llm_controller.is_ready() { if self.local_llm_controller.is_running() {
let content = self.get_message_content(message_id)?; let content = self.get_message_content(message_id)?;
match self match self
.local_llm_controller .local_llm_controller
@ -137,7 +138,7 @@ impl ChatCloudService for ChatServiceMiddleware {
chat_id: &str, chat_id: &str,
question_message_id: i64, question_message_id: i64,
) -> Result<ChatMessage, FlowyError> { ) -> Result<ChatMessage, FlowyError> {
if self.local_llm_controller.is_ready() { if self.local_llm_controller.is_running() {
let content = self.get_message_content(question_message_id)?; let content = self.get_message_content(question_message_id)?;
match self match self
.local_llm_controller .local_llm_controller
@ -182,7 +183,7 @@ impl ChatCloudService for ChatServiceMiddleware {
chat_id: &str, chat_id: &str,
message_id: i64, message_id: i64,
) -> FutureResult<RepeatedRelatedQuestion, FlowyError> { ) -> FutureResult<RepeatedRelatedQuestion, FlowyError> {
if self.local_llm_controller.is_ready() { if self.local_llm_controller.is_running() {
FutureResult::new(async move { FutureResult::new(async move {
Ok(RepeatedRelatedQuestion { Ok(RepeatedRelatedQuestion {
message_id, message_id,
@ -202,7 +203,7 @@ impl ChatCloudService for ChatServiceMiddleware {
text: &str, text: &str,
complete_type: CompletionType, complete_type: CompletionType,
) -> Result<StreamComplete, FlowyError> { ) -> Result<StreamComplete, FlowyError> {
if self.local_llm_controller.is_ready() { if self.local_llm_controller.is_running() {
return Err( return Err(
FlowyError::not_support().with_context("completion with local ai is not supported yet"), FlowyError::not_support().with_context("completion with local ai is not supported yet"),
); );
@ -220,7 +221,7 @@ impl ChatCloudService for ChatServiceMiddleware {
file_path: PathBuf, file_path: PathBuf,
chat_id: &str, chat_id: &str,
) -> Result<(), FlowyError> { ) -> Result<(), FlowyError> {
if self.local_llm_controller.is_ready() { if self.local_llm_controller.is_running() {
self self
.local_llm_controller .local_llm_controller
.index_file(chat_id, file_path) .index_file(chat_id, file_path)
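Every entry point in the middleware now gates on is_running() instead of is_ready(), so requests fall through to the cloud service unless the local plugin is actually up. The dispatch shape, reduced to its essentials with illustrative stand-in types:

// Illustrative stand-ins for the local controller and the cloud service.
struct LocalAI { running: bool }
impl LocalAI {
    fn is_running(&self) -> bool { self.running }
    fn answer(&self, q: &str) -> String { format!("local: {q}") }
}

struct Cloud;
impl Cloud {
    fn answer(&self, q: &str) -> String { format!("cloud: {q}") }
}

struct Middleware { local: LocalAI, cloud: Cloud }

impl Middleware {
    fn answer(&self, q: &str) -> String {
        // The only branch point: a live plugin wins; otherwise the
        // request transparently goes to the cloud implementation.
        if self.local.is_running() {
            self.local.answer(q)
        } else {
            self.cloud.answer(q)
        }
    }
}

fn main() {
    let m = Middleware { local: LocalAI { running: false }, cloud: Cloud };
    assert_eq!(m.answer("hi"), "cloud: hi");
}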
View File
@ -2,7 +2,7 @@ use flowy_derive::ProtoBuf_Enum;
use flowy_notification::NotificationBuilder; use flowy_notification::NotificationBuilder;
const CHAT_OBSERVABLE_SOURCE: &str = "Chat"; const CHAT_OBSERVABLE_SOURCE: &str = "Chat";
pub const APPFLOWY_AI_NOTIFICATION_KEY: &str = "appflowy_ai_plugin";
#[derive(ProtoBuf_Enum, Debug, Default)] #[derive(ProtoBuf_Enum, Debug, Default)]
pub enum ChatNotification { pub enum ChatNotification {
#[default] #[default]
@ -13,7 +13,7 @@ pub enum ChatNotification {
StreamChatMessageError = 4, StreamChatMessageError = 4,
FinishStreaming = 5, FinishStreaming = 5,
UpdateChatPluginState = 6, UpdateChatPluginState = 6,
LocalAIResourceNeeded = 7, UpdateLocalChatAI = 7,
} }
impl std::convert::From<ChatNotification> for i32 { impl std::convert::From<ChatNotification> for i32 {
@ -30,13 +30,13 @@ impl std::convert::From<i32> for ChatNotification {
4 => ChatNotification::StreamChatMessageError, 4 => ChatNotification::StreamChatMessageError,
5 => ChatNotification::FinishStreaming, 5 => ChatNotification::FinishStreaming,
6 => ChatNotification::UpdateChatPluginState, 6 => ChatNotification::UpdateChatPluginState,
7 => ChatNotification::LocalAIResourceNeeded, 7 => ChatNotification::UpdateLocalChatAI,
_ => ChatNotification::Unknown, _ => ChatNotification::Unknown,
} }
} }
} }
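Repurposing discriminant 7 from LocalAIResourceNeeded to UpdateLocalChatAI is a silent wire change: the From<i32> round-trip means any peer still emitting 7 is reinterpreted as the new variant rather than rejected. A sketch trimmed to the relevant variants:

#[derive(Debug, Default, PartialEq, Clone, Copy)]
enum ChatNotification {
    #[default]
    Unknown = 0,
    UpdateChatPluginState = 6,
    UpdateLocalChatAI = 7,
}

impl From<i32> for ChatNotification {
    fn from(value: i32) -> Self {
        match value {
            6 => ChatNotification::UpdateChatPluginState,
            // Discriminant 7 meant LocalAIResourceNeeded before this commit;
            // an older client still emitting 7 is now read as the new variant.
            7 => ChatNotification::UpdateLocalChatAI,
            _ => ChatNotification::Unknown,
        }
    }
}

fn main() {
    assert_eq!(ChatNotification::from(7), ChatNotification::UpdateLocalChatAI);
    assert_eq!(ChatNotification::from(42), ChatNotification::Unknown);
}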
#[tracing::instrument(level = "trace")] #[tracing::instrument(level = "trace")]
pub(crate) fn send_notification(id: &str, ty: ChatNotification) -> NotificationBuilder { pub(crate) fn make_notification(id: &str, ty: ChatNotification) -> NotificationBuilder {
NotificationBuilder::new(id, ty, CHAT_OBSERVABLE_SOURCE) NotificationBuilder::new(id, ty, CHAT_OBSERVABLE_SOURCE)
} }
View File
@ -3,6 +3,7 @@ use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use crate::af_cloud::define::ServerUser;
use anyhow::Error; use anyhow::Error;
use client_api::collab_sync::ServerCollabMessage; use client_api::collab_sync::ServerCollabMessage;
use client_api::entity::ai_dto::AIModel; use client_api::entity::ai_dto::AIModel;
@ -12,8 +13,18 @@ use client_api::ws::{
ConnectState, WSClient, WSClientConfig, WSConnectStateReceiver, WebSocketChannel, ConnectState, WSClient, WSClientConfig, WSConnectStateReceiver, WebSocketChannel,
}; };
use client_api::{Client, ClientConfiguration}; use client_api::{Client, ClientConfiguration};
use flowy_chat_pub::cloud::ChatCloudService; use flowy_chat_pub::cloud::ChatCloudService;
use flowy_database_pub::cloud::DatabaseCloudService;
use flowy_document_pub::cloud::DocumentCloudService;
use flowy_error::{ErrorCode, FlowyError};
use flowy_folder_pub::cloud::FolderCloudService;
use flowy_search_pub::cloud::SearchCloudService; use flowy_search_pub::cloud::SearchCloudService;
use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_storage_pub::cloud::StorageCloudService;
use flowy_user_pub::cloud::{UserCloudService, UserUpdate};
use flowy_user_pub::entities::UserTokenState;
use lib_dispatch::prelude::af_spawn;
use rand::Rng; use rand::Rng;
use semver::Version; use semver::Version;
use tokio::select; use tokio::select;
@ -23,17 +34,6 @@ use tokio_util::sync::CancellationToken;
use tracing::{error, event, info, warn}; use tracing::{error, event, info, warn};
use uuid::Uuid; use uuid::Uuid;
use crate::af_cloud::define::ServerUser;
use flowy_database_pub::cloud::DatabaseCloudService;
use flowy_document_pub::cloud::DocumentCloudService;
use flowy_error::{ErrorCode, FlowyError};
use flowy_folder_pub::cloud::FolderCloudService;
use flowy_server_pub::af_cloud_config::AFCloudConfiguration;
use flowy_storage_pub::cloud::StorageCloudService;
use flowy_user_pub::cloud::{UserCloudService, UserUpdate};
use flowy_user_pub::entities::UserTokenState;
use lib_dispatch::prelude::af_spawn;
use crate::af_cloud::impls::{ use crate::af_cloud::impls::{
AFCloudChatCloudServiceImpl, AFCloudDatabaseCloudServiceImpl, AFCloudDocumentCloudServiceImpl, AFCloudChatCloudServiceImpl, AFCloudDatabaseCloudServiceImpl, AFCloudDocumentCloudServiceImpl,
AFCloudFileStorageServiceImpl, AFCloudFolderCloudServiceImpl, AFCloudUserAuthServiceImpl, AFCloudFileStorageServiceImpl, AFCloudFolderCloudServiceImpl, AFCloudUserAuthServiceImpl,
View File
@ -399,7 +399,6 @@ pub enum AIModelPB {
GPT4o = 2, GPT4o = 2,
Claude3Sonnet = 3, Claude3Sonnet = 3,
Claude3Opus = 4, Claude3Opus = 4,
LocalAIModel = 5,
} }
impl AIModelPB { impl AIModelPB {
@ -410,7 +409,6 @@ impl AIModelPB {
AIModelPB::GPT4o => "gpt-4o", AIModelPB::GPT4o => "gpt-4o",
AIModelPB::Claude3Sonnet => "claude-3-sonnet", AIModelPB::Claude3Sonnet => "claude-3-sonnet",
AIModelPB::Claude3Opus => "claude-3-opus", AIModelPB::Claude3Opus => "claude-3-opus",
AIModelPB::LocalAIModel => "local",
} }
} }
} }
@ -424,7 +422,6 @@ impl FromStr for AIModelPB {
"gpt-4o" => Ok(AIModelPB::GPT4o), "gpt-4o" => Ok(AIModelPB::GPT4o),
"claude-3-sonnet" => Ok(AIModelPB::Claude3Sonnet), "claude-3-sonnet" => Ok(AIModelPB::Claude3Sonnet),
"claude-3-opus" => Ok(AIModelPB::Claude3Opus), "claude-3-opus" => Ok(AIModelPB::Claude3Opus),
"local" => Ok(AIModelPB::LocalAIModel),
_ => Ok(AIModelPB::DefaultModel), _ => Ok(AIModelPB::DefaultModel),
} }
} }
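With the "local" arm removed, parsing stays total: any unrecognized string, including "local" persisted by an older profile, now silently falls back to DefaultModel. A compact reproduction with a simplified AIModel enum standing in for AIModelPB:

use std::str::FromStr;

// Simplified stand-in for AIModelPB, trimmed to two variants.
#[derive(Debug, Default, PartialEq)]
enum AIModel {
    #[default]
    DefaultModel,
    GPT4o,
}

impl FromStr for AIModel {
    type Err = std::convert::Infallible; // every input maps to something

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "gpt-4o" => AIModel::GPT4o,
            // "local" matched its own variant before this commit; stale
            // settings now fall back to the default cloud model instead.
            _ => AIModel::DefaultModel,
        })
    }
}

fn main() {
    assert_eq!("local".parse::<AIModel>().unwrap(), AIModel::DefaultModel);
}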
View File
@ -489,10 +489,7 @@ impl UserManager {
&self, &self,
updated_settings: UpdateUserWorkspaceSettingPB, updated_settings: UpdateUserWorkspaceSettingPB,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
let ai_model = updated_settings let ai_model = updated_settings.ai_model.clone();
.ai_model
.as_ref()
.map(|model| model.to_str().to_string());
let workspace_id = updated_settings.workspace_id.clone(); let workspace_id = updated_settings.workspace_id.clone();
let cloud_service = self.cloud_services.get_user_service()?; let cloud_service = self.cloud_services.get_user_service()?;
let settings = cloud_service let settings = cloud_service
@ -505,13 +502,13 @@ impl UserManager {
.payload(pb) .payload(pb)
.send(); .send();
if let Some(ai_model) = ai_model { if let Some(ai_model) = &ai_model {
if let Err(err) = self.cloud_services.set_ai_model(&ai_model) { if let Err(err) = self.cloud_services.set_ai_model(ai_model.to_str()) {
error!("Set ai model failed: {}", err); error!("Set ai model failed: {}", err);
} }
let conn = self.db_connection(uid)?; let conn = self.db_connection(uid)?;
let params = UpdateUserProfileParams::new(uid).with_ai_model(&ai_model); let params = UpdateUserProfileParams::new(uid).with_ai_model(ai_model.to_str());
upsert_user_profile_change(uid, conn, UserTableChangeset::new(params))?; upsert_user_profile_change(uid, conn, UserTableChangeset::new(params))?;
} }
Ok(()) Ok(())
@ -520,7 +517,6 @@ impl UserManager {
pub async fn get_workspace_settings(&self, workspace_id: &str) -> FlowyResult<UseAISettingPB> { pub async fn get_workspace_settings(&self, workspace_id: &str) -> FlowyResult<UseAISettingPB> {
let cloud_service = self.cloud_services.get_user_service()?; let cloud_service = self.cloud_services.get_user_service()?;
let settings = cloud_service.get_workspace_setting(workspace_id).await?; let settings = cloud_service.get_workspace_setting(workspace_id).await?;
let uid = self.user_id()?; let uid = self.user_id()?;
let conn = self.db_connection(uid)?; let conn = self.db_connection(uid)?;
let params = UpdateUserProfileParams::new(uid).with_ai_model(&settings.ai_model); let params = UpdateUserProfileParams::new(uid).with_ai_model(&settings.ai_model);