chore: document local ai (#5810)

* chore: document local ai

* chore: update ui

* chore: clippy
Nathan.fooo 2024-07-25 19:41:16 +08:00 committed by GitHub
parent 0373088fb8
commit 82fffba45a
34 changed files with 499 additions and 203 deletions

View File

@ -91,7 +91,7 @@ enum FeatureFlag {
   bool get isOn {
     if ([
-      // FeatureFlag.planBilling,
+      // if (kDebugMode) FeatureFlag.planBilling,
       // release this feature in version 0.6.1
       FeatureFlag.spaceDesign,
       // release this feature in version 0.5.9

View File

@ -0,0 +1,69 @@
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-error/errors.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-user/workspace.pb.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:bloc/bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
part 'local_ai_on_boarding_bloc.freezed.dart';
class LocalAIOnBoardingBloc
extends Bloc<LocalAIOnBoardingEvent, LocalAIOnBoardingState> {
LocalAIOnBoardingBloc(this.workspaceId)
: super(const LocalAIOnBoardingState()) {
_dispatch();
}
final String workspaceId;
void _dispatch() {
on<LocalAIOnBoardingEvent>((event, emit) {
event.when(
started: () {
_loadSubscriptionPlans();
},
didGetSubscriptionPlans: (result) {
result.fold(
(workspaceSubInfo) {
final isPurchaseAILocal = workspaceSubInfo.addOns.any((addOn) {
return addOn.type == WorkspaceAddOnPBType.AddOnAiLocal;
});
emit(
state.copyWith(isPurchaseAILocal: isPurchaseAILocal),
);
},
(err) {
Log.error("Failed to get subscription plans: $err");
},
);
},
);
});
}
void _loadSubscriptionPlans() {
final payload = UserWorkspaceIdPB()..workspaceId = workspaceId;
UserEventGetWorkspaceSubscriptionInfo(payload).send().then((result) {
if (!isClosed) {
add(LocalAIOnBoardingEvent.didGetSubscriptionPlans(result));
}
});
}
}
@freezed
class LocalAIOnBoardingEvent with _$LocalAIOnBoardingEvent {
const factory LocalAIOnBoardingEvent.started() = _Started;
const factory LocalAIOnBoardingEvent.didGetSubscriptionPlans(
FlowyResult<WorkspaceSubscriptionInfoPB, FlowyError> result,
) = _LoadSubscriptionPlans;
}
@freezed
class LocalAIOnBoardingState with _$LocalAIOnBoardingState {
const factory LocalAIOnBoardingState({
@Default(false) bool isPurchaseAILocal,
}) = _LocalAIOnBoardingState;
}

View File

@ -1,5 +1,6 @@
 import 'dart:async';
+import 'package:appflowy/core/helpers/url_launcher.dart';
 import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
 import 'package:appflowy_backend/dispatch/dispatch.dart';
 import 'package:appflowy_backend/log.dart';
@ -67,8 +68,20 @@ class PluginStateBloc extends Bloc<PluginStateEvent, PluginStateState> {
           break;
         }
       },
-      restartLocalAI: () {
-        ChatEventRestartLocalAIChat().send();
+      restartLocalAI: () async {
+        emit(
+          const PluginStateState(action: PluginStateAction.loadingPlugin()),
+        );
+        unawaited(ChatEventRestartLocalAIChat().send());
+      },
+      openModelDirectory: () async {
+        final result = await ChatEventGetModelStorageDirectory().send();
+        result.fold(
+          (data) {
+            afLaunchUrl(Uri.file(data.filePath));
+          },
+          (err) => Log.error(err.toString()),
+        );
       },
     );
   }
@ -80,12 +93,15 @@ class PluginStateEvent with _$PluginStateEvent {
   const factory PluginStateEvent.updateState(LocalAIPluginStatePB pluginState) =
       _UpdatePluginState;
   const factory PluginStateEvent.restartLocalAI() = _RestartLocalAI;
+  const factory PluginStateEvent.openModelDirectory() =
+      _OpenModelStorageDirectory;
 }
 @freezed
 class PluginStateState with _$PluginStateState {
-  const factory PluginStateState({required PluginStateAction action}) =
-      _PluginStateState;
+  const factory PluginStateState({
+    required PluginStateAction action,
+  }) = _PluginStateState;
 }
 @freezed

View File

@ -1,5 +1,6 @@
 import 'package:appflowy/shared/feature_flags.dart';
 import 'package:appflowy/workspace/presentation/home/menu/sidebar/footer/sidebar_toast.dart';
+import 'package:appflowy/workspace/presentation/settings/widgets/setting_appflowy_cloud.dart';
 import 'package:flutter/material.dart';
 import 'package:appflowy/generated/flowy_svgs.g.dart';
@ -19,7 +20,12 @@ class SidebarFooter extends StatelessWidget {
   Widget build(BuildContext context) {
     return Column(
       children: [
-        if (FeatureFlag.planBilling.isOn) const SidebarToast(),
+        if (FeatureFlag.planBilling.isOn)
+          BillingGateGuard(
+            builder: (context) {
+              return const SidebarToast();
+            },
+          ),
         const Row(
           children: [
             Expanded(child: SidebarTrashButton()),

View File

@ -37,18 +37,13 @@ class DownloadingIndicator extends StatelessWidget {
         color: Theme.of(context).colorScheme.surfaceContainerHighest,
         borderRadius: BorderRadius.circular(8),
       ),
-      child: Padding(
-        padding: const EdgeInsets.all(12.0),
       child: Column(
         children: [
-          // const DownloadingPrompt(),
-          // const VSpace(12),
           DownloadingProgressBar(onCancel: onCancel),
         ],
       ),
     ),
   ),
-  ),
 );
 }
 }
@ -65,10 +60,13 @@ class DownloadingProgressBar extends StatelessWidget {
     return Column(
       crossAxisAlignment: CrossAxisAlignment.start,
       children: [
-        FlowyText(
+        Opacity(
+          opacity: 0.6,
+          child: FlowyText(
           "${LocaleKeys.settings_aiPage_keys_downloadingModel.tr()}: ${state.object}",
           fontSize: 11,
         ),
+        ),
         IntrinsicHeight(
           child: Row(
             children: [

View File

@ -1,7 +1,7 @@
 import 'package:appflowy/generated/flowy_svgs.g.dart';
 import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_bloc.dart';
 import 'package:appflowy/workspace/application/settings/ai/local_ai_chat_toggle_bloc.dart';
-import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/downloading.dart';
+import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/downloading_model.dart';
 import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/init_local_ai.dart';
 import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/plugin_state.dart';
 import 'package:appflowy/workspace/presentation/widgets/dialogs.dart';
@ -70,7 +70,7 @@ class LocalAIChatSetting extends StatelessWidget {
       header: const LocalAIChatSettingHeader(),
       collapsed: const SizedBox.shrink(),
       expanded: Padding(
-        padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 6),
+        padding: const EdgeInsets.symmetric(vertical: 6),
         child: Column(
           crossAxisAlignment: CrossAxisAlignment.start,
           children: [
@ -240,7 +240,7 @@ class _LocalLLMInfoWidget extends StatelessWidget {
         );
       },
       finishDownload: () => const InitLocalAIIndicator(),
-      checkPluginState: () => const CheckPluginStateIndicator(),
+      checkPluginState: () => const PluginStateIndicator(),
     );
     return Padding(
@ -253,10 +253,13 @@ class _LocalLLMInfoWidget extends StatelessWidget {
       } else {
         return Opacity(
           opacity: 0.5,
+          child: Padding(
+            padding: const EdgeInsets.symmetric(vertical: 6),
           child: FlowyText(
             error.msg,
             maxLines: 10,
           ),
+          ),
         );
       }
     },

View File

@ -6,6 +6,7 @@ import 'package:appflowy/workspace/presentation/widgets/toggle/toggle.dart';
 import 'package:easy_localization/easy_localization.dart';
 import 'package:expandable/expandable.dart';
 import 'package:flowy_infra_ui/style_widget/text.dart';
+import 'package:flowy_infra_ui/widget/spacing.dart';
 import 'package:flutter/material.dart';
 import 'package:appflowy/workspace/application/settings/ai/settings_ai_bloc.dart';
@ -55,6 +56,7 @@ class _LocalAISettingState extends State<LocalAISetting> {
         collapsed: const SizedBox.shrink(),
         expanded: Column(
           children: [
+            const VSpace(6),
             DecoratedBox(
               decoration: BoxDecoration(
                 color: Theme.of(context)
@ -64,11 +66,8 @@ class _LocalAISettingState extends State<LocalAISetting> {
                     const BorderRadius.all(Radius.circular(4)),
               ),
               child: const Padding(
-                padding: EdgeInsets.only(
-                  left: 12.0,
-                  top: 6,
-                  bottom: 6,
-                ),
+                padding:
+                    EdgeInsets.symmetric(horizontal: 12, vertical: 6),
                 child: LocalAIChatSetting(),
               ),
             ),

View File

@ -8,8 +8,8 @@ import 'package:flowy_infra_ui/widget/spacing.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter_bloc/flutter_bloc.dart';
-class CheckPluginStateIndicator extends StatelessWidget {
-  const CheckPluginStateIndicator({super.key});
+class PluginStateIndicator extends StatelessWidget {
+  const PluginStateIndicator({super.key});
   @override
   Widget build(BuildContext context) {
@ -20,7 +20,7 @@ class CheckPluginStateIndicator extends StatelessWidget {
       builder: (context, state) {
         return state.action.when(
           init: () => const _InitPlugin(),
-          ready: () => const _ReadyToUse(),
+          ready: () => const _LocalAIReadyToUse(),
           restart: () => const _ReloadButton(),
           loadingPlugin: () => const _InitPlugin(),
         );
@ -74,8 +74,8 @@ class _ReloadButton extends StatelessWidget {
   }
 }
-class _ReadyToUse extends StatelessWidget {
-  const _ReadyToUse();
+class _LocalAIReadyToUse extends StatelessWidget {
+  const _LocalAIReadyToUse();
   @override
   Widget build(BuildContext context) {
@ -87,7 +87,7 @@ class _ReadyToUse extends StatelessWidget {
         ),
       ),
       child: Padding(
-        padding: const EdgeInsets.symmetric(vertical: 8),
+        padding: const EdgeInsets.symmetric(vertical: 4),
         child: Row(
           children: [
             const HSpace(8),
@ -101,6 +101,23 @@ class _ReadyToUse extends StatelessWidget {
               fontSize: 11,
               color: const Color(0xFF1E4620),
             ),
+            const Spacer(),
+            Padding(
+              padding: const EdgeInsets.symmetric(horizontal: 6),
+              child: FlowyButton(
+                useIntrinsicWidth: true,
+                text: FlowyText(
+                  LocaleKeys.settings_aiPage_keys_openModelDirectory.tr(),
+                  fontSize: 11,
+                  color: const Color(0xFF1E4620),
+                ),
+                onTap: () {
+                  context.read<PluginStateBloc>().add(
+                        const PluginStateEvent.openModelDirectory(),
+                      );
+                },
+              ),
+            ),
           ],
         ),
       ),

View File

@ -1,4 +1,12 @@
+import 'package:appflowy/generated/flowy_svgs.g.dart';
+import 'package:appflowy/shared/feature_flags.dart';
+import 'package:appflowy/workspace/application/settings/ai/local_ai_on_boarding_bloc.dart';
+import 'package:appflowy/workspace/application/settings/settings_dialog_bloc.dart';
+import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/local_ai_setting.dart';
 import 'package:appflowy/workspace/presentation/settings/pages/setting_ai_view/model_selection.dart';
+import 'package:appflowy/workspace/presentation/settings/widgets/setting_appflowy_cloud.dart';
+import 'package:flowy_infra/theme_extension.dart';
+import 'package:flowy_infra_ui/widget/spacing.dart';
 import 'package:flutter/material.dart';
 import 'package:appflowy/generated/locale_keys.g.dart';
@ -43,9 +51,12 @@ class SettingsAIView extends StatelessWidget {
       const AIModelSelection(),
     ];
-    // children.add(const LocalAISetting());
     children.add(const _AISearchToggle(value: false));
+    children.add(
+      _LocalAIOnBoarding(
+        workspaceId: userProfile.workspaceId,
+      ),
+    );
     return SettingsBody(
       title: LocaleKeys.settings_aiPage_title.tr(),
@ -101,3 +112,113 @@ class _AISearchToggle extends StatelessWidget {
     );
   }
 }
class _LocalAIOnBoarding extends StatelessWidget {
const _LocalAIOnBoarding({required this.workspaceId});
final String workspaceId;
@override
Widget build(BuildContext context) {
if (FeatureFlag.planBilling.isOn) {
return BillingGateGuard(
builder: (context) {
return BlocProvider(
create: (context) => LocalAIOnBoardingBloc(workspaceId)
..add(const LocalAIOnBoardingEvent.started()),
child: BlocBuilder<LocalAIOnBoardingBloc, LocalAIOnBoardingState>(
builder: (context, state) {
// Show the local AI settings if the user has purchased the AI Local plan
if (state.isPurchaseAILocal) {
return const LocalAISetting();
} else {
// Show the upgrade to AI Local plan button if the user has not purchased the AI Local plan
return _UpgradeToAILocalPlan(
onTap: () {
context.read<SettingsDialogBloc>().add(
const SettingsDialogEvent.setSelectedPage(
SettingsPage.plan,
),
);
},
);
}
},
),
);
},
);
} else {
return const SizedBox.shrink();
}
}
}
class _UpgradeToAILocalPlan extends StatefulWidget {
const _UpgradeToAILocalPlan({required this.onTap});
final VoidCallback onTap;
@override
State<_UpgradeToAILocalPlan> createState() => _UpgradeToAILocalPlanState();
}
class _UpgradeToAILocalPlanState extends State<_UpgradeToAILocalPlan> {
bool _isHovered = false;
@override
Widget build(BuildContext context) {
const textGradient = LinearGradient(
begin: Alignment.bottomLeft,
end: Alignment.bottomRight,
colors: [Color(0xFF8032FF), Color(0xFFEF35FF)],
stops: [0.1545, 0.8225],
);
final backgroundGradient = LinearGradient(
begin: Alignment.topLeft,
end: Alignment.bottomRight,
colors: [
_isHovered
? const Color(0xFF8032FF).withOpacity(0.3)
: Colors.transparent,
_isHovered
? const Color(0xFFEF35FF).withOpacity(0.3)
: Colors.transparent,
],
);
return GestureDetector(
onTap: widget.onTap,
child: MouseRegion(
cursor: SystemMouseCursors.click,
onEnter: (_) => setState(() => _isHovered = true),
onExit: (_) => setState(() => _isHovered = false),
child: Container(
padding: const EdgeInsets.symmetric(vertical: 8, horizontal: 10),
clipBehavior: Clip.antiAlias,
decoration: BoxDecoration(
gradient: backgroundGradient,
borderRadius: BorderRadius.circular(10),
),
child: Row(
children: [
const FlowySvg(
FlowySvgs.upgrade_storage_s,
blendMode: null,
),
const HSpace(6),
ShaderMask(
shaderCallback: (bounds) => textGradient.createShader(bounds),
blendMode: BlendMode.srcIn,
child: FlowyText(
LocaleKeys.sideBar_upgradeToAILocal.tr(),
color: AFThemeExtension.of(context).strongText,
),
),
],
),
),
),
);
}
}

View File

@ -1,3 +1,5 @@
+import 'package:appflowy_backend/log.dart';
+import 'package:flutter/foundation.dart';
 import 'package:flutter/gestures.dart';
 import 'package:flutter/material.dart';
@ -327,3 +329,51 @@ class AppFlowyCloudEnableSync extends StatelessWidget {
     );
   }
 }
class BillingGateGuard extends StatelessWidget {
const BillingGateGuard({required this.builder, super.key});
final Widget Function(BuildContext context) builder;
@override
Widget build(BuildContext context) {
return FutureBuilder(
future: isBillingEnabled(),
builder: (context, snapshot) {
final isBillingEnabled = snapshot.data ?? false;
if (isBillingEnabled &&
snapshot.connectionState == ConnectionState.done) {
return builder(context);
}
// If the billing is not enabled, show nothing
return const SizedBox.shrink();
},
);
}
}
Future<bool> isBillingEnabled() async {
final result = await UserEventGetCloudConfig().send();
return result.fold((cloudSetting) {
final whiteList = [
"https://beta.appflowy.cloud",
"https://test.appflowy.cloud",
];
if (kDebugMode) {
whiteList.add("http://localhost:8000");
}
if (whiteList.contains(cloudSetting.serverUrl)) {
return true;
} else {
Log.warn(
"Billing is not enabled for this server:${cloudSetting.serverUrl}",
);
return false;
}
}, (err) {
Log.error("Failed to get cloud config: $err");
return false;
});
}

View File

@ -206,7 +206,7 @@ dependencies = [
 [[package]]
 name = "appflowy-local-ai"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 dependencies = [
  "anyhow",
  "appflowy-plugin",
@ -225,7 +225,7 @@ dependencies = [
 [[package]]
 name = "appflowy-plugin"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 dependencies = [
  "anyhow",
  "cfg-if",

View File

@ -128,5 +128,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
 # To update the commit ID, run:
 # scripts/tool/update_local_ai_rev.sh new_rev_id
 # ⚠️⚠️⚠️️
-appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
-appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
+appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }
+appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }

View File

@ -197,7 +197,7 @@ dependencies = [
 [[package]]
 name = "appflowy-local-ai"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 dependencies = [
  "anyhow",
  "appflowy-plugin",
@ -216,7 +216,7 @@ dependencies = [
 [[package]]
 name = "appflowy-plugin"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 dependencies = [
  "anyhow",
  "cfg-if",

View File

@ -128,6 +128,6 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
 # To update the commit ID, run:
 # scripts/tool/update_local_ai_rev.sh new_rev_id
 # ⚠️⚠️⚠️️
-appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
-appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
+appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }
+appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }

View File

@ -660,7 +660,8 @@
"restartLocalAI": "Restart Local AI", "restartLocalAI": "Restart Local AI",
"disableLocalAIDialog": "Do you want to disable local AI?", "disableLocalAIDialog": "Do you want to disable local AI?",
"localAIToggleTitle": "Toggle to enable or disable local AI", "localAIToggleTitle": "Toggle to enable or disable local AI",
"fetchLocalModel": "Fetch local model configuration" "fetchLocalModel": "Fetch local model configuration",
"openModelDirectory": "Open folder"
} }
}, },
"planPage": { "planPage": {

View File

@ -197,7 +197,7 @@ dependencies = [
 [[package]]
 name = "appflowy-local-ai"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 dependencies = [
  "anyhow",
  "appflowy-plugin",
@ -216,7 +216,7 @@ dependencies = [
 [[package]]
 name = "appflowy-plugin"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=c4ab1db44e96348f9b0770dd8ecc990f68ac415d#c4ab1db44e96348f9b0770dd8ecc990f68ac415d"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=f3b678e36f22012b241f8e2f3cb811be2da245c0#f3b678e36f22012b241f8e2f3cb811be2da245c0"
 dependencies = [
  "anyhow",
  "cfg-if",

View File

@ -147,5 +147,5 @@ collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-
 # To update the commit ID, run:
 # scripts/tool/update_local_ai_rev.sh new_rev_id
 # ⚠️⚠️⚠️️
-appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
-appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "c4ab1db44e96348f9b0770dd8ecc990f68ac415d" }
+appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }
+appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "f3b678e36f22012b241f8e2f3cb811be2da245c0" }

View File

@ -15,7 +15,7 @@ use std::path::PathBuf;
 pub type ChatMessageStream = BoxStream<'static, Result<ChatMessage, AppResponseError>>;
 pub type StreamAnswer = BoxStream<'static, Result<Bytes, FlowyError>>;
-pub type StreamComplete = BoxStream<'static, Result<Bytes, AppResponseError>>;
+pub type StreamComplete = BoxStream<'static, Result<Bytes, FlowyError>>;
 #[async_trait]
 pub trait ChatCloudService: Send + Sync + 'static {
   fn create_chat(
@ -63,12 +63,12 @@ pub trait ChatCloudService: Send + Sync + 'static {
     limit: u64,
   ) -> FutureResult<RepeatedChatMessage, FlowyError>;
-  fn get_related_message(
+  async fn get_related_message(
     &self,
     workspace_id: &str,
     chat_id: &str,
     message_id: i64,
-  ) -> FutureResult<RepeatedRelatedQuestion, FlowyError>;
+  ) -> Result<RepeatedRelatedQuestion, FlowyError>;
   async fn stream_complete(
     &self,

View File

@ -2,7 +2,7 @@ use crate::chat_manager::ChatUserService;
 use crate::entities::{
   ChatMessageErrorPB, ChatMessageListPB, ChatMessagePB, RepeatedRelatedQuestionPB,
 };
-use crate::middleware::chat_service_mw::ChatServiceMiddleware;
+use crate::middleware::chat_service_mw::CloudServiceMiddleware;
 use crate::notification::{make_notification, ChatNotification};
 use crate::persistence::{insert_chat_messages, select_chat_messages, ChatMessageTable};
 use allo_isolate::Isolate;
@ -27,7 +27,7 @@ pub struct Chat {
   chat_id: String,
   uid: i64,
   user_service: Arc<dyn ChatUserService>,
-  chat_service: Arc<ChatServiceMiddleware>,
+  chat_service: Arc<CloudServiceMiddleware>,
   prev_message_state: Arc<RwLock<PrevMessageState>>,
   latest_message_id: Arc<AtomicI64>,
   stop_stream: Arc<AtomicBool>,
@ -39,7 +39,7 @@ impl Chat {
     uid: i64,
     chat_id: String,
     user_service: Arc<dyn ChatUserService>,
-    chat_service: Arc<ChatServiceMiddleware>,
+    chat_service: Arc<CloudServiceMiddleware>,
   ) -> Chat {
     Chat {
       uid,

View File

@ -1,7 +1,7 @@
 use crate::chat::Chat;
 use crate::entities::{ChatMessageListPB, ChatMessagePB, RepeatedRelatedQuestionPB};
 use crate::local_ai::local_llm_chat::LocalAIController;
-use crate::middleware::chat_service_mw::ChatServiceMiddleware;
+use crate::middleware::chat_service_mw::CloudServiceMiddleware;
 use crate::persistence::{insert_chat, ChatTable};
 use appflowy_plugin::manager::PluginManager;
@ -25,7 +25,7 @@ pub trait ChatUserService: Send + Sync + 'static {
 }
 pub struct ChatManager {
-  pub chat_service_wm: Arc<ChatServiceMiddleware>,
+  pub cloud_service_wm: Arc<CloudServiceMiddleware>,
   pub user_service: Arc<dyn ChatUserService>,
   chats: Arc<DashMap<String, Arc<Chat>>>,
   pub local_ai_controller: Arc<LocalAIController>,
@ -46,21 +46,21 @@ impl ChatManager {
       cloud_service.clone(),
     ));
-    if local_ai_controller.can_init() {
-      if let Err(err) = local_ai_controller.initialize_chat_plugin(None) {
+    if local_ai_controller.can_init_plugin() {
+      if let Err(err) = local_ai_controller.initialize_ai_plugin(None) {
         error!("[AI Plugin] failed to initialize local ai: {:?}", err);
       }
     }
     // setup local chat service
-    let chat_service_wm = Arc::new(ChatServiceMiddleware::new(
+    let cloud_service_wm = Arc::new(CloudServiceMiddleware::new(
       user_service.clone(),
       cloud_service,
       local_ai_controller.clone(),
     ));
     Self {
-      chat_service_wm,
+      cloud_service_wm,
      user_service,
      chats: Arc::new(DashMap::new()),
      local_ai_controller,
@ -74,12 +74,14 @@ impl ChatManager {
        self.user_service.user_id().unwrap(),
        chat_id.to_string(),
        self.user_service.clone(),
-        self.chat_service_wm.clone(),
+        self.cloud_service_wm.clone(),
      ))
    });
    trace!("[AI Plugin] notify open chat: {}", chat_id);
+    if self.local_ai_controller.is_running() {
      self.local_ai_controller.open_chat(chat_id);
+    }
    Ok(())
  }
@ -108,7 +110,7 @@ impl ChatManager {
  pub async fn create_chat(&self, uid: &i64, chat_id: &str) -> Result<Arc<Chat>, FlowyError> {
    let workspace_id = self.user_service.workspace_id()?;
    self
-      .chat_service_wm
+      .cloud_service_wm
      .create_chat(uid, &workspace_id, chat_id)
      .await?;
    save_chat(self.user_service.sqlite_connection(*uid)?, chat_id)?;
@ -117,7 +119,7 @@ impl ChatManager {
      self.user_service.user_id().unwrap(),
      chat_id.to_string(),
      self.user_service.clone(),
-      self.chat_service_wm.clone(),
+      self.cloud_service_wm.clone(),
    ));
    self.chats.insert(chat_id.to_string(), chat.clone());
    Ok(chat)
@ -145,7 +147,7 @@ impl ChatManager {
      self.user_service.user_id().unwrap(),
      chat_id.to_string(),
      self.user_service.clone(),
-      self.chat_service_wm.clone(),
+      self.cloud_service_wm.clone(),
    ));
    self.chats.insert(chat_id.to_string(), chat.clone());
    Ok(chat)

View File

@ -410,3 +410,9 @@ pub struct LocalAIChatPB {
   #[pb(index = 3)]
   pub plugin_state: LocalAIPluginStatePB,
 }
+#[derive(Default, ProtoBuf, Clone, Debug)]
+pub struct LocalModelStoragePB {
+  #[pb(index = 1)]
+  pub file_path: String,
+}

View File

@ -87,9 +87,15 @@ pub(crate) async fn get_related_question_handler(
 ) -> DataResult<RepeatedRelatedQuestionPB, FlowyError> {
   let chat_manager = upgrade_chat_manager(chat_manager)?;
   let data = data.into_inner();
+  let (tx, rx) = tokio::sync::oneshot::channel();
+  tokio::spawn(async move {
     let messages = chat_manager
       .get_related_questions(&data.chat_id, data.message_id)
       .await?;
+    let _ = tx.send(messages);
+    Ok::<_, FlowyError>(())
+  });
+  let messages = rx.await?;
   data_result_ok(messages)
 }
@ -338,3 +344,14 @@ pub(crate) async fn get_local_ai_state_handler(
   let enabled = chat_manager.local_ai_controller.is_enabled();
   data_result_ok(LocalAIPB { enabled })
 }
+#[tracing::instrument(level = "debug", skip_all, err)]
+pub(crate) async fn get_model_storage_directory_handler(
+  chat_manager: AFPluginState<Weak<ChatManager>>,
+) -> DataResult<LocalModelStoragePB, FlowyError> {
+  let chat_manager = upgrade_chat_manager(chat_manager)?;
+  let file_path = chat_manager
+    .local_ai_controller
+    .get_model_storage_directory()?;
+  data_result_ok(LocalModelStoragePB { file_path })
+}
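Note on the handler change above: the related-questions lookup now runs on a spawned task and hands its result back through a oneshot channel instead of being awaited inline. A minimal, self-contained sketch of that hand-off pattern follows; `fetch_related_questions` and the `String` error type are placeholders rather than the real chat-manager API, and it assumes a `tokio` dependency with the `full` feature set.

```rust
use tokio::sync::oneshot;

// Stand-in for the real lookup; the diff above calls
// `chat_manager.get_related_questions(&chat_id, message_id)` instead.
async fn fetch_related_questions(chat_id: String, message_id: i64) -> Result<Vec<String>, String> {
  Ok(vec![format!("related to {chat_id}/{message_id}")])
}

#[tokio::main]
async fn main() -> Result<(), String> {
  let (tx, rx) = oneshot::channel();

  // Run the lookup on its own task and send the result back over the channel,
  // mirroring the handler's spawn + oneshot structure.
  tokio::spawn(async move {
    let messages = fetch_related_questions("chat-1".to_string(), 42).await?;
    let _ = tx.send(messages);
    Ok::<_, String>(())
  });

  // The handler itself only awaits the receiving end.
  let messages = rx.await.map_err(|e| e.to_string())?;
  println!("{messages:?}");
  Ok(())
}
```

The caller never awaits the work directly, only the receiver, so the actual lookup runs to completion on its own task while the handler stays a thin await on `rx`.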

View File

@ -11,7 +11,7 @@ use crate::event_handler::*;
 pub fn init(chat_manager: Weak<ChatManager>) -> AFPlugin {
   let user_service = Arc::downgrade(&chat_manager.upgrade().unwrap().user_service);
-  let cloud_service = Arc::downgrade(&chat_manager.upgrade().unwrap().chat_service_wm);
+  let cloud_service = Arc::downgrade(&chat_manager.upgrade().unwrap().cloud_service_wm);
   let ai_tools = Arc::new(AITools::new(cloud_service, user_service));
   AFPlugin::new()
     .name("Flowy-Chat")
@ -53,6 +53,10 @@ pub fn init(chat_manager: Weak<ChatManager>) -> AFPlugin {
       ChatEvent::ToggleChatWithFile,
       toggle_local_ai_chat_file_handler,
     )
+    .event(
+      ChatEvent::GetModelStorageDirectory,
+      get_model_storage_directory_handler,
+    )
 }
 #[derive(Clone, Copy, PartialEq, Eq, Debug, Display, Hash, ProtoBuf_Enum, Flowy_Event)]
@ -126,4 +130,7 @@ pub enum ChatEvent {
   #[event()]
   ToggleChatWithFile = 20,
+  #[event(output = "LocalModelStoragePB")]
+  GetModelStorageDirectory = 21,
 }

View File

@ -1,7 +1,5 @@
 use crate::chat_manager::ChatUserService;
-use crate::entities::{
-  ChatStatePB, LocalAIPluginStatePB, LocalModelResourcePB, ModelTypePB, RunningStatePB,
-};
+use crate::entities::{LocalAIPluginStatePB, LocalModelResourcePB, RunningStatePB};
 use crate::local_ai::local_llm_resource::{LLMResourceController, LLMResourceService};
 use crate::notification::{make_notification, ChatNotification, APPFLOWY_AI_NOTIFICATION_KEY};
 use anyhow::Error;
@ -100,7 +98,7 @@ impl LocalAIController {
   tokio::spawn(async move {
     while rx.recv().await.is_some() {
       if let Ok(chat_config) = cloned_llm_res.get_chat_config(rag_enabled) {
-        if let Err(err) = initialize_chat_plugin(&cloned_llm_chat, chat_config) {
+        if let Err(err) = initialize_chat_plugin(&cloned_llm_chat, chat_config, None) {
           error!("[AI Plugin] failed to setup plugin: {:?}", err);
         }
       }
@ -113,79 +111,49 @@ impl LocalAIController {
     self.llm_res.refresh_llm_resource().await
   }
-  pub fn initialize_chat_plugin(
+  pub fn initialize_ai_plugin(
     &self,
     ret: Option<tokio::sync::oneshot::Sender<()>>,
   ) -> FlowyResult<()> {
-    let mut chat_config = self.llm_res.get_chat_config(self.is_rag_enabled())?;
-    let llm_chat = self.llm_chat.clone();
-    tokio::spawn(async move {
-      trace!("[AI Plugin] config: {:?}", chat_config);
-      if is_apple_silicon().await.unwrap_or(false) {
-        chat_config = chat_config.with_device("gpu");
-      }
-      match llm_chat.init_chat_plugin(chat_config).await {
-        Ok(_) => {
-          make_notification(
-            APPFLOWY_AI_NOTIFICATION_KEY,
-            ChatNotification::UpdateChatPluginState,
-          )
-          .payload(ChatStatePB {
-            model_type: ModelTypePB::LocalAI,
-            available: true,
-          })
-          .send();
-        },
-        Err(err) => {
-          make_notification(
-            APPFLOWY_AI_NOTIFICATION_KEY,
-            ChatNotification::UpdateChatPluginState,
-          )
-          .payload(ChatStatePB {
-            model_type: ModelTypePB::LocalAI,
-            available: false,
-          })
-          .send();
-          error!("[AI Plugin] failed to setup plugin: {:?}", err);
-        },
-      }
-      if let Some(ret) = ret {
-        let _ = ret.send(());
-      }
-    });
+    let chat_config = self.llm_res.get_chat_config(self.is_rag_enabled())?;
+    initialize_chat_plugin(&self.llm_chat, chat_config, ret)?;
     Ok(())
   }
   /// Returns true if the local AI is enabled and ready to use.
-  pub fn can_init(&self) -> bool {
+  pub fn can_init_plugin(&self) -> bool {
     self.is_enabled() && self.llm_res.is_resource_ready()
   }
+  /// Indicate whether the local AI plugin is running.
   pub fn is_running(&self) -> bool {
     self.llm_chat.get_plugin_running_state().is_ready()
   }
+  /// Indicate whether the local AI is enabled.
   pub fn is_enabled(&self) -> bool {
-    self.store_preferences.get_bool(APPFLOWY_LOCAL_AI_ENABLED)
+    self
+      .store_preferences
+      .get_bool(APPFLOWY_LOCAL_AI_ENABLED)
+      .unwrap_or(true)
   }
+  /// Indicate whether the local AI chat is enabled. In the future, we can support multiple
+  /// AI plugin.
   pub fn is_chat_enabled(&self) -> bool {
     self
       .store_preferences
       .get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED)
+      .unwrap_or(true)
   }
   pub fn is_rag_enabled(&self) -> bool {
     self
       .store_preferences
-      .get_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED)
+      .get_bool_or_default(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED)
   }
   pub fn open_chat(&self, chat_id: &str) {
+    if !self.is_chat_enabled() {
+      return;
+    }
     if !self.is_running() {
       return;
     }
@ -234,7 +202,7 @@ impl LocalAIController {
     let state = self.llm_res.use_local_llm(llm_id)?;
     // Re-initialize the plugin if the setting is updated and ready to use
     if self.llm_res.is_resource_ready() {
-      self.initialize_chat_plugin(None)?;
+      self.initialize_ai_plugin(None)?;
     }
     Ok(state)
   }
@ -270,14 +238,24 @@ impl LocalAIController {
   pub fn restart_chat_plugin(&self) {
     let rag_enabled = self.is_rag_enabled();
     if let Ok(chat_config) = self.llm_res.get_chat_config(rag_enabled) {
-      if let Err(err) = initialize_chat_plugin(&self.llm_chat, chat_config) {
+      if let Err(err) = initialize_chat_plugin(&self.llm_chat, chat_config, None) {
         error!("[AI Plugin] failed to setup plugin: {:?}", err);
       }
     }
   }
+  pub fn get_model_storage_directory(&self) -> FlowyResult<String> {
+    self
+      .llm_res
+      .user_model_folder()
+      .map(|path| path.to_string_lossy().to_string())
+  }
   pub async fn toggle_local_ai(&self) -> FlowyResult<bool> {
-    let enabled = !self.store_preferences.get_bool(APPFLOWY_LOCAL_AI_ENABLED);
+    let enabled = !self
+      .store_preferences
+      .get_bool(APPFLOWY_LOCAL_AI_ENABLED)
+      .unwrap_or(true);
     self
       .store_preferences
       .set_bool(APPFLOWY_LOCAL_AI_ENABLED, enabled)?;
@ -287,7 +265,7 @@ impl LocalAIController {
     if enabled {
       let chat_enabled = self
         .store_preferences
-        .get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED);
+        .get_bool_or_default(APPFLOWY_LOCAL_AI_CHAT_ENABLED);
       self.enable_chat_plugin(chat_enabled).await?;
     } else {
       self.enable_chat_plugin(false).await?;
@ -298,7 +276,8 @@ impl LocalAIController {
   pub async fn toggle_local_ai_chat(&self) -> FlowyResult<bool> {
     let enabled = !self
       .store_preferences
-      .get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED);
+      .get_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED)
+      .unwrap_or(true);
     self
       .store_preferences
       .set_bool(APPFLOWY_LOCAL_AI_CHAT_ENABLED, enabled)?;
@ -310,7 +289,7 @@ impl LocalAIController {
   pub async fn toggle_local_ai_chat_rag(&self) -> FlowyResult<bool> {
     let enabled = !self
       .store_preferences
-      .get_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED);
+      .get_bool_or_default(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED);
     self
       .store_preferences
       .set_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED, enabled)?;
@ -320,7 +299,7 @@ impl LocalAIController {
   async fn enable_chat_plugin(&self, enabled: bool) -> FlowyResult<()> {
     if enabled {
       let (tx, rx) = tokio::sync::oneshot::channel();
-      if let Err(err) = self.initialize_chat_plugin(Some(tx)) {
+      if let Err(err) = self.initialize_ai_plugin(Some(tx)) {
         error!("[AI Plugin] failed to initialize local ai: {:?}", err);
       }
       let _ = rx.await;
@ -334,6 +313,7 @@ impl LocalAIController {
 fn initialize_chat_plugin(
   llm_chat: &Arc<LocalChatLLMChat>,
   mut chat_config: AIPluginConfig,
+  ret: Option<tokio::sync::oneshot::Sender<()>>,
 ) -> FlowyResult<()> {
   let llm_chat = llm_chat.clone();
   tokio::spawn(async move {
@ -342,29 +322,12 @@ fn initialize_chat_plugin(
       chat_config = chat_config.with_device("gpu");
     }
     match llm_chat.init_chat_plugin(chat_config).await {
-      Ok(_) => {
-        make_notification(
-          APPFLOWY_AI_NOTIFICATION_KEY,
-          ChatNotification::UpdateChatPluginState,
-        )
-        .payload(ChatStatePB {
-          model_type: ModelTypePB::LocalAI,
-          available: true,
-        })
-        .send();
-      },
-      Err(err) => {
-        make_notification(
-          APPFLOWY_AI_NOTIFICATION_KEY,
-          ChatNotification::UpdateChatPluginState,
-        )
-        .payload(ChatStatePB {
-          model_type: ModelTypePB::LocalAI,
-          available: false,
-        })
-        .send();
-        error!("[AI Plugin] failed to setup plugin: {:?}", err);
-      },
+      Ok(_) => {},
+      Err(err) => error!("[AI Plugin] failed to setup plugin: {:?}", err),
+    }
+    if let Some(ret) = ret {
+      let _ = ret.send(());
     }
   });
   Ok(())
@ -402,6 +365,6 @@ impl LLMResourceService for LLMResourceServiceImpl {
   fn is_rag_enabled(&self) -> bool {
     self
       .store_preferences
-      .get_bool(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED)
+      .get_bool_or_default(APPFLOWY_LOCAL_AI_CHAT_RAG_ENABLED)
   }
 }

View File

@ -478,7 +478,7 @@ impl LLMResourceController {
     self.resource_dir().map(|dir| dir.join(PLUGIN_DIR))
   }
-  fn user_model_folder(&self) -> FlowyResult<PathBuf> {
+  pub(crate) fn user_model_folder(&self) -> FlowyResult<PathBuf> {
     self.resource_dir().map(|dir| dir.join(LLM_MODEL_DIR))
   }

View File

@ -7,7 +7,7 @@ use appflowy_plugin::error::PluginError;
 use flowy_chat_pub::cloud::{
   ChatCloudService, ChatMessage, ChatMessageType, CompletionType, LocalAIConfig, MessageCursor,
-  RepeatedChatMessage, RepeatedRelatedQuestion, StreamAnswer, StreamComplete,
+  RelatedQuestion, RepeatedChatMessage, RepeatedRelatedQuestion, StreamAnswer, StreamComplete,
 };
 use flowy_error::{FlowyError, FlowyResult};
 use futures::{stream, StreamExt, TryStreamExt};
@ -17,13 +17,13 @@ use lib_infra::future::FutureResult;
 use std::path::PathBuf;
 use std::sync::Arc;
-pub struct ChatServiceMiddleware {
-  pub cloud_service: Arc<dyn ChatCloudService>,
+pub struct CloudServiceMiddleware {
+  cloud_service: Arc<dyn ChatCloudService>,
   user_service: Arc<dyn ChatUserService>,
   local_llm_controller: Arc<LocalAIController>,
 }
-impl ChatServiceMiddleware {
+impl CloudServiceMiddleware {
   pub fn new(
     user_service: Arc<dyn ChatUserService>,
     cloud_service: Arc<dyn ChatCloudService>,
@ -67,7 +67,7 @@ impl ChatServiceMiddleware {
 }
 #[async_trait]
-impl ChatCloudService for ChatServiceMiddleware {
+impl ChatCloudService for CloudServiceMiddleware {
   fn create_chat(
     &self,
     uid: &i64,
@ -177,23 +177,34 @@ impl ChatCloudService for ChatServiceMiddleware {
       .get_chat_messages(workspace_id, chat_id, offset, limit)
   }
-  fn get_related_message(
+  async fn get_related_message(
     &self,
     workspace_id: &str,
     chat_id: &str,
     message_id: i64,
-  ) -> FutureResult<RepeatedRelatedQuestion, FlowyError> {
+  ) -> Result<RepeatedRelatedQuestion, FlowyError> {
     if self.local_llm_controller.is_running() {
-      FutureResult::new(async move {
+      let questions = self
+        .local_llm_controller
+        .get_related_question(chat_id)
+        .await
+        .map_err(|err| FlowyError::local_ai().with_context(err))?
+        .into_iter()
+        .map(|content| RelatedQuestion {
+          content,
+          metadata: None,
+        })
+        .collect::<Vec<_>>();
       Ok(RepeatedRelatedQuestion {
         message_id,
-        items: vec![],
-      })
+        items: questions,
       })
     } else {
       self
         .cloud_service
         .get_related_message(workspace_id, chat_id, message_id)
+        .await
     }
   }
@ -204,9 +215,21 @@ impl ChatCloudService for ChatServiceMiddleware {
     complete_type: CompletionType,
   ) -> Result<StreamComplete, FlowyError> {
     if self.local_llm_controller.is_running() {
-      return Err(
-        FlowyError::not_support().with_context("completion with local ai is not supported yet"),
-      );
+      match self
+        .local_llm_controller
+        .complete_text(text, complete_type as u8)
+        .await
+      {
+        Ok(stream) => Ok(
+          stream
+            .map_err(|err| FlowyError::local_ai().with_context(err))
+            .boxed(),
+        ),
+        Err(err) => {
+          self.handle_plugin_error(err);
+          Ok(stream::once(async { Err(FlowyError::local_ai_unavailable()) }).boxed())
+        },
+      }
     } else {
       self
         .cloud_service
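The middleware above routes each chat request to the local AI plugin when it is running and otherwise falls back to the cloud service. A simplified sketch of that routing idea, using a made-up `RelatedQuestionService` trait and toy backends rather than the real `ChatCloudService` API:

```rust
use std::sync::Arc;

// Hypothetical service trait standing in for the real ChatCloudService.
trait RelatedQuestionService {
  fn related_questions(&self, chat_id: &str) -> Vec<String>;
}

struct LocalAI {
  running: bool,
}

impl LocalAI {
  fn is_running(&self) -> bool {
    self.running
  }
}

impl RelatedQuestionService for LocalAI {
  fn related_questions(&self, chat_id: &str) -> Vec<String> {
    vec![format!("local question for {chat_id}")]
  }
}

struct CloudService;

impl RelatedQuestionService for CloudService {
  fn related_questions(&self, chat_id: &str) -> Vec<String> {
    vec![format!("cloud question for {chat_id}")]
  }
}

// Middleware that owns both backends, like CloudServiceMiddleware does.
struct Middleware {
  local: Arc<LocalAI>,
  cloud: Arc<CloudService>,
}

impl RelatedQuestionService for Middleware {
  fn related_questions(&self, chat_id: &str) -> Vec<String> {
    // Route to the local plugin only when it is actually running.
    if self.local.is_running() {
      self.local.related_questions(chat_id)
    } else {
      self.cloud.related_questions(chat_id)
    }
  }
}

fn main() {
  let middleware = Middleware {
    local: Arc::new(LocalAI { running: false }),
    cloud: Arc::new(CloudService),
  };
  // With the plugin stopped, the call falls through to the cloud backend.
  println!("{:?}", middleware.related_questions("chat-1"));
}
```

Callers keep talking to one object regardless of where the answer comes from, which is why the rename to CloudServiceMiddleware does not ripple beyond the chat manager.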

View File

@ -1 +1 @@
-pub mod chat_service_mw;
+pub(crate) mod chat_service_mw;

View File

@ -11,7 +11,6 @@ use lib_infra::isolate_stream::IsolateSink;
 use std::sync::{Arc, Weak};
 use tokio::select;
-use tracing::trace;
 pub struct AITools {
   tasks: Arc<DashMap<String, tokio::sync::mpsc::Sender<()>>>,
@ -109,11 +108,10 @@ impl ToolTask {
       match result {
         Some(Ok(data)) => {
           let s = String::from_utf8(data.to_vec()).unwrap_or_default();
-          trace!("stream completion data: {}", s);
           let _ = sink.send(format!("data:{}", s)).await;
         },
         Some(Err(error)) => {
-          handle_error(&mut sink, FlowyError::from(error)).await;
+          handle_error(&mut sink, error).await;
           return;
         },
         None => {

View File

@ -685,21 +685,17 @@ impl ChatCloudService for ServerProvider {
     })
   }
-  fn get_related_message(
+  async fn get_related_message(
     &self,
     workspace_id: &str,
     chat_id: &str,
     message_id: i64,
-  ) -> FutureResult<RepeatedRelatedQuestion, FlowyError> {
-    let workspace_id = workspace_id.to_string();
-    let chat_id = chat_id.to_string();
-    let server = self.get_server();
-    FutureResult::new(async move {
-      server?
+  ) -> Result<RepeatedRelatedQuestion, FlowyError> {
+    self
+      .get_server()?
       .chat_service()
-      .get_related_message(&workspace_id, &chat_id, message_id)
+      .get_related_message(workspace_id, chat_id, message_id)
       .await
-    })
   }
   async fn generate_answer(

View File

@ -146,24 +146,19 @@ where
     })
   }
-  fn get_related_message(
+  async fn get_related_message(
     &self,
     workspace_id: &str,
     chat_id: &str,
     message_id: i64,
-  ) -> FutureResult<RepeatedRelatedQuestion, FlowyError> {
-    let workspace_id = workspace_id.to_string();
-    let chat_id = chat_id.to_string();
+  ) -> Result<RepeatedRelatedQuestion, FlowyError> {
     let try_get_client = self.inner.try_get_client();
-    FutureResult::new(async move {
     let resp = try_get_client?
-      .get_chat_related_question(&workspace_id, &chat_id, message_id)
+      .get_chat_related_question(workspace_id, chat_id, message_id)
       .await
       .map_err(FlowyError::from)?;
     Ok(resp)
-    })
   }
   async fn stream_complete(
@ -181,7 +176,8 @@ where
       .try_get_client()?
       .stream_completion_text(workspace_id, params)
       .await
-      .map_err(FlowyError::from)?;
+      .map_err(FlowyError::from)?
+      .map_err(FlowyError::from);
     Ok(stream.boxed())
   }

View File

@ -66,15 +66,13 @@ impl ChatCloudService for DefaultChatCloudServiceImpl {
     })
   }
-  fn get_related_message(
+  async fn get_related_message(
     &self,
     _workspace_id: &str,
     _chat_id: &str,
     _message_id: i64,
-  ) -> FutureResult<RepeatedRelatedQuestion, FlowyError> {
-    FutureResult::new(async move {
+  ) -> Result<RepeatedRelatedQuestion, FlowyError> {
     Err(FlowyError::not_support().with_context("Chat is not supported in local server."))
-    })
   }
   async fn generate_answer(

View File

@ -63,7 +63,7 @@ impl KVStorePreferences {
   }
   /// Get a bool value of a key
-  pub fn get_bool(&self, key: &str) -> bool {
+  pub fn get_bool_or_default(&self, key: &str) -> bool {
     self
       .get_key_value(key)
       .and_then(|kv| kv.value)
@ -71,6 +71,13 @@
       .unwrap_or(false)
   }
+  pub fn get_bool(&self, key: &str) -> Option<bool> {
+    self
+      .get_key_value(key)
+      .and_then(|kv| kv.value)
+      .and_then(|v| v.parse::<bool>().ok())
+  }
   /// Get a i64 value of a key
   pub fn get_i64(&self, key: &str) -> Option<i64> {
     self
@ -157,8 +164,8 @@ mod tests {
     assert_eq!(store.get_str("2"), None);
     store.set_bool("1", true).unwrap();
-    assert!(store.get_bool("1"));
-    assert!(!store.get_bool("2"));
+    assert!(store.get_bool_or_default("1"));
+    assert!(!store.get_bool_or_default("2"));
     store.set_i64("1", 1).unwrap();
     assert_eq!(store.get_i64("1").unwrap(), 1);
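The preference store now exposes two boolean accessors: `get_bool` returns `Option<bool>` so callers can pick their own fallback (the local AI toggles above use `.unwrap_or(true)` to treat the feature as enabled until the user turns it off), while `get_bool_or_default` keeps the old missing-key-means-false behavior. A small sketch of that distinction, using a hypothetical in-memory store and illustrative key strings rather than the real `KVStorePreferences`:

```rust
use std::collections::HashMap;

// Hypothetical in-memory stand-in for KVStorePreferences, only to show the two
// accessor shapes introduced in the diff.
struct Prefs {
  values: HashMap<String, String>,
}

impl Prefs {
  fn get_bool(&self, key: &str) -> Option<bool> {
    // None when the key was never written, mirroring the Option-returning accessor.
    self.values.get(key).and_then(|v| v.parse::<bool>().ok())
  }

  fn get_bool_or_default(&self, key: &str) -> bool {
    // Missing keys fall back to false, like the renamed default-returning accessor.
    self.get_bool(key).unwrap_or(false)
  }
}

fn main() {
  let prefs = Prefs { values: HashMap::new() };

  // With the Option-returning accessor the caller chooses the default, which is
  // how local AI can count as enabled before the user ever touches the toggle.
  // The key string here is illustrative, not the real constant value.
  let local_ai_enabled = prefs.get_bool("appflowy_local_ai_enabled").unwrap_or(true);
  assert!(local_ai_enabled);

  // The default-returning accessor keeps the old "missing means false" semantics.
  assert!(!prefs.get_bool_or_default("migration_user_no_user_uuid"));
}
```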

View File

@ -10,7 +10,7 @@ pub fn migrate_session_with_user_uuid(
   session_cache_key: &str,
   store_preferences: &Arc<KVStorePreferences>,
 ) -> Option<Session> {
-  if !store_preferences.get_bool(MIGRATION_USER_NO_USER_UUID)
+  if !store_preferences.get_bool_or_default(MIGRATION_USER_NO_USER_UUID)
     && store_preferences
       .set_bool(MIGRATION_USER_NO_USER_UUID, true)
       .is_ok()

View File

@ -40,7 +40,10 @@ impl AuthenticateUser {
   }
   pub fn vacuum_database_if_need(&self) {
-    if !self.store_preferences.get_bool(SQLITE_VACUUM_042) {
+    if !self
+      .store_preferences
+      .get_bool_or_default(SQLITE_VACUUM_042)
+    {
       if let Ok(session) = self.get_session() {
         let _ = self.store_preferences.set_bool(SQLITE_VACUUM_042, true);
         if let Ok(conn) = self.database.get_connection(session.user_id) {