mirror of https://github.com/AppFlowy-IO/AppFlowy.git
synced 2024-08-30 18:12:39 +00:00

chore: update commit id

This commit is contained in:
parent ffc75106f3
commit a9b43b6454

@@ -119,7 +119,6 @@ class ChatBloc extends Bloc<ChatEvent, ChatState> {
         final uniqueMessages = {...allMessages, ...messages}.toList()
           ..sort((a, b) => b.id.compareTo(a.id));
         uniqueMessages.insertAll(0, onetimeMessages);
-
         emit(
           state.copyWith(
             messages: uniqueMessages,

@@ -380,7 +379,8 @@ class ChatBloc extends Bloc<ChatEvent, ChatState> {
   }

   Message _createStreamMessage(AnswerStream stream, Int64 questionMessageId) {
-    final streamMessageId = nanoid();
+    final streamMessageId = (questionMessageId + 1).toString();
+
     lastStreamMessageId = streamMessageId;

     return TextMessage(

frontend/rust-lib/Cargo.lock (generated): 8 changed lines

@@ -195,9 +195,9 @@ dependencies = [
 ]

 [[package]]
-name = "appflowy-local-ai-chat"
+name = "appflowy-local-ai"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=b7f51a3f#b7f51a3fe79142582d89c4e577ccd36957cc2c00"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8bb364#8bb364a97fe1c3eec5c5092dd8208883a4e6186b"
 dependencies = [
  "anyhow",
  "appflowy-plugin",

@@ -212,7 +212,7 @@ dependencies = [
 [[package]]
 name = "appflowy-plugin"
 version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=b7f51a3f#b7f51a3fe79142582d89c4e577ccd36957cc2c00"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-LocalAI?rev=8bb364#8bb364a97fe1c3eec5c5092dd8208883a4e6186b"
 dependencies = [
  "anyhow",
  "cfg-if",

@@ -1701,7 +1701,7 @@ version = "0.1.0"
 dependencies = [
  "allo-isolate",
  "anyhow",
- "appflowy-local-ai-chat",
+ "appflowy-local-ai",
  "appflowy-plugin",
  "bytes",
  "dashmap",

@@ -145,5 +145,5 @@ collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFl
 collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "3a58d95" }
 collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "3a58d95" }

-appflowy-local-ai-chat = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "b7f51a3f" }
-appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "b7f51a3f" }
+appflowy-local-ai = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8bb364" }
+appflowy-plugin = { version = "0.1", git = "https://github.com/AppFlowy-IO/AppFlowy-LocalAI", rev = "8bb364" }

@@ -33,7 +33,7 @@ serde_json = { workspace = true }
 anyhow = "1.0.86"
 tokio-stream = "0.1.15"
 parking_lot.workspace = true
-appflowy-local-ai-chat = { version = "0.1.0", features = ["verbose"] }
+appflowy-local-ai = { version = "0.1.0", features = ["verbose"] }
 appflowy-plugin = { version = "0.1.0", features = ["verbose"] }

 [dev-dependencies]

@@ -1,8 +1,8 @@
 use crate::chat_manager::ChatUserService;
-use crate::chat_service_impl::ChatService;
 use crate::entities::{
   ChatMessageErrorPB, ChatMessageListPB, ChatMessagePB, RepeatedRelatedQuestionPB,
 };
+use crate::middleware::chat_service_mw::ChatService;
 use crate::notification::{send_notification, ChatNotification};
 use crate::persistence::{insert_chat_messages, select_chat_messages, ChatMessageTable};
 use allo_isolate::Isolate;

@@ -1,9 +1,9 @@
 use crate::chat::Chat;
-use crate::chat_service_impl::ChatService;
 use crate::entities::{ChatMessageListPB, ChatMessagePB, RepeatedRelatedQuestionPB};
+use crate::middleware::chat_service_mw::ChatService;
 use crate::persistence::{insert_chat, ChatTable};
-use appflowy_local_ai_chat::llm_chat::{LocalChatLLMChat, LocalLLMSetting};
-use appflowy_plugin::manager::SidecarManager;
+use appflowy_local_ai::llm_chat::{LocalChatLLMChat, LocalLLMSetting};
+use appflowy_plugin::manager::PluginManager;
 use dashmap::DashMap;
 use flowy_chat_pub::cloud::{ChatCloudService, ChatMessageType};
 use flowy_error::{FlowyError, FlowyResult};

@@ -38,7 +38,7 @@ impl ChatManager {
     let local_ai_setting = store_preferences
       .get_object::<LocalLLMSetting>(LOCAL_AI_SETTING_KEY)
       .unwrap_or_default();
-    let sidecar_manager = Arc::new(SidecarManager::new());
+    let sidecar_manager = Arc::new(PluginManager::new());

     // setup local AI chat plugin
     let local_llm_ctrl = Arc::new(LocalChatLLMChat::new(sidecar_manager));

@@ -1,4 +1,4 @@
-use appflowy_local_ai_chat::llm_chat::LocalLLMSetting;
+use appflowy_local_ai::llm_chat::LocalLLMSetting;
 use flowy_chat_pub::cloud::{
   ChatMessage, RelatedQuestion, RepeatedChatMessage, RepeatedRelatedQuestion,
 };

@@ -267,3 +267,19 @@ pub enum CompletionTypePB {
   MakeLonger = 4,
   ContinueWriting = 5,
 }
+
+#[derive(Default, ProtoBuf, Clone, Debug)]
+pub struct ChatStatePB {
+  #[pb(index = 1)]
+  pub model_type: ModelTypePB,
+
+  #[pb(index = 2)]
+  pub available: bool,
+}
+
+#[derive(Clone, Debug, ProtoBuf_Enum, Default)]
+pub enum ModelTypePB {
+  LocalAI = 0,
+  #[default]
+  RemoteAI = 1,
+}

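A note on the new protobuf types (illustration, not part of the diff): because `ModelTypePB` derives `Default` with `#[default]` on `RemoteAI`, a freshly constructed `ChatStatePB` reports the remote model until local AI is explicitly marked available. A minimal sketch of that behavior, using a plain local enum rather than the real ProtoBuf type:

    // Local stand-in for ModelTypePB; `#[default]` picks the derived default.
    #[derive(Clone, Debug, Default, PartialEq)]
    enum ModelType {
      LocalAI,
      #[default]
      RemoteAI,
    }

    fn main() {
      // A fresh state falls back to the remote model.
      assert_eq!(ModelType::default(), ModelType::RemoteAI);
    }
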
@@ -3,8 +3,8 @@ pub mod event_map;

 mod chat;
 pub mod chat_manager;
-mod chat_service_impl;
 pub mod entities;
+mod middleware;
 pub mod notification;
 mod persistence;
 mod protobuf;

@@ -1,12 +1,16 @@
 use crate::chat_manager::ChatUserService;
+use crate::entities::{ChatStatePB, ModelTypePB};
+use crate::notification::{send_notification, ChatNotification};
 use crate::persistence::select_single_message;
-use appflowy_local_ai_chat::llm_chat::{LocalChatLLMChat, LocalLLMSetting};
+use appflowy_local_ai::llm_chat::{LocalChatLLMChat, LocalLLMSetting};
+use appflowy_plugin::error::PluginError;
+use appflowy_plugin::util::is_apple_silicon;
 use flowy_chat_pub::cloud::{
   ChatCloudService, ChatMessage, ChatMessageType, CompletionType, MessageCursor,
   RepeatedChatMessage, RepeatedRelatedQuestion, StreamAnswer, StreamComplete,
 };
 use flowy_error::{FlowyError, FlowyResult};
-use futures::{StreamExt, TryStreamExt};
+use futures::{stream, StreamExt, TryStreamExt};
 use lib_infra::async_trait::async_trait;
 use lib_infra::future::FutureResult;
 use parking_lot::RwLock;

@@ -31,6 +35,13 @@ impl ChatService {
       setup_local_chat(&local_llm_setting, local_llm_ctrl.clone());
     }

+    let mut rx = local_llm_ctrl.subscribe_running_state();
+    tokio::spawn(async move {
+      while let Ok(state) = rx.recv().await {
+        info!("[Chat Plugin] state: {:?}", state);
+      }
+    });
+
     Self {
       user_service,
       cloud_service,

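The spawned task above suggests `subscribe_running_state` hands back something like a `tokio::sync::broadcast::Receiver` (an assumption; only the `rx.recv().await` shape is visible in the diff). A self-contained sketch of that observer pattern:

    use tokio::sync::broadcast;

    #[tokio::main]
    async fn main() {
      // Stand-in for the plugin's running-state channel.
      let (tx, mut rx) = broadcast::channel::<&'static str>(16);

      // Mirrors the spawned logger in the hunk above: drain states until
      // every sender is dropped and recv() returns Err.
      let logger = tokio::spawn(async move {
        while let Ok(state) = rx.recv().await {
          println!("[Chat Plugin] state: {:?}", state);
        }
      });

      tx.send("Connecting").unwrap();
      tx.send("Running").unwrap();
      drop(tx); // closing the channel ends the loop

      logger.await.unwrap();
    }
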
@@ -92,6 +103,20 @@ impl ChatService {
     Ok(content)
   }

+  fn handle_plugin_error(&self, err: PluginError) {
+    if matches!(
+      err,
+      PluginError::PluginNotConnected | PluginError::PeerDisconnect
+    ) {
+      send_notification("appflowy_chat_plugin", ChatNotification::ChatStateUpdated).payload(
+        ChatStatePB {
+          model_type: ModelTypePB::LocalAI,
+          available: false,
+        },
+      );
+    }
+  }
 }

 #[async_trait]

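The new helper only treats connection-level failures as "local AI went away"; other plugin errors leave the advertised state alone. A minimal sketch of that `matches!` filtering, with a hypothetical error enum standing in for `appflowy_plugin::error::PluginError`:

    // Hypothetical stand-in for PluginError, for illustration only.
    #[derive(Debug)]
    enum PluginError {
      PluginNotConnected,
      PeerDisconnect,
      Internal(String),
    }

    // Only connectivity errors should flip the "available" flag.
    fn is_unavailable(err: &PluginError) -> bool {
      matches!(
        err,
        PluginError::PluginNotConnected | PluginError::PeerDisconnect
      )
    }

    fn main() {
      assert!(is_unavailable(&PluginError::PeerDisconnect));
      assert!(!is_unavailable(&PluginError::Internal("bad request".into())));
    }
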
@@ -137,12 +162,17 @@ impl ChatCloudService for ChatService {
   ) -> Result<StreamAnswer, FlowyError> {
     if self.local_llm_setting.read().enabled {
       let content = self.get_message_content(message_id)?;
-      let stream = self
-        .local_llm_chat
-        .ask_question(chat_id, &content)
-        .await?
-        .map_err(|err| FlowyError::local_ai().with_context(err));
-      Ok(stream.boxed())
+      match self.local_llm_chat.ask_question(chat_id, &content).await {
+        Ok(stream) => Ok(
+          stream
+            .map_err(|err| FlowyError::local_ai().with_context(err))
+            .boxed(),
+        ),
+        Err(err) => {
+          self.handle_plugin_error(err);
+          Ok(stream::once(async { Err(FlowyError::local_ai_unavailable()) }).boxed())
+        },
+      }
     } else {
       self
         .cloud_service

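The rewritten arm degrades gracefully: rather than bubbling the plugin error up, it hands the caller a one-item stream whose only element is an `Err`, so the UI's streaming code path stays uniform. A self-contained sketch of that shape (plain `String` errors stand in for `FlowyError`):

    use futures::{stream, StreamExt};

    #[tokio::main]
    async fn main() {
      // One-shot error stream, like the fallback returned when the local
      // plugin is down.
      let mut answer =
        stream::once(async { Err::<String, String>("local AI unavailable".into()) }).boxed();

      // The consumer loops exactly as it would over a healthy token
      // stream; it just observes a single Err item.
      while let Some(item) = answer.next().await {
        match item {
          Ok(token) => print!("{token}"),
          Err(e) => eprintln!("stream error: {e}"),
        }
      }
    }
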
@@ -159,11 +189,19 @@ impl ChatCloudService for ChatService {
   ) -> Result<ChatMessage, FlowyError> {
     if self.local_llm_setting.read().enabled {
       let content = self.get_message_content(question_message_id)?;
-      let _answer = self
-        .local_llm_chat
-        .generate_answer(chat_id, &content)
-        .await?;
-      todo!()
+      match self.local_llm_chat.generate_answer(chat_id, &content).await {
+        Ok(answer) => {
+          let message = self
+            .cloud_service
+            .save_answer(workspace_id, chat_id, &answer, question_message_id)
+            .await?;
+          Ok(message)
+        },
+        Err(err) => {
+          self.handle_plugin_error(err);
+          Err(FlowyError::local_ai_unavailable())
+        },
+      }
     } else {
       self
         .cloud_service

@@ -223,12 +261,35 @@ impl ChatCloudService for ChatService {
 fn setup_local_chat(local_llm_setting: &LocalLLMSetting, llm_chat_ctrl: Arc<LocalChatLLMChat>) {
   if local_llm_setting.enabled {
-    if let Ok(config) = local_llm_setting.chat_config() {
+    if let Ok(mut config) = local_llm_setting.chat_config() {
       tokio::spawn(async move {
         trace!("[Chat Plugin] setup local chat: {:?}", config);
-        if let Err(err) = llm_chat_ctrl.init_chat_plugin(config).await {
-          error!("[Chat Plugin] failed to setup plugin: {:?}", err);
-        }
+        if is_apple_silicon().await.unwrap_or(false) {
+          config = config.with_device("gpu");
+        }
+
+        if cfg!(debug_assertions) {
+          config = config.with_verbose(true);
+        }
+
+        match llm_chat_ctrl.init_chat_plugin(config).await {
+          Ok(_) => {
+            send_notification("appflowy_chat_plugin", ChatNotification::ChatStateUpdated).payload(
+              ChatStatePB {
+                model_type: ModelTypePB::LocalAI,
+                available: true,
+              },
+            );
+          },
+          Err(err) => {
+            send_notification("appflowy_chat_plugin", ChatNotification::ChatStateUpdated).payload(
+              ChatStatePB {
+                model_type: ModelTypePB::LocalAI,
+                available: false,
+              },
+            );
+            error!("[Chat Plugin] failed to setup plugin: {:?}", err);
+          },
+        }
       });
     }

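`setup_local_chat` now adjusts the config before initializing the plugin (GPU on Apple silicon, verbose logging in debug builds), which is why the binding changed from `config` to `mut config`: each `with_*` call consumes the value and returns an updated one. A sketch of that consuming-builder pattern with a hypothetical `ChatConfig`, not the real `appflowy_local_ai` type:

    // Hypothetical config type illustrating the consuming-builder calls.
    #[derive(Debug, Default)]
    struct ChatConfig {
      device: String,
      verbose: bool,
    }

    impl ChatConfig {
      fn with_device(mut self, device: &str) -> Self {
        self.device = device.to_string();
        self
      }
      fn with_verbose(mut self, verbose: bool) -> Self {
        self.verbose = verbose;
        self
      }
    }

    fn main() {
      // The binding must be `mut` because each call replaces the value.
      let mut config = ChatConfig::default();
      if cfg!(debug_assertions) {
        config = config.with_verbose(true);
      }
      config = config.with_device("gpu");
      println!("{config:?}");
    }
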
frontend/rust-lib/flowy-chat/src/middleware/mod.rs (new file): 1 line
@@ -0,0 +1 @@
+pub mod chat_service_mw;

@@ -12,6 +12,7 @@ pub enum ChatNotification {
   DidReceiveChatMessage = 3,
   StreamChatMessageError = 4,
   FinishStreaming = 5,
+  ChatStateUpdated = 6,
 }

 impl std::convert::From<ChatNotification> for i32 {

@@ -27,6 +28,7 @@ impl std::convert::From<i32> for ChatNotification {
       3 => ChatNotification::DidReceiveChatMessage,
       4 => ChatNotification::StreamChatMessageError,
       5 => ChatNotification::FinishStreaming,
+      6 => ChatNotification::ChatStateUpdated,
       _ => ChatNotification::Unknown,
     }
   }

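Adding `ChatStateUpdated = 6` touches both directions of the enum/i32 conversion; if either side is missed, the value silently decodes as `Unknown`. A self-contained sketch of the round-trip (a trimmed local copy, not the crate's type):

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum ChatNotification {
      Unknown = 0,
      FinishStreaming = 5,
      ChatStateUpdated = 6,
    }

    impl From<ChatNotification> for i32 {
      fn from(n: ChatNotification) -> i32 {
        n as i32
      }
    }

    impl From<i32> for ChatNotification {
      fn from(value: i32) -> ChatNotification {
        match value {
          5 => ChatNotification::FinishStreaming,
          // The new arm; without it, 6 would fall through to Unknown.
          6 => ChatNotification::ChatStateUpdated,
          _ => ChatNotification::Unknown,
        }
      }
    }

    fn main() {
      let n = ChatNotification::ChatStateUpdated;
      assert_eq!(ChatNotification::from(i32::from(n)), n);
    }
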
@@ -1,41 +0,0 @@
-use crate::util::LocalAITest;
-use tokio_stream::StreamExt;
-
-#[tokio::test]
-async fn load_chat_model_test() {
-  if let Ok(test) = LocalAITest::new() {
-    let plugin_id = test.init_chat_plugin().await;
-    let chat_id = uuid::Uuid::new_v4().to_string();
-    let resp = test
-      .send_chat_message(&chat_id, plugin_id, "hello world")
-      .await;
-    eprintln!("chat response: {:?}", resp);
-
-    let embedding_plugin_id = test.init_embedding_plugin().await;
-    let score = test.calculate_similarity(embedding_plugin_id, &resp, "Hello! How can I help you today? Is there something specific you would like to know or discuss").await;
-    assert!(score > 0.9, "score: {}", score);
-
-    // let questions = test.related_question(&chat_id, plugin_id).await;
-    // assert_eq!(questions.len(), 3);
-    // eprintln!("related questions: {:?}", questions);
-  }
-}
-#[tokio::test]
-async fn stream_local_model_test() {
-  if let Ok(test) = LocalAITest::new() {
-    let plugin_id = test.init_chat_plugin().await;
-    let chat_id = uuid::Uuid::new_v4().to_string();
-
-    let mut resp = test
-      .stream_chat_message(&chat_id, plugin_id, "hello world")
-      .await;
-    let mut list = vec![];
-    while let Some(s) = resp.next().await {
-      list.push(String::from_utf8(s.unwrap().to_vec()).unwrap());
-    }
-
-    let answer = list.join("");
-    eprintln!("chat response: {:?}", answer);
-    tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
-  }
-}

@@ -1,2 +0,0 @@
-pub mod chat_test;
-pub mod util;

@@ -1,180 +0,0 @@
-use anyhow::Result;
-use bytes::Bytes;
-use flowy_sidecar::manager::SidecarManager;
-use serde_json::json;
-use std::path::PathBuf;
-use std::sync::Once;
-use tokio_stream::wrappers::ReceiverStream;
-
-use flowy_chat::local_ai::chat_plugin::ChatPluginOperation;
-use flowy_chat::local_ai::embedding_plugin::EmbeddingPluginOperation;
-use flowy_sidecar::core::plugin::{PluginId, PluginInfo};
-use flowy_sidecar::error::SidecarError;
-use simsimd::SpatialSimilarity;
-use std::f64;
-use tracing_subscriber::fmt::Subscriber;
-use tracing_subscriber::util::SubscriberInitExt;
-use tracing_subscriber::EnvFilter;
-
-pub struct LocalAITest {
-  config: LocalAIConfiguration,
-  manager: SidecarManager,
-}
-
-impl LocalAITest {
-  pub fn new() -> Result<Self> {
-    let config = LocalAIConfiguration::new()?;
-    let manager = SidecarManager::new();
-
-    Ok(Self { config, manager })
-  }
-  pub async fn init_chat_plugin(&self) -> PluginId {
-    let info = PluginInfo {
-      name: "chat".to_string(),
-      exec_path: self.config.chat_bin_path.clone(),
-    };
-    let plugin_id = self.manager.create_plugin(info).await.unwrap();
-    self
-      .manager
-      .init_plugin(
-        plugin_id,
-        json!({
-          "absolute_chat_model_path":self.config.chat_model_absolute_path(),
-        }),
-      )
-      .unwrap();
-
-    plugin_id
-  }
-
-  pub async fn init_embedding_plugin(&self) -> PluginId {
-    let info = PluginInfo {
-      name: "embedding".to_string(),
-      exec_path: self.config.embedding_bin_path.clone(),
-    };
-    let plugin_id = self.manager.create_plugin(info).await.unwrap();
-    let embedding_model_path = self.config.embedding_model_absolute_path();
-    self
-      .manager
-      .init_plugin(
-        plugin_id,
-        json!({
-          "absolute_model_path":embedding_model_path,
-        }),
-      )
-      .unwrap();
-    plugin_id
-  }
-
-  pub async fn send_chat_message(
-    &self,
-    chat_id: &str,
-    plugin_id: PluginId,
-    message: &str,
-  ) -> String {
-    let plugin = self.manager.get_plugin(plugin_id).await.unwrap();
-    let operation = ChatPluginOperation::new(plugin);
-    operation.send_message(chat_id, message).await.unwrap()
-  }
-
-  pub async fn stream_chat_message(
-    &self,
-    chat_id: &str,
-    plugin_id: PluginId,
-    message: &str,
-  ) -> ReceiverStream<Result<Bytes, SidecarError>> {
-    let plugin = self.manager.get_plugin(plugin_id).await.unwrap();
-    let operation = ChatPluginOperation::new(plugin);
-    operation.stream_message(chat_id, message).await.unwrap()
-  }
-
-  pub async fn related_question(
-    &self,
-    chat_id: &str,
-    plugin_id: PluginId,
-  ) -> Vec<serde_json::Value> {
-    let plugin = self.manager.get_plugin(plugin_id).await.unwrap();
-    let operation = ChatPluginOperation::new(plugin);
-    operation.get_related_questions(chat_id).await.unwrap()
-  }
-
-  pub async fn calculate_similarity(
-    &self,
-    plugin_id: PluginId,
-    message1: &str,
-    message2: &str,
-  ) -> f64 {
-    let plugin = self.manager.get_plugin(plugin_id).await.unwrap();
-    let operation = EmbeddingPluginOperation::new(plugin);
-    let left = operation.get_embeddings(message1).await.unwrap();
-    let right = operation.get_embeddings(message2).await.unwrap();
-
-    let actual_embedding_flat = flatten_vec(left);
-    let expected_embedding_flat = flatten_vec(right);
-    let distance = f64::cosine(&actual_embedding_flat, &expected_embedding_flat)
-      .expect("Vectors must be of the same length");
-
-    distance.cos()
-  }
-}
-
-// Function to flatten Vec<Vec<f64>> into Vec<f64>
-fn flatten_vec(vec: Vec<Vec<f64>>) -> Vec<f64> {
-  vec.into_iter().flatten().collect()
-}
-
-pub struct LocalAIConfiguration {
-  model_dir: String,
-  chat_bin_path: PathBuf,
-  chat_model_name: String,
-  embedding_bin_path: PathBuf,
-  embedding_model_name: String,
-}
-
-impl LocalAIConfiguration {
-  pub fn new() -> Result<Self> {
-    dotenv::dotenv().ok();
-    setup_log();
-
-    // load from .env
-    let model_dir = dotenv::var("LOCAL_AI_MODEL_DIR")?;
-    let chat_bin_path = PathBuf::from(dotenv::var("CHAT_BIN_PATH")?);
-    let chat_model_name = dotenv::var("LOCAL_AI_CHAT_MODEL_NAME")?;
-
-    let embedding_bin_path = PathBuf::from(dotenv::var("EMBEDDING_BIN_PATH")?);
-    let embedding_model_name = dotenv::var("LOCAL_AI_EMBEDDING_MODEL_NAME")?;
-
-    Ok(Self {
-      model_dir,
-      chat_bin_path,
-      chat_model_name,
-      embedding_bin_path,
-      embedding_model_name,
-    })
-  }
-
-  pub fn chat_model_absolute_path(&self) -> String {
-    format!("{}/{}", self.model_dir, self.chat_model_name)
-  }
-
-  pub fn embedding_model_absolute_path(&self) -> String {
-    format!("{}/{}", self.model_dir, self.embedding_model_name)
-  }
-}
-
-pub fn setup_log() {
-  static START: Once = Once::new();
-  START.call_once(|| {
-    let level = "trace";
-    let mut filters = vec![];
-    filters.push(format!("flowy_sidecar={}", level));
-    std::env::set_var("RUST_LOG", filters.join(","));
-
-    let subscriber = Subscriber::builder()
-      .with_env_filter(EnvFilter::from_default_env())
-      .with_line_number(true)
-      .with_ansi(true)
-      .finish();
-    subscriber.try_init().unwrap();
-  });
-}

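One quirk in the deleted helper: `simsimd`'s `cosine` appears to compute a cosine distance, yet `calculate_similarity` returns `distance.cos()`, which is not the usual distance-to-similarity conversion (typically `1.0 - distance`). For reference, plain cosine similarity over two equal-length vectors looks like this (a from-scratch sketch, not the simsimd API):

    // Cosine similarity: dot(a, b) / (|a| * |b|), in [-1, 1].
    fn cosine_similarity(a: &[f64], b: &[f64]) -> f64 {
      assert_eq!(a.len(), b.len(), "vectors must be of the same length");
      let dot: f64 = a.iter().zip(b).map(|(x, y)| x * y).sum();
      let norm_a = a.iter().map(|x| x * x).sum::<f64>().sqrt();
      let norm_b = b.iter().map(|x| x * x).sum::<f64>().sqrt();
      dot / (norm_a * norm_b)
    }

    fn main() {
      let a = [1.0, 0.0, 1.0];
      let b = [1.0, 0.0, 1.0];
      // Identical directions give similarity ~1.0.
      assert!((cosine_similarity(&a, &b) - 1.0).abs() < 1e-9);
    }
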
@@ -57,8 +57,8 @@ pub fn create_log_filter(
   filters.push(format!("lib_infra={}", level));
   filters.push(format!("flowy_search={}", level));
   filters.push(format!("flowy_chat={}", level));
-  filters.push(format!("flowy_chat={}", level));
-  filters.push(format!("flowy_sidecar={}", level));
+  filters.push(format!("appflowy_local_ai={}", level));
+  filters.push(format!("appflowy_plugin={}", level));
   filters.push(format!("flowy_ai={}", level));
   // Enable the frontend logs. DO NOT DISABLE.
   // These logs are essential for debugging and verifying frontend behavior.

@@ -283,6 +283,9 @@ pub enum ErrorCode {

   #[error("Local AI error")]
   LocalAIError = 98,
+
+  #[error("Local AI unavailable")]
+  LocalAIUnavailable = 99,
 }

 impl ErrorCode {

@@ -119,6 +119,7 @@ impl FlowyError {
   );
   static_flowy_error!(workspace_data_not_match, ErrorCode::WorkspaceDataNotMatch);
   static_flowy_error!(local_ai, ErrorCode::LocalAIError);
+  static_flowy_error!(local_ai_unavailable, ErrorCode::LocalAIUnavailable);
 }

 impl std::convert::From<ErrorCode> for FlowyError {