chore: remove lru (#5008)

* chore: remove lru

* chore: update logs

* chore: clippy
Author: Nathan.fooo
Date: 2024-03-30 16:28:24 +08:00
Committed by: GitHub
Parent: c2c84a5812
Commit: adc2ee755e
33 changed files with 384 additions and 317 deletions

View File

@@ -47,7 +47,6 @@ chrono-tz = "0.8.2"
csv = "1.1.6"
strum = "0.25"
strum_macros = "0.25"
lru.workspace = true
validator = { version = "0.16.0", features = ["derive"] }
[dev-dependencies]

View File

@@ -14,7 +14,7 @@ pub fn init(database_manager: Weak<DatabaseManager>) -> AFPlugin {
.state(database_manager);
plugin
.event(DatabaseEvent::GetDatabase, get_database_data_handler)
.event(DatabaseEvent::OpenDatabase, get_database_data_handler)
.event(DatabaseEvent::GetDatabaseData, get_database_data_handler)
.event(DatabaseEvent::GetDatabaseId, get_database_id_handler)
.event(DatabaseEvent::GetDatabaseSetting, get_database_setting_handler)
.event(DatabaseEvent::UpdateDatabaseSetting, update_database_setting_handler)
@@ -128,7 +128,7 @@ pub enum DatabaseEvent {
DeleteAllSorts = 6,
#[event(input = "DatabaseViewIdPB")]
OpenDatabase = 7,
GetDatabaseData = 7,
/// [GetFields] event is used to get the database's fields.
///
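The hunk above renames the OpenDatabase event to GetDatabaseData while keeping event code 7 and the same handler (get_database_data_handler), so callers that already send code 7 keep working. A minimal sketch of the idea, with the real event/proto macros stubbed out (the plain enum below is illustrative, not the generated type):

// Sketch only: the real enum is produced by event macros; this shows that the
// numeric event code is unchanged, only the variant name differs.
#[derive(Debug, Clone, Copy)]
#[repr(u8)]
enum DatabaseEventSketch {
    GetDatabaseData = 7, // previously OpenDatabase = 7
}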

View File

@@ -1,19 +1,17 @@
use anyhow::anyhow;
use std::collections::HashMap;
use std::num::NonZeroUsize;
use std::sync::{Arc, Weak};
use collab::core::collab::{DocStateSource, MutexCollab};
use collab_database::blocks::BlockEvent;
use collab_database::database::{get_inline_view_id, DatabaseData, MutexDatabase};
use collab_database::database::{DatabaseData, MutexDatabase};
use collab_database::error::DatabaseError;
use collab_database::user::{
use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout};
use collab_database::workspace_database::{
CollabDocStateByOid, CollabFuture, DatabaseCollabService, DatabaseMeta, WorkspaceDatabase,
};
use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout};
use collab_entity::CollabType;
use collab_plugins::local_storage::kv::KVTransactionDB;
use lru::LruCache;
use tokio::sync::{Mutex, RwLock};
use tracing::{event, instrument, trace};
@@ -40,7 +38,7 @@ pub struct DatabaseManager {
user: Arc<dyn DatabaseUser>,
workspace_database: Arc<RwLock<Option<Arc<WorkspaceDatabase>>>>,
task_scheduler: Arc<RwLock<TaskDispatcher>>,
editors: Mutex<LruCache<String, Arc<DatabaseEditor>>>,
editors: Mutex<HashMap<String, Arc<DatabaseEditor>>>,
collab_builder: Arc<AppFlowyCollabBuilder>,
cloud_service: Arc<dyn DatabaseCloudService>,
}
@@ -52,12 +50,11 @@ impl DatabaseManager {
collab_builder: Arc<AppFlowyCollabBuilder>,
cloud_service: Arc<dyn DatabaseCloudService>,
) -> Self {
let editors = Mutex::new(LruCache::new(NonZeroUsize::new(5).unwrap()));
Self {
user: database_user,
workspace_database: Default::default(),
task_scheduler,
editors,
editors: Default::default(),
collab_builder,
cloud_service,
}
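The net effect of the two hunks above: the bounded LruCache of five editors becomes a plain HashMap behind a tokio Mutex (built via Default), so an editor now lives until it is removed explicitly when its last view closes instead of being evicted by cache pressure. A minimal sketch with stubbed types, assuming simplified signatures:

// Sketch with stand-in types: the editor map is now unbounded, and eviction
// becomes an explicit remove() call rather than LRU pressure.
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::Mutex;

struct DatabaseEditor; // stand-in for the real editor

#[derive(Default)]
struct EditorMap {
    inner: Mutex<HashMap<String, Arc<DatabaseEditor>>>,
}

impl EditorMap {
    async fn insert(&self, database_id: &str, editor: Arc<DatabaseEditor>) {
        // HashMap::insert never evicts another entry, unlike LruCache::put,
        // which dropped the least recently used editor once the capacity of 5 was hit.
        self.inner.lock().await.insert(database_id.to_string(), editor);
    }

    async fn remove(&self, database_id: &str) -> Option<Arc<DatabaseEditor>> {
        self.inner.lock().await.remove(database_id)
    }
}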
@@ -84,7 +81,7 @@ impl DatabaseManager {
self.task_scheduler.write().await.clear_task();
// 2. Release all existing editors
for (_, editor) in self.editors.lock().await.iter() {
editor.close().await;
editor.close_all_views().await;
}
self.editors.lock().await.clear();
// 3. Clear the workspace database
@@ -111,8 +108,13 @@ impl DatabaseManager {
)
.await
{
Ok(doc_state) => {
workspace_database_doc_state = DocStateSource::FromDocState(doc_state);
Ok(doc_state) => match doc_state {
Some(doc_state) => {
workspace_database_doc_state = DocStateSource::FromDocState(doc_state);
},
None => {
workspace_database_doc_state = DocStateSource::FromDisk;
},
},
Err(err) => {
return Err(FlowyError::record_not_found().with_context(format!(
@@ -136,7 +138,7 @@ impl DatabaseManager {
collab_db.clone(),
workspace_database_doc_state,
config.clone(),
);
)?;
let workspace_database =
WorkspaceDatabase::open(uid, collab, collab_db, config, collab_builder);
*self.workspace_database.write().await = Some(Arc::new(workspace_database));
@@ -163,17 +165,12 @@ impl DatabaseManager {
pub async fn get_database_inline_view_id(&self, database_id: &str) -> FlowyResult<String> {
let wdb = self.get_workspace_database().await?;
let database_collab = wdb.get_database_collab(database_id).await.ok_or_else(|| {
let database_collab = wdb.get_database(database_id).await.ok_or_else(|| {
FlowyError::record_not_found().with_context(format!("The database:{} not found", database_id))
})?;
let inline_view_id = get_inline_view_id(&database_collab.lock()).ok_or_else(|| {
FlowyError::record_not_found().with_context(format!(
"Can't find the inline view for database:{}",
database_id
))
})?;
Ok(inline_view_id)
let lock_guard = database_collab.lock();
Ok(lock_guard.get_inline_view_id())
}
pub async fn get_all_databases_meta(&self) -> Vec<DatabaseMeta> {
@@ -218,7 +215,7 @@ impl DatabaseManager {
}
pub async fn open_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
trace!("create new editor for database {}", database_id);
trace!("open database editor:{}", database_id);
let database = self
.get_workspace_database()
.await?
@@ -234,18 +231,35 @@ impl DatabaseManager {
.editors
.lock()
.await
.put(database_id.to_string(), editor.clone());
.insert(database_id.to_string(), editor.clone());
Ok(editor)
}
pub async fn open_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
let view_id = view_id.as_ref();
let wdb = self.get_workspace_database().await?;
if let Some(database_id) = wdb.get_database_id_with_view_id(view_id) {
wdb.open_database(&database_id);
}
Ok(())
}
pub async fn close_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
let view_id = view_id.as_ref();
let wdb = self.get_workspace_database().await?;
let database_id = wdb.get_database_id_with_view_id(view_id);
if let Some(database_id) = database_id {
let mut editors = self.editors.lock().await;
let mut should_remove = false;
if let Some(editor) = editors.get(&database_id) {
editor.close_view(view_id).await;
should_remove = editor.num_views().await == 0;
}
if should_remove {
trace!("remove database editor:{}", database_id);
editors.remove(&database_id);
wdb.close_database(&database_id);
}
}
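Read together with num_views() further down, the new close flow is: close the requested view on the shared editor, and only when that editor reports zero remaining views remove it from the map and close the database in the workspace database. A short sketch of that flow under stubbed types (the free function below is illustrative, not the real method):

// Sketch: mirrors the close_database_view logic above with stand-in types.
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::Mutex;

struct DatabaseEditor;
impl DatabaseEditor {
    async fn close_view(&self, _view_id: &str) {}
    async fn num_views(&self) -> usize {
        0
    }
}

async fn close_database_view(
    editors: &Mutex<HashMap<String, Arc<DatabaseEditor>>>,
    database_id: &str,
    view_id: &str,
) {
    let mut editors = editors.lock().await;
    let mut should_remove = false;
    if let Some(editor) = editors.get(database_id) {
        // Close just this view on the shared editor first...
        editor.close_view(view_id).await;
        // ...then check whether any views remain open.
        should_remove = editor.num_views().await == 0;
    }
    if should_remove {
        // Last view gone: drop the editor explicitly; this is the cleanup the
        // bounded LruCache used to perform implicitly via eviction.
        editors.remove(database_id);
    }
}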
@@ -428,15 +442,15 @@ impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
let weak_cloud_service = Arc::downgrade(&self.cloud_service);
Box::pin(async move {
match weak_cloud_service.upgrade() {
None => {
tracing::warn!("Cloud service is dropped");
Ok(DocStateSource::FromDocState(vec![]))
},
None => Err(DatabaseError::Internal(anyhow!("Cloud service is dropped"))),
Some(cloud_service) => {
let doc_state = cloud_service
.get_database_object_doc_state(&object_id, object_ty, &workspace_id)
.await?;
Ok(DocStateSource::FromDocState(doc_state))
match doc_state {
None => Ok(DocStateSource::FromDisk),
Some(doc_state) => Ok(DocStateSource::FromDocState(doc_state)),
}
},
}
})
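The hunk above changes the fallback behaviour: a missing remote doc state now maps to DocStateSource::FromDisk, and a dropped cloud service is reported as DatabaseError::Internal instead of being silently treated as an empty doc state. A compact sketch of the Option-to-source mapping, with the enum simplified to the two variants used here and a hypothetical helper name:

// Sketch only: DocStateSource is reduced to the variants seen in the diff;
// to_doc_state_source is an illustrative helper, not a real API.
enum DocStateSource {
    FromDisk,
    FromDocState(Vec<u8>),
}

fn to_doc_state_source(doc_state: Option<Vec<u8>>) -> DocStateSource {
    match doc_state {
        // No remote state: build the collab from whatever is persisted locally.
        None => DocStateSource::FromDisk,
        // Remote state present: apply it when constructing the collab.
        Some(bytes) => DocStateSource::FromDocState(bytes),
    }
}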
@@ -473,18 +487,16 @@ impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
collab_db: Weak<CollabKVDB>,
collab_raw_data: DocStateSource,
persistence_config: CollabPersistenceConfig,
) -> Arc<MutexCollab> {
self
.collab_builder
.build_with_config(
uid,
object_id,
object_type,
collab_db,
collab_raw_data,
persistence_config,
CollabBuilderConfig::default().sync_enable(true),
)
.unwrap()
) -> Result<Arc<MutexCollab>, DatabaseError> {
let collab = self.collab_builder.build_with_config(
uid,
object_id,
object_type.clone(),
collab_db.clone(),
collab_raw_data,
persistence_config,
CollabBuilderConfig::default().sync_enable(true),
)?;
Ok(collab)
}
}
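In the hunk above, build_collab now returns Result<Arc<MutexCollab>, DatabaseError> and forwards builder failures with ? instead of calling unwrap(), so callers see an error rather than a panic. A tiny sketch of that pattern with placeholder types:

// Sketch with placeholder types: the point is `?` propagation versus unwrap().
use std::sync::Arc;

#[derive(Debug)]
struct DatabaseError(String);

struct MutexCollab;

fn builder_build() -> Result<MutexCollab, DatabaseError> {
    Ok(MutexCollab)
}

fn build_collab() -> Result<Arc<MutexCollab>, DatabaseError> {
    // Previously the builder result was unwrap()ed, so any failure panicked;
    // now the error is returned to the caller.
    let collab = builder_build()?;
    Ok(Arc::new(collab))
}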

View File

@@ -1,22 +1,3 @@
use std::collections::HashMap;
use std::sync::Arc;
use collab_database::database::MutexDatabase;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cell, Cells, Row, RowCell, RowDetail, RowId};
use collab_database::views::{
DatabaseLayout, DatabaseView, FilterMap, LayoutSetting, OrderObjectPosition,
};
use futures::StreamExt;
use lib_infra::box_any::BoxAny;
use tokio::sync::{broadcast, RwLock};
use tracing::{event, warn};
use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult};
use lib_dispatch::prelude::af_spawn;
use lib_infra::future::{to_fut, Fut, FutureResult};
use lib_infra::priority_task::TaskDispatcher;
use crate::entities::*;
use crate::notification::{send_notification, DatabaseNotification};
use crate::services::calculations::Calculation;
@@ -39,6 +20,22 @@ use crate::services::group::{default_group_setting, GroupChangeset, GroupSetting
use crate::services::share::csv::{CSVExport, CSVFormat};
use crate::services::sort::Sort;
use crate::utils::cache::AnyTypeCache;
use collab_database::database::MutexDatabase;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cell, Cells, Row, RowCell, RowDetail, RowId};
use collab_database::views::{
DatabaseLayout, DatabaseView, FilterMap, LayoutSetting, OrderObjectPosition,
};
use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult};
use futures::StreamExt;
use lib_dispatch::prelude::af_spawn;
use lib_infra::box_any::BoxAny;
use lib_infra::future::{to_fut, Fut, FutureResult};
use lib_infra::priority_task::TaskDispatcher;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::{broadcast, RwLock};
use tracing::{event, warn};
#[derive(Clone)]
pub struct DatabaseEditor {
@@ -115,23 +112,16 @@ impl DatabaseEditor {
})
}
/// Returns bool value indicating whether the database is empty.
///
pub async fn close_view(&self, view_id: &str) -> bool {
// If the database is empty, flush the database to the disk.
if self.database_views.editors().await.len() == 1 {
if let Some(database) = self.database.try_lock() {
let _ = database.flush();
}
}
self.database_views.close_view(view_id).await
pub async fn close_view(&self, view_id: &str) {
self.database_views.close_view(view_id).await;
}
pub async fn num_views(&self) -> usize {
self.database_views.num_editors().await
}
#[tracing::instrument(level = "debug", skip_all)]
pub async fn close(&self) {
if let Some(database) = self.database.try_lock() {
let _ = database.flush();
}
pub async fn close_all_views(&self) {
for view in self.database_views.editors().await {
view.close().await;
}
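In the hunk above, close() (which flushed the database to disk when views went away) is replaced by close_all_views(), which simply closes every open view editor; close_view() likewise just delegates to the view collection and no longer flushes. A sketch of the new shape with stubbed types:

// Sketch: stand-in view editor; close_all_views walks every open view editor.
use std::sync::Arc;

struct DatabaseViewEditor;

impl DatabaseViewEditor {
    async fn close(&self) {}
}

async fn close_all_views(view_editors: Vec<Arc<DatabaseViewEditor>>) {
    for view in view_editors {
        // Each view editor tears down its own state; there is no explicit
        // database flush on this path any more.
        view.close().await;
    }
}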

View File

@@ -20,7 +20,7 @@ pub struct DatabaseViews {
database: Arc<MutexDatabase>,
cell_cache: CellCache,
view_operation: Arc<dyn DatabaseViewOperation>,
editor_by_view_id: Arc<RwLock<EditorByViewId>>,
view_editors: Arc<RwLock<EditorByViewId>>,
}
impl DatabaseViews {
@@ -28,41 +28,38 @@ impl DatabaseViews {
database: Arc<MutexDatabase>,
cell_cache: CellCache,
view_operation: Arc<dyn DatabaseViewOperation>,
editor_by_view_id: Arc<RwLock<EditorByViewId>>,
view_editors: Arc<RwLock<EditorByViewId>>,
) -> FlowyResult<Self> {
Ok(Self {
database,
view_operation,
cell_cache,
editor_by_view_id,
view_editors,
})
}
pub async fn close_view(&self, view_id: &str) -> bool {
let mut editor_map = self.editor_by_view_id.write().await;
if let Some(view) = editor_map.remove(view_id) {
pub async fn close_view(&self, view_id: &str) {
let mut lock_guard = self.view_editors.write().await;
if let Some(view) = lock_guard.remove(view_id) {
view.close().await;
}
editor_map.is_empty()
}
pub async fn num_editors(&self) -> usize {
self.view_editors.read().await.len()
}
pub async fn editors(&self) -> Vec<Arc<DatabaseViewEditor>> {
self
.editor_by_view_id
.read()
.await
.values()
.cloned()
.collect()
self.view_editors.read().await.values().cloned().collect()
}
pub async fn get_view_editor(&self, view_id: &str) -> FlowyResult<Arc<DatabaseViewEditor>> {
debug_assert!(!view_id.is_empty());
if let Some(editor) = self.editor_by_view_id.read().await.get(view_id) {
if let Some(editor) = self.view_editors.read().await.get(view_id) {
return Ok(editor.clone());
}
let mut editor_map = self.editor_by_view_id.try_write().map_err(|err| {
let mut editor_map = self.view_editors.try_write().map_err(|err| {
FlowyError::internal().with_context(format!(
"fail to acquire the lock of editor_by_view_id: {}",
err