2024-03-30 08:28:24 +00:00
|
|
|
use anyhow::anyhow;
|
2023-12-29 05:02:27 +00:00
|
|
|
use std::collections::HashMap;
|
2023-07-29 01:46:24 +00:00
|
|
|
use std::sync::{Arc, Weak};
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2024-04-15 06:50:28 +00:00
|
|
|
use collab::core::collab::{DataSource, MutexCollab};
|
2024-04-30 12:40:03 +00:00
|
|
|
use collab_database::database::DatabaseData;
|
2023-07-14 05:37:13 +00:00
|
|
|
use collab_database::error::DatabaseError;
|
2024-05-05 14:04:34 +00:00
|
|
|
use collab_database::rows::RowId;
|
2024-03-30 08:28:24 +00:00
|
|
|
use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout};
|
|
|
|
use collab_database::workspace_database::{
|
2024-03-15 14:58:55 +00:00
|
|
|
CollabDocStateByOid, CollabFuture, DatabaseCollabService, DatabaseMeta, WorkspaceDatabase,
|
2023-07-14 05:37:13 +00:00
|
|
|
};
|
2023-10-10 11:05:55 +00:00
|
|
|
use collab_entity::CollabType;
|
2024-01-22 05:34:15 +00:00
|
|
|
use collab_plugins::local_storage::kv::KVTransactionDB;
|
2023-11-14 09:21:09 +00:00
|
|
|
use tokio::sync::{Mutex, RwLock};
|
2023-11-05 06:00:24 +00:00
|
|
|
use tracing::{event, instrument, trace};
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-12-27 03:42:39 +00:00
|
|
|
use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig};
|
2024-01-04 16:05:38 +00:00
|
|
|
use collab_integrate::{CollabKVAction, CollabKVDB, CollabPersistenceConfig};
|
2024-05-05 14:04:34 +00:00
|
|
|
use flowy_database_pub::cloud::{DatabaseCloudService, SummaryRowContent};
|
2023-05-25 15:22:23 +00:00
|
|
|
use flowy_error::{internal_error, FlowyError, FlowyResult};
|
2024-05-05 14:04:34 +00:00
|
|
|
use lib_infra::box_any::BoxAny;
|
2024-01-11 06:42:03 +00:00
|
|
|
use lib_infra::priority_task::TaskDispatcher;
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2024-04-30 12:40:03 +00:00
|
|
|
use crate::entities::{DatabaseLayoutPB, DatabaseSnapshotPB};
|
2024-05-05 14:04:34 +00:00
|
|
|
use crate::services::cell::stringify_cell;
|
2023-07-14 05:37:13 +00:00
|
|
|
use crate::services::database::DatabaseEditor;
|
2023-06-20 15:48:34 +00:00
|
|
|
use crate::services::database_view::DatabaseLayoutDepsResolver;
|
2023-09-01 14:40:17 +00:00
|
|
|
use crate::services::field_settings::default_field_settings_by_layout_map;
|
2023-05-27 13:29:18 +00:00
|
|
|
use crate::services::share::csv::{CSVFormat, CSVImporter, ImportResult};
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-07-29 01:46:24 +00:00
|
|
|
/// Abstraction over the signed-in user session as required by the database
/// module: identifies the user and exposes the workspace/collab storage ids.
pub trait DatabaseUser: Send + Sync {
  /// Returns the id of the signed-in user.
  fn user_id(&self) -> Result<i64, FlowyError>;
  /// Returns a weak handle to the user's collab key-value database.
  fn collab_db(&self, uid: i64) -> Result<Weak<CollabKVDB>, FlowyError>;
  /// Returns the id of the active workspace.
  fn workspace_id(&self) -> Result<String, FlowyError>;
  /// Returns the object id of the workspace-level database collab object.
  fn workspace_database_object_id(&self) -> Result<String, FlowyError>;
}
|
|
|
|
|
|
|
|
/// Owns the per-workspace database state: the workspace database index, the
/// opened `DatabaseEditor`s, and the services they depend on.
pub struct DatabaseManager {
  // Provides user/workspace info (uid, collab db, workspace id).
  user: Arc<dyn DatabaseUser>,
  // Lazily-initialized workspace database; `None` until `initialize` runs.
  workspace_database: Arc<RwLock<Option<Arc<WorkspaceDatabase>>>>,
  // Shared task dispatcher handed to each `DatabaseEditor`.
  task_scheduler: Arc<RwLock<TaskDispatcher>>,
  // Opened database editors keyed by database id.
  editors: Mutex<HashMap<String, Arc<DatabaseEditor>>>,
  // Builds collab instances for database objects.
  collab_builder: Arc<AppFlowyCollabBuilder>,
  // Remote service used to fetch doc states, snapshots and row summaries.
  cloud_service: Arc<dyn DatabaseCloudService>,
}
|
|
|
|
|
2023-07-29 01:46:24 +00:00
|
|
|
impl DatabaseManager {
|
2023-04-28 06:08:53 +00:00
|
|
|
/// Creates a new `DatabaseManager`. The workspace database is not opened
/// here; call [`DatabaseManager::initialize`] to set it up for a user.
pub fn new(
  database_user: Arc<dyn DatabaseUser>,
  task_scheduler: Arc<RwLock<TaskDispatcher>>,
  collab_builder: Arc<AppFlowyCollabBuilder>,
  cloud_service: Arc<dyn DatabaseCloudService>,
) -> Self {
  Self {
    user: database_user,
    // Starts as `None`/empty; populated by `initialize`.
    workspace_database: Default::default(),
    task_scheduler,
    editors: Default::default(),
    collab_builder,
    cloud_service,
  }
}
|
|
|
|
|
2024-01-04 16:05:38 +00:00
|
|
|
/// Reports whether the collab identified by `object_id` is already persisted
/// in the local collab database for the given user. Returns `false` when the
/// database handle can no longer be upgraded.
fn is_collab_exist(&self, uid: i64, collab_db: &Weak<CollabKVDB>, object_id: &str) -> bool {
  collab_db
    .upgrade()
    .map_or(false, |db| db.read_txn().is_exist(uid, object_id))
}
|
|
|
|
|
2024-02-03 21:49:45 +00:00
|
|
|
/// When initialize with new workspace, all the resources will be cleared.
|
2024-04-26 01:44:07 +00:00
|
|
|
pub async fn initialize(&self, uid: i64) -> FlowyResult<()> {
|
2024-02-03 21:49:45 +00:00
|
|
|
// 1. Clear all existing tasks
|
2023-11-05 06:00:24 +00:00
|
|
|
self.task_scheduler.write().await.clear_task();
|
2024-02-03 21:49:45 +00:00
|
|
|
// 2. Release all existing editors
|
|
|
|
for (_, editor) in self.editors.lock().await.iter() {
|
2024-03-30 08:28:24 +00:00
|
|
|
editor.close_all_views().await;
|
2024-02-03 21:49:45 +00:00
|
|
|
}
|
2023-11-14 09:21:09 +00:00
|
|
|
self.editors.lock().await.clear();
|
2024-02-03 21:49:45 +00:00
|
|
|
// 3. Clear the workspace database
|
2024-04-26 01:44:07 +00:00
|
|
|
if let Some(old_workspace_database) = self.workspace_database.write().await.take() {
|
|
|
|
old_workspace_database.close();
|
|
|
|
}
|
2023-11-14 09:21:09 +00:00
|
|
|
*self.workspace_database.write().await = None;
|
2023-11-05 06:00:24 +00:00
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
let collab_db = self.user.collab_db(uid)?;
|
|
|
|
let collab_builder = UserDatabaseCollabServiceImpl {
|
2024-04-26 01:44:07 +00:00
|
|
|
user: self.user.clone(),
|
2023-07-14 05:37:13 +00:00
|
|
|
collab_builder: self.collab_builder.clone(),
|
|
|
|
cloud_service: self.cloud_service.clone(),
|
|
|
|
};
|
2024-01-07 05:59:39 +00:00
|
|
|
let config = CollabPersistenceConfig::new().snapshot_per_update(100);
|
2023-07-14 05:37:13 +00:00
|
|
|
|
2024-04-26 01:44:07 +00:00
|
|
|
let workspace_id = self.user.workspace_id()?;
|
|
|
|
let workspace_database_object_id = self.user.workspace_database_object_id()?;
|
2024-04-15 06:50:28 +00:00
|
|
|
let mut workspace_database_doc_state = DataSource::Disk;
|
2023-07-14 05:37:13 +00:00
|
|
|
// If the workspace database not exist in disk, try to fetch from remote.
|
2024-01-30 16:43:55 +00:00
|
|
|
if !self.is_collab_exist(uid, &collab_db, &workspace_database_object_id) {
|
2023-10-07 01:58:44 +00:00
|
|
|
trace!("workspace database not exist, try to fetch from remote");
|
2023-07-14 05:37:13 +00:00
|
|
|
match self
|
|
|
|
.cloud_service
|
2024-02-03 21:49:45 +00:00
|
|
|
.get_database_object_doc_state(
|
2024-01-30 16:43:55 +00:00
|
|
|
&workspace_database_object_id,
|
2023-10-23 03:43:31 +00:00
|
|
|
CollabType::WorkspaceDatabase,
|
|
|
|
&workspace_id,
|
|
|
|
)
|
2023-07-14 05:37:13 +00:00
|
|
|
.await
|
|
|
|
{
|
2024-03-30 08:28:24 +00:00
|
|
|
Ok(doc_state) => match doc_state {
|
|
|
|
Some(doc_state) => {
|
2024-04-15 06:50:28 +00:00
|
|
|
workspace_database_doc_state = DataSource::DocStateV1(doc_state);
|
2024-03-30 08:28:24 +00:00
|
|
|
},
|
|
|
|
None => {
|
2024-04-15 06:50:28 +00:00
|
|
|
workspace_database_doc_state = DataSource::Disk;
|
2024-03-30 08:28:24 +00:00
|
|
|
},
|
2023-08-17 15:46:39 +00:00
|
|
|
},
|
2023-07-14 05:37:13 +00:00
|
|
|
Err(err) => {
|
2023-08-21 16:19:15 +00:00
|
|
|
return Err(FlowyError::record_not_found().with_context(format!(
|
2023-07-14 05:37:13 +00:00
|
|
|
"get workspace database :{} failed: {}",
|
2024-01-30 16:43:55 +00:00
|
|
|
workspace_database_object_id, err,
|
2023-07-14 05:37:13 +00:00
|
|
|
)));
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Construct the workspace database.
|
2023-11-05 06:00:24 +00:00
|
|
|
event!(
|
|
|
|
tracing::Level::INFO,
|
|
|
|
"open aggregate database views object: {}",
|
2024-01-30 16:43:55 +00:00
|
|
|
&workspace_database_object_id
|
2023-11-05 06:00:24 +00:00
|
|
|
);
|
2023-07-14 05:37:13 +00:00
|
|
|
let collab = collab_builder.build_collab_with_config(
|
|
|
|
uid,
|
2024-01-30 16:43:55 +00:00
|
|
|
&workspace_database_object_id,
|
2023-07-29 01:46:24 +00:00
|
|
|
CollabType::WorkspaceDatabase,
|
2023-07-14 05:37:13 +00:00
|
|
|
collab_db.clone(),
|
2024-02-03 21:49:45 +00:00
|
|
|
workspace_database_doc_state,
|
2024-01-06 04:46:11 +00:00
|
|
|
config.clone(),
|
2024-03-30 08:28:24 +00:00
|
|
|
)?;
|
2023-07-14 05:37:13 +00:00
|
|
|
let workspace_database =
|
|
|
|
WorkspaceDatabase::open(uid, collab, collab_db, config, collab_builder);
|
|
|
|
*self.workspace_database.write().await = Some(Arc::new(workspace_database));
|
2023-04-28 06:08:53 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-10-24 12:11:06 +00:00
|
|
|
#[instrument(
|
|
|
|
name = "database_initialize_with_new_user",
|
|
|
|
level = "debug",
|
|
|
|
skip_all,
|
|
|
|
err
|
|
|
|
)]
|
2024-04-26 01:44:07 +00:00
|
|
|
pub async fn initialize_with_new_user(&self, user_id: i64) -> FlowyResult<()> {
|
|
|
|
self.initialize(user_id).await?;
|
2023-04-28 06:08:53 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2024-03-15 14:58:55 +00:00
|
|
|
pub async fn get_database_inline_view_id(&self, database_id: &str) -> FlowyResult<String> {
|
2024-05-05 14:04:34 +00:00
|
|
|
let wdb = self.get_database_indexer().await?;
|
2024-03-30 08:28:24 +00:00
|
|
|
let database_collab = wdb.get_database(database_id).await.ok_or_else(|| {
|
2024-03-15 14:58:55 +00:00
|
|
|
FlowyError::record_not_found().with_context(format!("The database:{} not found", database_id))
|
|
|
|
})?;
|
|
|
|
|
2024-03-30 08:28:24 +00:00
|
|
|
let lock_guard = database_collab.lock();
|
|
|
|
Ok(lock_guard.get_inline_view_id())
|
2024-03-15 14:58:55 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the metadata of every database tracked by the workspace indexer,
/// or an empty list when the indexer is not initialized yet.
pub async fn get_all_databases_meta(&self) -> Vec<DatabaseMeta> {
  match self.get_database_indexer().await {
    Ok(wdb) => wdb.get_all_database_meta(),
    Err(_) => Vec::new(),
  }
}
|
|
|
|
|
2024-05-05 14:04:34 +00:00
|
|
|
pub async fn update_database_indexing(
|
2023-12-29 05:02:27 +00:00
|
|
|
&self,
|
|
|
|
view_ids_by_database_id: HashMap<String, Vec<String>>,
|
|
|
|
) -> FlowyResult<()> {
|
2024-05-05 14:04:34 +00:00
|
|
|
let wdb = self.get_database_indexer().await?;
|
2023-12-29 05:02:27 +00:00
|
|
|
view_ids_by_database_id
|
|
|
|
.into_iter()
|
|
|
|
.for_each(|(database_id, view_ids)| {
|
|
|
|
wdb.track_database(&database_id, view_ids);
|
|
|
|
});
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-05-25 15:22:23 +00:00
|
|
|
/// Resolves the database that owns `view_id` and returns its editor,
/// opening the database if needed.
pub async fn get_database_with_view_id(&self, view_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
  let database_id = self.get_database_id_with_view_id(view_id).await?;
  self.get_database(&database_id).await
}
|
|
|
|
|
|
|
|
pub async fn get_database_id_with_view_id(&self, view_id: &str) -> FlowyResult<String> {
|
2024-05-05 14:04:34 +00:00
|
|
|
let wdb = self.get_database_indexer().await?;
|
2023-07-14 05:37:13 +00:00
|
|
|
wdb.get_database_id_with_view_id(view_id).ok_or_else(|| {
|
|
|
|
FlowyError::record_not_found()
|
2023-08-21 16:19:15 +00:00
|
|
|
.with_context(format!("The database for view id: {} not found", view_id))
|
2023-07-14 05:37:13 +00:00
|
|
|
})
|
2023-05-25 15:22:23 +00:00
|
|
|
}
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-05-25 15:22:23 +00:00
|
|
|
/// Returns a cached editor for `database_id`, opening the database and
/// creating an editor when none is cached yet.
pub async fn get_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
  // Fast path: reuse an already-opened editor.
  if let Some(editor) = self.editors.lock().await.get(database_id).cloned() {
    return Ok(editor);
  }
  // TODO(nathan): refactor the get_database that split the database creation and database opening.
  self.open_database(database_id).await
}
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-07-05 12:57:09 +00:00
|
|
|
/// Opens the database with `database_id`, wraps it in a new
/// `DatabaseEditor`, and caches the editor for later `get_database` calls.
pub async fn open_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
  trace!("open database editor:{}", database_id);
  // A missing database here is surfaced as a sync error: the collab data
  // may simply not have arrived yet.
  let database = self
    .get_database_indexer()
    .await?
    .get_database(database_id)
    .await
    .ok_or_else(|| FlowyError::collab_not_sync().with_context("open database error"))?;

  let editor = Arc::new(DatabaseEditor::new(database, self.task_scheduler.clone()).await?);
  self
    .editors
    .lock()
    .await
    .insert(database_id.to_string(), editor.clone());
  Ok(editor)
}
|
|
|
|
|
2024-03-30 08:28:24 +00:00
|
|
|
/// Kicks off the initial sync for the database that backs `view_id`.
/// Uses non-blocking `try_lock`s; if either lock is contended the init sync
/// is simply skipped rather than awaited.
pub async fn open_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
  let view_id = view_id.as_ref();
  let wdb = self.get_database_indexer().await?;
  if let Some(database_id) = wdb.get_database_id_with_view_id(view_id) {
    if let Some(database) = wdb.open_database(&database_id) {
      if let Some(lock_database) = database.try_lock() {
        if let Some(lock_collab) = lock_database.get_collab().try_lock() {
          trace!("{} database start init sync", view_id);
          lock_collab.start_init_sync();
        }
      }
    }
  }
  Ok(())
}
|
|
|
|
|
2023-04-28 06:08:53 +00:00
|
|
|
/// Closes one view of a database. When the last open view is closed, the
/// cached editor is dropped and the database itself is closed.
pub async fn close_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
  let view_id = view_id.as_ref();
  let wdb = self.get_database_indexer().await?;
  let database_id = wdb.get_database_id_with_view_id(view_id);
  if let Some(database_id) = database_id {
    let mut editors = self.editors.lock().await;
    let mut should_remove = false;
    if let Some(editor) = editors.get(&database_id) {
      editor.close_view(view_id).await;
      // Remove the editor only when no views remain open on it.
      should_remove = editor.num_views().await == 0;
    }

    if should_remove {
      trace!("remove database editor:{}", database_id);
      editors.remove(&database_id);
      wdb.close_database(&database_id);
    }
  }

  Ok(())
}
|
|
|
|
|
2023-06-05 01:42:11 +00:00
|
|
|
/// Deletes the given view from the database that owns it.
pub async fn delete_database_view(&self, view_id: &str) -> FlowyResult<()> {
  let database = self.get_database_with_view_id(view_id).await?;
  database.delete_database_view(view_id).await?;
  Ok(())
}
|
|
|
|
|
2023-04-28 06:08:53 +00:00
|
|
|
/// Serializes the database behind `view_id` to JSON bytes; the result can be
/// fed back into `create_database_with_database_data`.
pub async fn duplicate_database(&self, view_id: &str) -> FlowyResult<Vec<u8>> {
  let wdb = self.get_database_indexer().await?;
  let database_data = wdb.get_database_data(view_id).await?;
  Ok(database_data.to_json_bytes()?)
}
|
|
|
|
|
2023-07-05 12:57:09 +00:00
|
|
|
/// Create a new database with the given data that can be deserialized to [DatabaseData].
#[tracing::instrument(level = "trace", skip_all, err)]
pub async fn create_database_with_database_data(
  &self,
  view_id: &str,
  data: Vec<u8>,
) -> FlowyResult<()> {
  let database_data = DatabaseData::from_json_bytes(data)?;

  // The imported data still carries its original inline view id; remap it
  // (and the view that used it) to the caller-provided `view_id`.
  let mut create_database_params = CreateDatabaseParams::from_database_data(database_data);
  let old_view_id = create_database_params.inline_view_id.clone();
  create_database_params.inline_view_id = view_id.to_string();

  if let Some(create_view_params) = create_database_params
    .views
    .iter_mut()
    .find(|view| view.view_id == old_view_id)
  {
    create_view_params.view_id = view_id.to_string();
  }

  let wdb = self.get_database_indexer().await?;
  let _ = wdb.create_database(create_database_params)?;
  Ok(())
}
|
|
|
|
|
|
|
|
/// Creates a database from fully-specified creation parameters.
pub async fn create_database_with_params(&self, params: CreateDatabaseParams) -> FlowyResult<()> {
  let wdb = self.get_database_indexer().await?;
  let _ = wdb.create_database(params)?;
  Ok(())
}
|
|
|
|
|
2023-06-20 15:48:34 +00:00
|
|
|
/// A linked view is a view that is linked to existing database.
#[tracing::instrument(level = "trace", skip(self), err)]
pub async fn create_linked_view(
  &self,
  name: String,
  layout: DatabaseLayout,
  database_id: String,
  database_view_id: String,
) -> FlowyResult<()> {
  let wdb = self.get_database_indexer().await?;
  let mut params = CreateViewParams::new(database_id.clone(), database_view_id, name, layout);
  if let Some(database) = wdb.get_database(&database_id).await {
    // Resolve layout-specific dependencies (an extra field and/or a layout
    // setting) before creating the linked view.
    let (field, layout_setting) = DatabaseLayoutDepsResolver::new(database, layout)
      .resolve_deps_when_create_database_linked_view();
    if let Some(field) = field {
      params = params.with_deps_fields(vec![field], vec![default_field_settings_by_layout_map()]);
    }
    if let Some(layout_setting) = layout_setting {
      params = params.with_layout_setting(layout_setting);
    }
  };
  wdb.create_database_linked_view(params).await?;
  Ok(())
}
|
|
|
|
|
2023-05-31 06:08:54 +00:00
|
|
|
/// Imports CSV `content` as a new database for `view_id`.
/// Parsing runs on a blocking thread via `spawn_blocking` to keep the async
/// runtime responsive on large files.
pub async fn import_csv(
  &self,
  view_id: String,
  content: String,
  format: CSVFormat,
) -> FlowyResult<ImportResult> {
  let params = tokio::task::spawn_blocking(move || {
    CSVImporter.import_csv_from_string(view_id, content, format)
  })
  .await
  .map_err(internal_error)??;
  // Capture the ids before `params` is consumed by database creation.
  let result = ImportResult {
    database_id: params.database_id.clone(),
    view_id: params.inline_view_id.clone(),
  };
  self.create_database_with_params(params).await?;
  Ok(result)
}
|
|
|
|
|
2023-05-31 06:08:54 +00:00
|
|
|
// will implement soon
/// Placeholder for file-based CSV import; currently a no-op that always
/// succeeds.
pub async fn import_csv_from_file(
  &self,
  _file_path: String,
  _format: CSVFormat,
) -> FlowyResult<()> {
  Ok(())
}
|
|
|
|
|
2023-05-27 13:29:18 +00:00
|
|
|
/// Exports the database that owns `view_id` as CSV text in the given style.
pub async fn export_csv(&self, view_id: &str, style: CSVFormat) -> FlowyResult<String> {
  self
    .get_database_with_view_id(view_id)
    .await?
    .export_csv(style)
    .await
}
|
|
|
|
|
2023-06-01 12:23:27 +00:00
|
|
|
pub async fn update_database_layout(
|
|
|
|
&self,
|
|
|
|
view_id: &str,
|
|
|
|
layout: DatabaseLayoutPB,
|
|
|
|
) -> FlowyResult<()> {
|
|
|
|
let database = self.get_database_with_view_id(view_id).await?;
|
|
|
|
database.update_view_layout(view_id, layout.into()).await
|
|
|
|
}
|
|
|
|
|
2023-07-05 12:57:09 +00:00
|
|
|
/// Fetches up to `limit` remote snapshots of the database that owns
/// `view_id` and maps them into protobuf-facing `DatabaseSnapshotPB` values.
pub async fn get_database_snapshots(
  &self,
  view_id: &str,
  limit: usize,
) -> FlowyResult<Vec<DatabaseSnapshotPB>> {
  let database_id = self.get_database_id_with_view_id(view_id).await?;
  let snapshots = self
    .cloud_service
    .get_database_collab_object_snapshots(&database_id, limit)
    .await?
    .into_iter()
    .map(|snapshot| DatabaseSnapshotPB {
      snapshot_id: snapshot.snapshot_id,
      // No description is provided by the cloud service.
      snapshot_desc: "".to_string(),
      created_at: snapshot.created_at,
      data: snapshot.data,
    })
    .collect::<Vec<_>>();

  Ok(snapshots)
}
|
|
|
|
|
2024-05-05 14:04:34 +00:00
|
|
|
/// Return the database indexer.
|
|
|
|
/// Each workspace has itw own Database indexer that manages all the databases and database views
|
|
|
|
async fn get_database_indexer(&self) -> FlowyResult<Arc<WorkspaceDatabase>> {
|
2023-07-14 05:37:13 +00:00
|
|
|
let database = self.workspace_database.read().await;
|
2023-04-28 06:08:53 +00:00
|
|
|
match &*database {
|
2023-08-21 16:19:15 +00:00
|
|
|
None => Err(FlowyError::internal().with_context("Workspace database not initialized")),
|
2023-07-14 05:37:13 +00:00
|
|
|
Some(user_database) => Ok(user_database.clone()),
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
|
|
|
}
|
2023-07-05 12:57:09 +00:00
|
|
|
|
2024-05-05 14:04:34 +00:00
|
|
|
/// Summarizes a row: gathers the row's cell contents (skipping the target
/// summary cell itself), sends them to the cloud service, and writes the
/// response back into the `field_id` cell.
#[instrument(level = "debug", skip_all)]
pub async fn summarize_row(
  &self,
  view_id: String,
  row_id: RowId,
  field_id: String,
) -> FlowyResult<()> {
  let database = self.get_database_with_view_id(&view_id).await?;

  // Collect each cell's stringified content, keyed by field name.
  let mut summary_row_content = SummaryRowContent::new();
  if let Some(row) = database.get_row(&view_id, &row_id) {
    let fields = database.get_fields(&view_id, None);
    for field in fields {
      // When summarizing a row, skip the content in the "AI summary" cell; it does not need to
      // be summarized.
      if field.id != field_id {
        if let Some(cell) = row.cells.get(&field.id) {
          summary_row_content.insert(field.name.clone(), stringify_cell(cell, &field));
        }
      }
    }
  }

  // Call the cloud service to summarize the row.
  trace!(
    "[AI]:summarize row:{}, content:{:?}",
    row_id,
    summary_row_content
  );
  let response = self
    .cloud_service
    .summary_database_row(&self.user.workspace_id()?, &row_id, summary_row_content)
    .await?;
  trace!("[AI]:summarize row response: {}", response);

  // Update the cell with the response from the cloud service.
  database
    .update_cell_with_changeset(&view_id, &row_id, &field_id, BoxAny::new(response))
    .await?;
  Ok(())
}
|
|
|
|
|
2023-07-05 12:57:09 +00:00
|
|
|
/// Only expose this method for testing
#[cfg(debug_assertions)]
pub fn get_cloud_service(&self) -> &Arc<dyn DatabaseCloudService> {
  &self.cloud_service
}
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
/// Adapter that implements the collab-database crate's
/// `DatabaseCollabService` on top of the app's user session, collab builder
/// and cloud service.
struct UserDatabaseCollabServiceImpl {
  user: Arc<dyn DatabaseUser>,
  collab_builder: Arc<AppFlowyCollabBuilder>,
  cloud_service: Arc<dyn DatabaseCloudService>,
}
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
|
2023-12-29 05:02:27 +00:00
|
|
|
fn get_collab_doc_state(
|
2023-07-14 05:37:13 +00:00
|
|
|
&self,
|
|
|
|
object_id: &str,
|
2023-07-29 01:46:24 +00:00
|
|
|
object_ty: CollabType,
|
2024-04-15 06:50:28 +00:00
|
|
|
) -> CollabFuture<Result<DataSource, DatabaseError>> {
|
2024-04-26 01:44:07 +00:00
|
|
|
let workspace_id = self.user.workspace_id().unwrap();
|
2023-07-14 05:37:13 +00:00
|
|
|
let object_id = object_id.to_string();
|
|
|
|
let weak_cloud_service = Arc::downgrade(&self.cloud_service);
|
|
|
|
Box::pin(async move {
|
|
|
|
match weak_cloud_service.upgrade() {
|
2024-03-30 08:28:24 +00:00
|
|
|
None => Err(DatabaseError::Internal(anyhow!("Cloud service is dropped"))),
|
2023-07-14 05:37:13 +00:00
|
|
|
Some(cloud_service) => {
|
2024-03-23 01:18:47 +00:00
|
|
|
let doc_state = cloud_service
|
2024-02-03 21:49:45 +00:00
|
|
|
.get_database_object_doc_state(&object_id, object_ty, &workspace_id)
|
2023-07-29 01:46:24 +00:00
|
|
|
.await?;
|
2024-03-30 08:28:24 +00:00
|
|
|
match doc_state {
|
2024-04-15 06:50:28 +00:00
|
|
|
None => Ok(DataSource::Disk),
|
|
|
|
Some(doc_state) => Ok(DataSource::DocStateV1(doc_state)),
|
2024-03-30 08:28:24 +00:00
|
|
|
}
|
2023-07-14 05:37:13 +00:00
|
|
|
},
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
2023-05-15 14:16:05 +00:00
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
/// Fetches the doc states of multiple objects in one request.
/// Returns an empty map (not an error) when the cloud service has already
/// been dropped, so callers degrade gracefully.
fn batch_get_collab_update(
  &self,
  object_ids: Vec<String>,
  object_ty: CollabType,
) -> CollabFuture<Result<CollabDocStateByOid, DatabaseError>> {
  let cloned_user = self.user.clone();
  let weak_cloud_service = Arc::downgrade(&self.cloud_service);
  Box::pin(async move {
    let workspace_id = cloned_user
      .workspace_id()
      .map_err(|err| DatabaseError::Internal(err.into()))?;
    match weak_cloud_service.upgrade() {
      // Best-effort: a dropped service yields an empty result, not an error.
      None => {
        tracing::warn!("Cloud service is dropped");
        Ok(CollabDocStateByOid::default())
      },
      Some(cloud_service) => {
        let updates = cloud_service
          .batch_get_database_object_doc_state(object_ids, object_ty, &workspace_id)
          .await?;
        Ok(updates)
      },
    }
  })
}
|
2023-05-15 14:16:05 +00:00
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
/// Builds a collab instance for a database object through the shared
/// `AppFlowyCollabBuilder`, with sync enabled.
/// The persistence config parameter is currently unused by this impl.
fn build_collab_with_config(
  &self,
  uid: i64,
  object_id: &str,
  object_type: CollabType,
  collab_db: Weak<CollabKVDB>,
  collab_raw_data: DataSource,
  _persistence_config: CollabPersistenceConfig,
) -> Result<Arc<MutexCollab>, DatabaseError> {
  let workspace_id = self
    .user
    .workspace_id()
    .map_err(|err| DatabaseError::Internal(err.into()))?;
  let collab = self.collab_builder.build_with_config(
    &workspace_id,
    uid,
    object_id,
    object_type.clone(),
    collab_db.clone(),
    collab_raw_data,
    CollabBuilderConfig::default().sync_enable(true),
  )?;
  Ok(collab)
}
|
|
|
|
}
|