2023-04-28 06:08:53 +00:00
|
|
|
use std::collections::HashMap;
|
|
|
|
use std::ops::Deref;
|
|
|
|
use std::sync::Arc;
|
|
|
|
|
2023-05-15 14:16:05 +00:00
|
|
|
use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
|
2023-05-23 15:55:21 +00:00
|
|
|
use appflowy_integrate::{CollabPersistenceConfig, RocksCollabDB};
|
2023-05-15 14:16:05 +00:00
|
|
|
use collab::core::collab::MutexCollab;
|
2023-04-28 06:08:53 +00:00
|
|
|
use collab_database::database::DatabaseData;
|
2023-05-31 09:42:14 +00:00
|
|
|
use collab_database::user::{DatabaseCollabBuilder, UserDatabase as InnerUserDatabase};
|
2023-06-20 15:48:34 +00:00
|
|
|
use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout};
|
2023-04-28 06:08:53 +00:00
|
|
|
use parking_lot::Mutex;
|
|
|
|
use tokio::sync::RwLock;
|
|
|
|
|
2023-05-25 15:22:23 +00:00
|
|
|
use flowy_error::{internal_error, FlowyError, FlowyResult};
|
2023-04-28 06:08:53 +00:00
|
|
|
use flowy_task::TaskDispatcher;
|
|
|
|
|
|
|
|
use crate::entities::{DatabaseDescriptionPB, DatabaseLayoutPB, RepeatedDatabaseDescriptionPB};
|
|
|
|
use crate::services::database::{DatabaseEditor, MutexDatabase};
|
2023-06-20 15:48:34 +00:00
|
|
|
use crate::services::database_view::DatabaseLayoutDepsResolver;
|
2023-05-27 13:29:18 +00:00
|
|
|
use crate::services::share::csv::{CSVFormat, CSVImporter, ImportResult};
|
2023-04-28 06:08:53 +00:00
|
|
|
|
|
|
|
/// User-scoped context required by the database layer.
///
/// Implemented by the user module; supplies the identity and the collab
/// storage handle for the currently signed-in user.
pub trait DatabaseUser2: Send + Sync {
  /// Returns the id of the signed-in user, or an error when no user session
  /// is available.
  fn user_id(&self) -> Result<i64, FlowyError>;

  /// Returns the user's auth token, if one exists (`Ok(None)` when the user
  /// is not authenticated with a remote server).
  fn token(&self) -> Result<Option<String>, FlowyError>;

  /// Returns the user's collab storage (RocksDB-backed), used to persist
  /// database documents.
  fn collab_db(&self) -> Result<Arc<RocksCollabDB>, FlowyError>;
}
|
|
|
|
|
|
|
|
/// Orchestrates access to the current user's databases: owns the lazily
/// initialized [`UserDatabase`] and a cache of per-database editors.
pub struct DatabaseManager2 {
  // Supplies user id / token / collab storage for the signed-in user.
  user: Arc<dyn DatabaseUser2>,
  // `None` until `initialize` installs the inner user database.
  user_database: UserDatabase,
  // Shared task runner handed to every `DatabaseEditor` this manager creates.
  task_scheduler: Arc<RwLock<TaskDispatcher>>,
  // Cache of open editors keyed by database id.
  editors: RwLock<HashMap<String, Arc<DatabaseEditor>>>,
  // Builder used to construct collab instances for each database object.
  collab_builder: Arc<AppFlowyCollabBuilder>,
}
|
|
|
|
|
|
|
|
impl DatabaseManager2 {
|
|
|
|
pub fn new(
|
|
|
|
database_user: Arc<dyn DatabaseUser2>,
|
|
|
|
task_scheduler: Arc<RwLock<TaskDispatcher>>,
|
2023-05-15 14:16:05 +00:00
|
|
|
collab_builder: Arc<AppFlowyCollabBuilder>,
|
2023-04-28 06:08:53 +00:00
|
|
|
) -> Self {
|
|
|
|
Self {
|
|
|
|
user: database_user,
|
|
|
|
user_database: UserDatabase::default(),
|
|
|
|
task_scheduler,
|
|
|
|
editors: Default::default(),
|
2023-05-15 14:16:05 +00:00
|
|
|
collab_builder,
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-05-21 10:53:59 +00:00
|
|
|
pub async fn initialize(&self, user_id: i64) -> FlowyResult<()> {
|
2023-06-06 08:03:29 +00:00
|
|
|
let config = CollabPersistenceConfig::new().snapshot_per_update(10);
|
2023-05-15 14:16:05 +00:00
|
|
|
let db = self.user.collab_db()?;
|
2023-04-28 06:08:53 +00:00
|
|
|
*self.user_database.lock() = Some(InnerUserDatabase::new(
|
|
|
|
user_id,
|
|
|
|
db,
|
2023-06-06 08:03:29 +00:00
|
|
|
config,
|
2023-05-15 14:16:05 +00:00
|
|
|
UserDatabaseCollabBuilderImpl(self.collab_builder.clone()),
|
2023-04-28 06:08:53 +00:00
|
|
|
));
|
|
|
|
// do nothing
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-05-21 10:53:59 +00:00
|
|
|
pub async fn initialize_with_new_user(&self, user_id: i64, _token: &str) -> FlowyResult<()> {
|
|
|
|
self.initialize(user_id).await?;
|
2023-04-28 06:08:53 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub async fn get_all_databases_description(&self) -> RepeatedDatabaseDescriptionPB {
|
|
|
|
let databases_description = self.with_user_database(vec![], |database| {
|
|
|
|
database
|
|
|
|
.get_all_databases()
|
|
|
|
.into_iter()
|
|
|
|
.map(DatabaseDescriptionPB::from)
|
|
|
|
.collect()
|
|
|
|
});
|
|
|
|
|
|
|
|
RepeatedDatabaseDescriptionPB {
|
|
|
|
items: databases_description,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-05-25 15:22:23 +00:00
|
|
|
pub async fn get_database_with_view_id(&self, view_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
|
2023-06-02 04:04:14 +00:00
|
|
|
let database_id = self.get_database_id_with_view_id(view_id).await?;
|
|
|
|
self.get_database(&database_id).await
|
|
|
|
}
|
|
|
|
|
|
|
|
pub async fn get_database_id_with_view_id(&self, view_id: &str) -> FlowyResult<String> {
|
2023-04-28 06:08:53 +00:00
|
|
|
let database_id = self.with_user_database(Err(FlowyError::internal()), |database| {
|
|
|
|
database
|
|
|
|
.get_database_id_with_view_id(view_id)
|
|
|
|
.ok_or_else(FlowyError::record_not_found)
|
|
|
|
})?;
|
2023-06-02 04:04:14 +00:00
|
|
|
Ok(database_id)
|
2023-05-25 15:22:23 +00:00
|
|
|
}
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-05-25 15:22:23 +00:00
|
|
|
pub async fn get_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
|
|
|
|
if let Some(editor) = self.editors.read().await.get(database_id) {
|
2023-04-28 06:08:53 +00:00
|
|
|
return Ok(editor.clone());
|
|
|
|
}
|
|
|
|
|
2023-06-06 09:19:53 +00:00
|
|
|
tracing::trace!("create new editor for database {}", database_id);
|
2023-04-28 06:08:53 +00:00
|
|
|
let mut editors = self.editors.write().await;
|
|
|
|
let database = MutexDatabase::new(self.with_user_database(
|
|
|
|
Err(FlowyError::record_not_found()),
|
|
|
|
|database| {
|
|
|
|
database
|
2023-05-25 15:22:23 +00:00
|
|
|
.get_database(database_id)
|
2023-04-28 06:08:53 +00:00
|
|
|
.ok_or_else(FlowyError::record_not_found)
|
|
|
|
},
|
|
|
|
)?);
|
|
|
|
|
|
|
|
let editor = Arc::new(DatabaseEditor::new(database, self.task_scheduler.clone()).await?);
|
|
|
|
editors.insert(database_id.to_string(), editor.clone());
|
|
|
|
Ok(editor)
|
|
|
|
}
|
|
|
|
|
|
|
|
#[tracing::instrument(level = "debug", skip_all)]
|
|
|
|
pub async fn close_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
|
|
|
|
let view_id = view_id.as_ref();
|
|
|
|
let database_id = self.with_user_database(None, |database| {
|
|
|
|
database.get_database_id_with_view_id(view_id)
|
|
|
|
});
|
|
|
|
|
|
|
|
if let Some(database_id) = database_id {
|
|
|
|
let mut editors = self.editors.write().await;
|
|
|
|
if let Some(editor) = editors.get(&database_id) {
|
|
|
|
if editor.close_view_editor(view_id).await {
|
|
|
|
editor.close().await;
|
|
|
|
editors.remove(&database_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-06-05 01:42:11 +00:00
|
|
|
pub async fn delete_database_view(&self, view_id: &str) -> FlowyResult<()> {
|
|
|
|
let database = self.get_database_with_view_id(view_id).await?;
|
|
|
|
let _ = database.delete_database_view(view_id).await?;
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-04-28 06:08:53 +00:00
|
|
|
pub async fn duplicate_database(&self, view_id: &str) -> FlowyResult<Vec<u8>> {
|
|
|
|
let database_data = self.with_user_database(Err(FlowyError::internal()), |database| {
|
|
|
|
let data = database.get_database_duplicated_data(view_id)?;
|
|
|
|
let json_bytes = data.to_json_bytes()?;
|
|
|
|
Ok(json_bytes)
|
|
|
|
})?;
|
|
|
|
|
|
|
|
Ok(database_data)
|
|
|
|
}
|
|
|
|
|
|
|
|
#[tracing::instrument(level = "trace", skip_all, err)]
|
|
|
|
pub async fn create_database_with_database_data(
|
|
|
|
&self,
|
|
|
|
view_id: &str,
|
|
|
|
data: Vec<u8>,
|
|
|
|
) -> FlowyResult<()> {
|
|
|
|
let mut database_data = DatabaseData::from_json_bytes(data)?;
|
|
|
|
database_data.view.id = view_id.to_string();
|
|
|
|
self.with_user_database(
|
|
|
|
Err(FlowyError::internal().context("Create database with data failed")),
|
|
|
|
|database| {
|
|
|
|
let database = database.create_database_with_data(database_data)?;
|
|
|
|
Ok(database)
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub async fn create_database_with_params(&self, params: CreateDatabaseParams) -> FlowyResult<()> {
|
|
|
|
let _ = self.with_user_database(
|
|
|
|
Err(FlowyError::internal().context("Create database with params failed")),
|
|
|
|
|user_database| {
|
|
|
|
let database = user_database.create_database(params)?;
|
|
|
|
Ok(database)
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-06-20 15:48:34 +00:00
|
|
|
/// A linked view is a view that is linked to existing database.
|
2023-06-06 09:19:53 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(self), err)]
|
2023-04-28 06:08:53 +00:00
|
|
|
pub async fn create_linked_view(
|
|
|
|
&self,
|
|
|
|
name: String,
|
2023-06-20 15:48:34 +00:00
|
|
|
layout: DatabaseLayout,
|
2023-04-28 06:08:53 +00:00
|
|
|
database_id: String,
|
2023-06-01 12:23:27 +00:00
|
|
|
database_view_id: String,
|
2023-04-28 06:08:53 +00:00
|
|
|
) -> FlowyResult<()> {
|
|
|
|
self.with_user_database(
|
|
|
|
Err(FlowyError::internal().context("Create database view failed")),
|
|
|
|
|user_database| {
|
2023-06-20 15:48:34 +00:00
|
|
|
let mut params = CreateViewParams::new(database_id.clone(), database_view_id, name, layout);
|
|
|
|
if let Some(database) = user_database.get_database(&database_id) {
|
|
|
|
if let Some((field, layout_setting)) = DatabaseLayoutDepsResolver::new(database, layout)
|
|
|
|
.resolve_deps_when_create_database_linked_view()
|
|
|
|
{
|
|
|
|
params = params
|
|
|
|
.with_deps_fields(vec![field])
|
|
|
|
.with_layout_setting(layout_setting);
|
|
|
|
}
|
|
|
|
};
|
2023-06-06 09:19:53 +00:00
|
|
|
user_database.create_database_linked_view(params)?;
|
2023-04-28 06:08:53 +00:00
|
|
|
Ok(())
|
|
|
|
},
|
|
|
|
)?;
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-05-31 06:08:54 +00:00
|
|
|
pub async fn import_csv(
|
|
|
|
&self,
|
|
|
|
view_id: String,
|
|
|
|
content: String,
|
|
|
|
format: CSVFormat,
|
|
|
|
) -> FlowyResult<ImportResult> {
|
|
|
|
let params = tokio::task::spawn_blocking(move || {
|
|
|
|
CSVImporter.import_csv_from_string(view_id, content, format)
|
|
|
|
})
|
|
|
|
.await
|
|
|
|
.map_err(internal_error)??;
|
2023-05-27 13:29:18 +00:00
|
|
|
let result = ImportResult {
|
|
|
|
database_id: params.database_id.clone(),
|
|
|
|
view_id: params.view_id.clone(),
|
|
|
|
};
|
2023-05-25 15:22:23 +00:00
|
|
|
self.create_database_with_params(params).await?;
|
2023-05-27 13:29:18 +00:00
|
|
|
Ok(result)
|
2023-05-25 15:22:23 +00:00
|
|
|
}
|
|
|
|
|
2023-05-31 06:08:54 +00:00
|
|
|
// will implement soon
|
|
|
|
pub async fn import_csv_from_file(
|
|
|
|
&self,
|
|
|
|
_file_path: String,
|
|
|
|
_format: CSVFormat,
|
|
|
|
) -> FlowyResult<()> {
|
2023-05-25 15:22:23 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-05-27 13:29:18 +00:00
|
|
|
pub async fn export_csv(&self, view_id: &str, style: CSVFormat) -> FlowyResult<String> {
|
2023-05-25 15:22:23 +00:00
|
|
|
let database = self.get_database_with_view_id(view_id).await?;
|
|
|
|
database.export_csv(style).await
|
|
|
|
}
|
|
|
|
|
2023-06-01 12:23:27 +00:00
|
|
|
pub async fn update_database_layout(
|
|
|
|
&self,
|
|
|
|
view_id: &str,
|
|
|
|
layout: DatabaseLayoutPB,
|
|
|
|
) -> FlowyResult<()> {
|
|
|
|
let database = self.get_database_with_view_id(view_id).await?;
|
|
|
|
database.update_view_layout(view_id, layout.into()).await
|
|
|
|
}
|
|
|
|
|
2023-04-28 06:08:53 +00:00
|
|
|
fn with_user_database<F, Output>(&self, default_value: Output, f: F) -> Output
|
|
|
|
where
|
|
|
|
F: FnOnce(&InnerUserDatabase) -> Output,
|
|
|
|
{
|
|
|
|
let database = self.user_database.lock();
|
|
|
|
match &*database {
|
|
|
|
None => default_value,
|
|
|
|
Some(folder) => f(folder),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Shared, lazily-initialized handle to the inner user database.
///
/// Starts out holding `None` (via `Default`); `DatabaseManager2::initialize`
/// installs the `InnerUserDatabase`. Cloning is cheap (an `Arc` bump).
#[derive(Clone, Default)]
pub struct UserDatabase(Arc<Mutex<Option<InnerUserDatabase>>>);
|
|
|
|
|
|
|
|
impl Deref for UserDatabase {
  type Target = Arc<Mutex<Option<InnerUserDatabase>>>;

  /// Exposes the wrapped `Arc<Mutex<…>>` so callers can `.lock()` the inner
  /// database directly (e.g. `self.user_database.lock()`).
  fn deref(&self) -> &Self::Target {
    &self.0
  }
}
|
|
|
|
|
|
|
|
// SAFETY: these manual impls exist because `InnerUserDatabase` is presumably
// not auto-`Send`/`Sync`. All access to it goes through the `parking_lot::Mutex`
// inside `UserDatabase`, which serializes use across threads.
// NOTE(review): this is only sound if `InnerUserDatabase` is actually safe to
// move between threads (no thread-affine state such as `Rc`/raw pointers tied
// to one thread) — confirm in `collab_database`.
unsafe impl Sync for UserDatabase {}

unsafe impl Send for UserDatabase {}
|
2023-05-15 14:16:05 +00:00
|
|
|
|
|
|
|
// Newtype adapter: lets the shared `AppFlowyCollabBuilder` satisfy the
// `DatabaseCollabBuilder` trait expected by `collab_database`.
struct UserDatabaseCollabBuilderImpl(Arc<AppFlowyCollabBuilder>);
|
|
|
|
|
2023-05-31 09:42:14 +00:00
|
|
|
impl DatabaseCollabBuilder for UserDatabaseCollabBuilderImpl {
|
2023-05-15 14:16:05 +00:00
|
|
|
fn build_with_config(
|
|
|
|
&self,
|
|
|
|
uid: i64,
|
|
|
|
object_id: &str,
|
2023-05-31 09:42:14 +00:00
|
|
|
object_name: &str,
|
2023-05-15 14:16:05 +00:00
|
|
|
db: Arc<RocksCollabDB>,
|
2023-05-23 15:55:21 +00:00
|
|
|
config: &CollabPersistenceConfig,
|
2023-05-15 14:16:05 +00:00
|
|
|
) -> Arc<MutexCollab> {
|
2023-05-31 09:42:14 +00:00
|
|
|
self
|
|
|
|
.0
|
|
|
|
.build_with_config(uid, object_id, object_name, db, config)
|
2023-05-15 14:16:05 +00:00
|
|
|
}
|
|
|
|
}
|