2023-04-28 06:08:53 +00:00
|
|
|
use std::collections::HashMap;
|
2023-07-29 01:46:24 +00:00
|
|
|
use std::sync::{Arc, Weak};
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-05-15 14:16:05 +00:00
|
|
|
use appflowy_integrate::collab_builder::AppFlowyCollabBuilder;
|
2023-07-29 01:46:24 +00:00
|
|
|
use appflowy_integrate::{CollabPersistenceConfig, CollabType, RocksCollabDB};
|
2023-07-14 05:37:13 +00:00
|
|
|
use collab::core::collab::{CollabRawData, MutexCollab};
|
|
|
|
use collab_database::blocks::BlockEvent;
|
|
|
|
use collab_database::database::{DatabaseData, YrsDocAction};
|
|
|
|
use collab_database::error::DatabaseError;
|
|
|
|
use collab_database::user::{
|
2023-07-29 01:46:24 +00:00
|
|
|
CollabFuture, CollabObjectUpdate, CollabObjectUpdateByOid, DatabaseCollabService,
|
|
|
|
WorkspaceDatabase,
|
2023-07-14 05:37:13 +00:00
|
|
|
};
|
2023-06-20 15:48:34 +00:00
|
|
|
use collab_database::views::{CreateDatabaseParams, CreateViewParams, DatabaseLayout};
|
2023-04-28 06:08:53 +00:00
|
|
|
use tokio::sync::RwLock;
|
|
|
|
|
2023-07-29 01:46:24 +00:00
|
|
|
use flowy_database_deps::cloud::DatabaseCloudService;
|
2023-05-25 15:22:23 +00:00
|
|
|
use flowy_error::{internal_error, FlowyError, FlowyResult};
|
2023-04-28 06:08:53 +00:00
|
|
|
use flowy_task::TaskDispatcher;
|
|
|
|
|
2023-07-05 12:57:09 +00:00
|
|
|
use crate::entities::{
|
2023-07-14 05:37:13 +00:00
|
|
|
DatabaseDescriptionPB, DatabaseLayoutPB, DatabaseSnapshotPB, DidFetchRowPB,
|
|
|
|
RepeatedDatabaseDescriptionPB,
|
2023-07-05 12:57:09 +00:00
|
|
|
};
|
2023-07-14 05:37:13 +00:00
|
|
|
use crate::notification::{send_notification, DatabaseNotification};
|
|
|
|
use crate::services::database::DatabaseEditor;
|
2023-06-20 15:48:34 +00:00
|
|
|
use crate::services::database_view::DatabaseLayoutDepsResolver;
|
2023-05-27 13:29:18 +00:00
|
|
|
use crate::services::share::csv::{CSVFormat, CSVImporter, ImportResult};
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-07-29 01:46:24 +00:00
|
|
|
/// User-scoped facilities the database layer needs: identity, auth token,
/// and a handle to the per-user collab storage.
pub trait DatabaseUser: Send + Sync {
  /// Returns the id of the currently signed-in user.
  fn user_id(&self) -> Result<i64, FlowyError>;
  /// Returns the user's auth token, or `None` when no token is available.
  fn token(&self) -> Result<Option<String>, FlowyError>;
  /// Returns a weak handle to the collab database for `uid`. Callers must
  /// `upgrade()` it before use; it may have been dropped (e.g. on sign-out).
  fn collab_db(&self, uid: i64) -> Result<Weak<RocksCollabDB>, FlowyError>;
}
|
|
|
|
|
|
|
|
pub struct DatabaseManager {
  // Source of the user id and the per-user collab storage handle.
  user: Arc<dyn DatabaseUser>,
  // `None` until `initialize` is called; replaced on each (re)initialization.
  workspace_database: Arc<RwLock<Option<Arc<WorkspaceDatabase>>>>,
  task_scheduler: Arc<RwLock<TaskDispatcher>>,
  // Open database editors, keyed by database id (see `open_database`).
  editors: RwLock<HashMap<String, Arc<DatabaseEditor>>>,
  collab_builder: Arc<AppFlowyCollabBuilder>,
  cloud_service: Arc<dyn DatabaseCloudService>,
}
|
|
|
|
|
2023-07-29 01:46:24 +00:00
|
|
|
impl DatabaseManager {
|
2023-04-28 06:08:53 +00:00
|
|
|
pub fn new(
|
2023-07-29 01:46:24 +00:00
|
|
|
database_user: Arc<dyn DatabaseUser>,
|
2023-04-28 06:08:53 +00:00
|
|
|
task_scheduler: Arc<RwLock<TaskDispatcher>>,
|
2023-05-15 14:16:05 +00:00
|
|
|
collab_builder: Arc<AppFlowyCollabBuilder>,
|
2023-07-05 12:57:09 +00:00
|
|
|
cloud_service: Arc<dyn DatabaseCloudService>,
|
2023-04-28 06:08:53 +00:00
|
|
|
) -> Self {
|
|
|
|
Self {
|
|
|
|
user: database_user,
|
2023-07-14 05:37:13 +00:00
|
|
|
workspace_database: Default::default(),
|
2023-04-28 06:08:53 +00:00
|
|
|
task_scheduler,
|
|
|
|
editors: Default::default(),
|
2023-05-15 14:16:05 +00:00
|
|
|
collab_builder,
|
2023-07-05 12:57:09 +00:00
|
|
|
cloud_service,
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-07-29 01:46:24 +00:00
|
|
|
fn is_collab_exist(&self, uid: i64, collab_db: &Weak<RocksCollabDB>, object_id: &str) -> bool {
|
|
|
|
match collab_db.upgrade() {
|
|
|
|
None => false,
|
|
|
|
Some(collab_db) => {
|
|
|
|
let read_txn = collab_db.read_txn();
|
|
|
|
read_txn.is_exist(uid, object_id)
|
|
|
|
},
|
|
|
|
}
|
2023-07-14 05:37:13 +00:00
|
|
|
}
|
|
|
|
|
2023-07-29 01:46:24 +00:00
|
|
|
pub async fn initialize(
|
|
|
|
&self,
|
|
|
|
uid: i64,
|
|
|
|
_workspace_id: String,
|
2023-08-17 15:46:39 +00:00
|
|
|
database_storage_id: String,
|
2023-07-29 01:46:24 +00:00
|
|
|
) -> FlowyResult<()> {
|
2023-07-14 05:37:13 +00:00
|
|
|
let collab_db = self.user.collab_db(uid)?;
|
|
|
|
let collab_builder = UserDatabaseCollabServiceImpl {
|
|
|
|
collab_builder: self.collab_builder.clone(),
|
|
|
|
cloud_service: self.cloud_service.clone(),
|
|
|
|
};
|
2023-06-06 08:03:29 +00:00
|
|
|
let config = CollabPersistenceConfig::new().snapshot_per_update(10);
|
2023-07-14 05:37:13 +00:00
|
|
|
let mut collab_raw_data = CollabRawData::default();
|
|
|
|
|
|
|
|
// If the workspace database not exist in disk, try to fetch from remote.
|
2023-08-17 15:46:39 +00:00
|
|
|
if !self.is_collab_exist(uid, &collab_db, &database_storage_id) {
|
2023-07-14 05:37:13 +00:00
|
|
|
tracing::trace!("workspace database not exist, try to fetch from remote");
|
|
|
|
match self
|
|
|
|
.cloud_service
|
2023-08-17 15:46:39 +00:00
|
|
|
.get_collab_update(&database_storage_id, CollabType::WorkspaceDatabase)
|
2023-07-14 05:37:13 +00:00
|
|
|
.await
|
|
|
|
{
|
2023-08-17 15:46:39 +00:00
|
|
|
Ok(updates) => {
|
|
|
|
collab_raw_data = updates;
|
|
|
|
},
|
2023-07-14 05:37:13 +00:00
|
|
|
Err(err) => {
|
|
|
|
return Err(FlowyError::record_not_found().context(format!(
|
|
|
|
"get workspace database :{} failed: {}",
|
2023-08-17 15:46:39 +00:00
|
|
|
database_storage_id, err,
|
2023-07-14 05:37:13 +00:00
|
|
|
)));
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Construct the workspace database.
|
2023-08-17 15:46:39 +00:00
|
|
|
tracing::trace!("open workspace database: {}", &database_storage_id);
|
2023-07-14 05:37:13 +00:00
|
|
|
let collab = collab_builder.build_collab_with_config(
|
|
|
|
uid,
|
2023-08-17 15:46:39 +00:00
|
|
|
&database_storage_id,
|
2023-07-29 01:46:24 +00:00
|
|
|
CollabType::WorkspaceDatabase,
|
2023-07-14 05:37:13 +00:00
|
|
|
collab_db.clone(),
|
|
|
|
collab_raw_data,
|
|
|
|
&config,
|
|
|
|
);
|
|
|
|
let workspace_database =
|
|
|
|
WorkspaceDatabase::open(uid, collab, collab_db, config, collab_builder);
|
|
|
|
subscribe_block_event(&workspace_database);
|
|
|
|
*self.workspace_database.write().await = Some(Arc::new(workspace_database));
|
2023-07-29 01:46:24 +00:00
|
|
|
|
|
|
|
// Remove all existing editors
|
|
|
|
self.editors.write().await.clear();
|
2023-04-28 06:08:53 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-07-29 01:46:24 +00:00
|
|
|
pub async fn initialize_with_new_user(
|
|
|
|
&self,
|
|
|
|
user_id: i64,
|
|
|
|
workspace_id: String,
|
|
|
|
database_storage_id: String,
|
|
|
|
) -> FlowyResult<()> {
|
|
|
|
self
|
|
|
|
.initialize(user_id, workspace_id, database_storage_id)
|
|
|
|
.await?;
|
2023-04-28 06:08:53 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub async fn get_all_databases_description(&self) -> RepeatedDatabaseDescriptionPB {
|
2023-07-14 05:37:13 +00:00
|
|
|
let mut items = vec![];
|
|
|
|
if let Ok(wdb) = self.get_workspace_database().await {
|
|
|
|
items = wdb
|
2023-04-28 06:08:53 +00:00
|
|
|
.get_all_databases()
|
|
|
|
.into_iter()
|
|
|
|
.map(DatabaseDescriptionPB::from)
|
2023-07-14 05:37:13 +00:00
|
|
|
.collect();
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
2023-07-14 05:37:13 +00:00
|
|
|
RepeatedDatabaseDescriptionPB { items }
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
|
|
|
|
2023-05-25 15:22:23 +00:00
|
|
|
pub async fn get_database_with_view_id(&self, view_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
|
2023-06-02 04:04:14 +00:00
|
|
|
let database_id = self.get_database_id_with_view_id(view_id).await?;
|
|
|
|
self.get_database(&database_id).await
|
|
|
|
}
|
|
|
|
|
|
|
|
pub async fn get_database_id_with_view_id(&self, view_id: &str) -> FlowyResult<String> {
|
2023-07-14 05:37:13 +00:00
|
|
|
let wdb = self.get_workspace_database().await?;
|
|
|
|
wdb.get_database_id_with_view_id(view_id).ok_or_else(|| {
|
|
|
|
FlowyError::record_not_found()
|
|
|
|
.context(format!("The database for view id: {} not found", view_id))
|
|
|
|
})
|
2023-05-25 15:22:23 +00:00
|
|
|
}
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-05-25 15:22:23 +00:00
|
|
|
pub async fn get_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
|
|
|
|
if let Some(editor) = self.editors.read().await.get(database_id) {
|
2023-04-28 06:08:53 +00:00
|
|
|
return Ok(editor.clone());
|
|
|
|
}
|
2023-07-05 12:57:09 +00:00
|
|
|
self.open_database(database_id).await
|
|
|
|
}
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-07-05 12:57:09 +00:00
|
|
|
pub async fn open_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
|
2023-06-06 09:19:53 +00:00
|
|
|
tracing::trace!("create new editor for database {}", database_id);
|
2023-04-28 06:08:53 +00:00
|
|
|
let mut editors = self.editors.write().await;
|
2023-07-14 05:37:13 +00:00
|
|
|
|
|
|
|
let wdb = self.get_workspace_database().await?;
|
|
|
|
let database = wdb
|
|
|
|
.get_database(database_id)
|
|
|
|
.await
|
|
|
|
.ok_or_else(FlowyError::record_not_found)?;
|
2023-04-28 06:08:53 +00:00
|
|
|
|
|
|
|
let editor = Arc::new(DatabaseEditor::new(database, self.task_scheduler.clone()).await?);
|
|
|
|
editors.insert(database_id.to_string(), editor.clone());
|
|
|
|
Ok(editor)
|
|
|
|
}
|
|
|
|
|
|
|
|
#[tracing::instrument(level = "debug", skip_all)]
|
|
|
|
pub async fn close_database_view<T: AsRef<str>>(&self, view_id: T) -> FlowyResult<()> {
|
2023-07-05 12:57:09 +00:00
|
|
|
// TODO(natan): defer closing the database if the sync is not finished
|
2023-04-28 06:08:53 +00:00
|
|
|
let view_id = view_id.as_ref();
|
2023-07-14 05:37:13 +00:00
|
|
|
let wdb = self.get_workspace_database().await?;
|
|
|
|
let database_id = wdb.get_database_id_with_view_id(view_id);
|
|
|
|
if database_id.is_some() {
|
|
|
|
wdb.close_database(database_id.as_ref().unwrap());
|
|
|
|
}
|
2023-04-28 06:08:53 +00:00
|
|
|
|
|
|
|
if let Some(database_id) = database_id {
|
|
|
|
let mut editors = self.editors.write().await;
|
|
|
|
if let Some(editor) = editors.get(&database_id) {
|
|
|
|
if editor.close_view_editor(view_id).await {
|
|
|
|
editor.close().await;
|
|
|
|
editors.remove(&database_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-06-05 01:42:11 +00:00
|
|
|
pub async fn delete_database_view(&self, view_id: &str) -> FlowyResult<()> {
|
|
|
|
let database = self.get_database_with_view_id(view_id).await?;
|
|
|
|
let _ = database.delete_database_view(view_id).await?;
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-04-28 06:08:53 +00:00
|
|
|
pub async fn duplicate_database(&self, view_id: &str) -> FlowyResult<Vec<u8>> {
|
2023-07-14 05:37:13 +00:00
|
|
|
let wdb = self.get_workspace_database().await?;
|
|
|
|
let data = wdb.get_database_duplicated_data(view_id).await?;
|
|
|
|
let json_bytes = data.to_json_bytes()?;
|
|
|
|
Ok(json_bytes)
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
|
|
|
|
2023-07-05 12:57:09 +00:00
|
|
|
/// Create a new database with the given data that can be deserialized to [DatabaseData].
|
2023-04-28 06:08:53 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip_all, err)]
|
|
|
|
pub async fn create_database_with_database_data(
|
|
|
|
&self,
|
|
|
|
view_id: &str,
|
|
|
|
data: Vec<u8>,
|
|
|
|
) -> FlowyResult<()> {
|
|
|
|
let mut database_data = DatabaseData::from_json_bytes(data)?;
|
|
|
|
database_data.view.id = view_id.to_string();
|
2023-07-14 05:37:13 +00:00
|
|
|
|
|
|
|
let wdb = self.get_workspace_database().await?;
|
|
|
|
let _ = wdb.create_database_with_data(database_data)?;
|
2023-04-28 06:08:53 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub async fn create_database_with_params(&self, params: CreateDatabaseParams) -> FlowyResult<()> {
|
2023-07-14 05:37:13 +00:00
|
|
|
let wdb = self.get_workspace_database().await?;
|
|
|
|
let _ = wdb.create_database(params)?;
|
2023-04-28 06:08:53 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-06-20 15:48:34 +00:00
|
|
|
/// A linked view is a view that is linked to existing database.
|
2023-06-06 09:19:53 +00:00
|
|
|
#[tracing::instrument(level = "trace", skip(self), err)]
|
2023-04-28 06:08:53 +00:00
|
|
|
pub async fn create_linked_view(
|
|
|
|
&self,
|
|
|
|
name: String,
|
2023-06-20 15:48:34 +00:00
|
|
|
layout: DatabaseLayout,
|
2023-04-28 06:08:53 +00:00
|
|
|
database_id: String,
|
2023-06-01 12:23:27 +00:00
|
|
|
database_view_id: String,
|
2023-04-28 06:08:53 +00:00
|
|
|
) -> FlowyResult<()> {
|
2023-07-14 05:37:13 +00:00
|
|
|
let wdb = self.get_workspace_database().await?;
|
|
|
|
let mut params = CreateViewParams::new(database_id.clone(), database_view_id, name, layout);
|
|
|
|
if let Some(database) = wdb.get_database(&database_id).await {
|
2023-07-31 02:12:26 +00:00
|
|
|
let (field, layout_setting) = DatabaseLayoutDepsResolver::new(database, layout)
|
|
|
|
.resolve_deps_when_create_database_linked_view();
|
|
|
|
if let Some(field) = field {
|
|
|
|
params = params.with_deps_fields(vec![field]);
|
|
|
|
}
|
|
|
|
if let Some(layout_setting) = layout_setting {
|
|
|
|
params = params.with_layout_setting(layout_setting);
|
2023-07-14 05:37:13 +00:00
|
|
|
}
|
|
|
|
};
|
|
|
|
wdb.create_database_linked_view(params).await?;
|
2023-04-28 06:08:53 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-05-31 06:08:54 +00:00
|
|
|
pub async fn import_csv(
|
|
|
|
&self,
|
|
|
|
view_id: String,
|
|
|
|
content: String,
|
|
|
|
format: CSVFormat,
|
|
|
|
) -> FlowyResult<ImportResult> {
|
|
|
|
let params = tokio::task::spawn_blocking(move || {
|
|
|
|
CSVImporter.import_csv_from_string(view_id, content, format)
|
|
|
|
})
|
|
|
|
.await
|
|
|
|
.map_err(internal_error)??;
|
2023-05-27 13:29:18 +00:00
|
|
|
let result = ImportResult {
|
|
|
|
database_id: params.database_id.clone(),
|
|
|
|
view_id: params.view_id.clone(),
|
|
|
|
};
|
2023-05-25 15:22:23 +00:00
|
|
|
self.create_database_with_params(params).await?;
|
2023-05-27 13:29:18 +00:00
|
|
|
Ok(result)
|
2023-05-25 15:22:23 +00:00
|
|
|
}
|
|
|
|
|
2023-05-31 06:08:54 +00:00
|
|
|
// will implement soon
|
|
|
|
pub async fn import_csv_from_file(
|
|
|
|
&self,
|
|
|
|
_file_path: String,
|
|
|
|
_format: CSVFormat,
|
|
|
|
) -> FlowyResult<()> {
|
2023-05-25 15:22:23 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2023-05-27 13:29:18 +00:00
|
|
|
pub async fn export_csv(&self, view_id: &str, style: CSVFormat) -> FlowyResult<String> {
|
2023-05-25 15:22:23 +00:00
|
|
|
let database = self.get_database_with_view_id(view_id).await?;
|
|
|
|
database.export_csv(style).await
|
|
|
|
}
|
|
|
|
|
2023-06-01 12:23:27 +00:00
|
|
|
pub async fn update_database_layout(
|
|
|
|
&self,
|
|
|
|
view_id: &str,
|
|
|
|
layout: DatabaseLayoutPB,
|
|
|
|
) -> FlowyResult<()> {
|
|
|
|
let database = self.get_database_with_view_id(view_id).await?;
|
|
|
|
database.update_view_layout(view_id, layout.into()).await
|
|
|
|
}
|
|
|
|
|
2023-07-05 12:57:09 +00:00
|
|
|
pub async fn get_database_snapshots(
|
|
|
|
&self,
|
|
|
|
view_id: &str,
|
2023-08-17 15:46:39 +00:00
|
|
|
limit: usize,
|
2023-07-05 12:57:09 +00:00
|
|
|
) -> FlowyResult<Vec<DatabaseSnapshotPB>> {
|
|
|
|
let database_id = self.get_database_id_with_view_id(view_id).await?;
|
2023-08-17 15:46:39 +00:00
|
|
|
let snapshots = self
|
2023-07-05 12:57:09 +00:00
|
|
|
.cloud_service
|
2023-08-17 15:46:39 +00:00
|
|
|
.get_collab_snapshots(&database_id, limit)
|
2023-07-05 12:57:09 +00:00
|
|
|
.await?
|
2023-08-17 15:46:39 +00:00
|
|
|
.into_iter()
|
2023-07-05 12:57:09 +00:00
|
|
|
.map(|snapshot| DatabaseSnapshotPB {
|
|
|
|
snapshot_id: snapshot.snapshot_id,
|
|
|
|
snapshot_desc: "".to_string(),
|
|
|
|
created_at: snapshot.created_at,
|
|
|
|
data: snapshot.data,
|
|
|
|
})
|
2023-08-17 15:46:39 +00:00
|
|
|
.collect::<Vec<_>>();
|
2023-07-05 12:57:09 +00:00
|
|
|
|
|
|
|
Ok(snapshots)
|
|
|
|
}
|
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
async fn get_workspace_database(&self) -> FlowyResult<Arc<WorkspaceDatabase>> {
|
|
|
|
let database = self.workspace_database.read().await;
|
2023-04-28 06:08:53 +00:00
|
|
|
match &*database {
|
2023-07-14 05:37:13 +00:00
|
|
|
None => Err(FlowyError::internal().context("Workspace database not initialized")),
|
|
|
|
Some(user_database) => Ok(user_database.clone()),
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
|
|
|
}
|
2023-07-05 12:57:09 +00:00
|
|
|
|
|
|
|
/// Only expose this method for testing
|
|
|
|
#[cfg(debug_assertions)]
|
|
|
|
pub fn get_cloud_service(&self) -> &Arc<dyn DatabaseCloudService> {
|
|
|
|
&self.cloud_service
|
|
|
|
}
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
/// Send notification to all clients that are listening to the given object.
|
|
|
|
fn subscribe_block_event(workspace_database: &WorkspaceDatabase) {
|
|
|
|
let mut block_event_rx = workspace_database.subscribe_block_event();
|
|
|
|
tokio::spawn(async move {
|
|
|
|
while let Ok(event) = block_event_rx.recv().await {
|
|
|
|
match event {
|
|
|
|
BlockEvent::DidFetchRow(row_details) => {
|
|
|
|
for row_detail in row_details {
|
|
|
|
tracing::trace!("Did fetch row: {:?}", row_detail.row.id);
|
|
|
|
let row_id = row_detail.row.id.clone();
|
|
|
|
let pb = DidFetchRowPB::from(row_detail);
|
|
|
|
send_notification(&row_id, DatabaseNotification::DidFetchRow)
|
|
|
|
.payload(pb)
|
|
|
|
.send();
|
|
|
|
}
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
});
|
2023-04-28 06:08:53 +00:00
|
|
|
}
|
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
// Bridges the collab-database crate to this crate's collab builder and
// cloud service; see the `DatabaseCollabService` impl below.
struct UserDatabaseCollabServiceImpl {
  collab_builder: Arc<AppFlowyCollabBuilder>,
  cloud_service: Arc<dyn DatabaseCloudService>,
}
|
2023-04-28 06:08:53 +00:00
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
impl DatabaseCollabService for UserDatabaseCollabServiceImpl {
|
|
|
|
fn get_collab_update(
|
|
|
|
&self,
|
|
|
|
object_id: &str,
|
2023-07-29 01:46:24 +00:00
|
|
|
object_ty: CollabType,
|
2023-07-14 05:37:13 +00:00
|
|
|
) -> CollabFuture<Result<CollabObjectUpdate, DatabaseError>> {
|
|
|
|
let object_id = object_id.to_string();
|
|
|
|
let weak_cloud_service = Arc::downgrade(&self.cloud_service);
|
|
|
|
Box::pin(async move {
|
|
|
|
match weak_cloud_service.upgrade() {
|
|
|
|
None => {
|
|
|
|
tracing::warn!("Cloud service is dropped");
|
|
|
|
Ok(vec![])
|
|
|
|
},
|
|
|
|
Some(cloud_service) => {
|
|
|
|
let updates = cloud_service
|
2023-07-29 01:46:24 +00:00
|
|
|
.get_collab_update(&object_id, object_ty)
|
|
|
|
.await?;
|
2023-07-14 05:37:13 +00:00
|
|
|
Ok(updates)
|
|
|
|
},
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
2023-05-15 14:16:05 +00:00
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
fn batch_get_collab_update(
|
|
|
|
&self,
|
|
|
|
object_ids: Vec<String>,
|
2023-07-29 01:46:24 +00:00
|
|
|
object_ty: CollabType,
|
2023-07-14 05:37:13 +00:00
|
|
|
) -> CollabFuture<Result<CollabObjectUpdateByOid, DatabaseError>> {
|
|
|
|
let weak_cloud_service = Arc::downgrade(&self.cloud_service);
|
|
|
|
Box::pin(async move {
|
|
|
|
match weak_cloud_service.upgrade() {
|
|
|
|
None => {
|
|
|
|
tracing::warn!("Cloud service is dropped");
|
|
|
|
Ok(CollabObjectUpdateByOid::default())
|
|
|
|
},
|
|
|
|
Some(cloud_service) => {
|
|
|
|
let updates = cloud_service
|
2023-07-29 01:46:24 +00:00
|
|
|
.batch_get_collab_updates(object_ids, object_ty)
|
|
|
|
.await?;
|
2023-07-14 05:37:13 +00:00
|
|
|
Ok(updates)
|
|
|
|
},
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
2023-05-15 14:16:05 +00:00
|
|
|
|
2023-07-14 05:37:13 +00:00
|
|
|
fn build_collab_with_config(
|
2023-05-15 14:16:05 +00:00
|
|
|
&self,
|
|
|
|
uid: i64,
|
|
|
|
object_id: &str,
|
2023-07-29 01:46:24 +00:00
|
|
|
object_type: CollabType,
|
|
|
|
collab_db: Weak<RocksCollabDB>,
|
2023-07-14 05:37:13 +00:00
|
|
|
collab_raw_data: CollabRawData,
|
2023-05-23 15:55:21 +00:00
|
|
|
config: &CollabPersistenceConfig,
|
2023-05-15 14:16:05 +00:00
|
|
|
) -> Arc<MutexCollab> {
|
2023-05-31 09:42:14 +00:00
|
|
|
self
|
2023-07-14 05:37:13 +00:00
|
|
|
.collab_builder
|
|
|
|
.build_with_config(
|
|
|
|
uid,
|
|
|
|
object_id,
|
2023-07-29 01:46:24 +00:00
|
|
|
object_type,
|
2023-07-14 05:37:13 +00:00
|
|
|
collab_db,
|
|
|
|
collab_raw_data,
|
|
|
|
config,
|
|
|
|
)
|
|
|
|
.unwrap()
|
2023-05-15 14:16:05 +00:00
|
|
|
}
|
|
|
|
}
|