chore: cell data operation (#1656)

* chore: get all cells for specific field

* chore: auto format clippy warnings

* chore: get cells for specific field type
Nathan.fooo 2023-01-05 21:27:21 +08:00 committed by GitHub
parent 7949d3fe4a
commit d4946f1194
94 changed files with 673 additions and 609 deletions
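
The bulk of this diff is a mechanical clippy cleanup: an expression that already propagates its error with `?` does not need its result bound to `let _ =`, since `?` returns early on `Err` and the remaining `Ok` value is usually just `()`. A minimal, self-contained sketch of the before/after pattern (the `save` function is made up for illustration):

```rust
fn save(value: i32) -> Result<(), String> {
    // Hypothetical fallible operation, used only to illustrate the pattern.
    if value >= 0 {
        Ok(())
    } else {
        Err(format!("negative value: {value}"))
    }
}

fn run() -> Result<(), String> {
    // Before: binding the already-unwrapped `Ok(())` to `_` is redundant noise.
    let _ = save(1)?;
    // After: `?` already handles the error path; the unit value needs no binding.
    save(2)?;
    Ok(())
}

fn main() {
    run().expect("run should succeed");
}
```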

View File

@ -21,13 +21,13 @@ pub struct SubscribeObject {
impl std::fmt::Display for SubscribeObject {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let _ = f.write_str(&format!("{} changed: ", &self.source))?;
f.write_str(&format!("{} changed: ", &self.source))?;
if let Some(payload) = &self.payload {
let _ = f.write_str(&format!("send {} payload", payload.len()))?;
f.write_str(&format!("send {} payload", payload.len()))?;
}
if let Some(payload) = &self.error {
let _ = f.write_str(&format!("receive {} error", payload.len()))?;
f.write_str(&format!("receive {} error", payload.len()))?;
}
Ok(())

View File

@ -35,7 +35,7 @@ pub fn init(storage_path: &str) -> Result<Database, io::Error> {
let pool_config = PoolConfig::default();
let database = Database::new(storage_path, DB_NAME, pool_config).map_err(as_io_error)?;
let conn = database.get_connection().map_err(as_io_error)?;
let _ = embedded_migrations::run(&*conn).map_err(as_io_error)?;
embedded_migrations::run(&*conn).map_err(as_io_error)?;
Ok(database)
}

View File

@ -63,12 +63,9 @@ pub fn category_from_str(type_str: String) -> TypeCategory {
let cache: ProtoCache = serde_json::from_str(&s).unwrap();
CACHE_INFO
.entry(TypeCategory::Protobuf)
.or_insert(vec![])
.or_default()
.extend(cache.structs);
CACHE_INFO
.entry(TypeCategory::Enum)
.or_insert(vec![])
.extend(cache.enums);
CACHE_INFO.entry(TypeCategory::Enum).or_default().extend(cache.enums);
}
}
}
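
The same pass swaps `.or_insert(vec![])` for `.or_default()` on map entries, letting the entry API build the empty `Vec` from `Default` instead of an explicit literal. A small standalone sketch of the idiom:

```rust
use std::collections::HashMap;

fn main() {
    let mut cache: HashMap<&str, Vec<String>> = HashMap::new();

    // Equivalent to `.entry(..).or_insert(vec![])`, but spelled via the type's Default impl.
    cache.entry("protobuf").or_default().push("SubscribeObject".to_string());
    cache.entry("protobuf").or_default().push("CellPB".to_string());

    assert_eq!(cache["protobuf"].len(), 2);
}
```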

View File

@ -82,7 +82,7 @@ impl RevisionObjectDeserializer for DocumentRevisionSerde {
fn deserialize_revisions(_object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
let mut tree = NodeTree::new(make_tree_context());
let transaction = make_transaction_from_revisions(&revisions)?;
let _ = tree.apply_transaction(transaction)?;
tree.apply_transaction(transaction)?;
let document = Document::new(tree);
Result::<Document, FlowyError>::Ok(document)
}
@ -106,7 +106,7 @@ impl RevisionMergeable for DocumentRevisionMergeable {
pub fn make_transaction_from_revisions(revisions: &[Revision]) -> FlowyResult<Transaction> {
let mut transaction = Transaction::new();
for revision in revisions {
let _ = transaction.compose(Transaction::from_bytes(&revision.bytes)?)?;
transaction.compose(Transaction::from_bytes(&revision.bytes)?)?;
}
Ok(transaction)
}

View File

@ -17,8 +17,8 @@ impl Serialize for Document {
S: Serializer,
{
let mut map = serializer.serialize_map(Some(1))?;
let _ = map.serialize_key("document")?;
let _ = map.serialize_value(&DocumentContentSerializer(self))?;
map.serialize_key("document")?;
map.serialize_value(&DocumentContentSerializer(self))?;
map.end()
}
}
@ -312,7 +312,7 @@ impl<'a> Serialize for DocumentContentSerializer<'a> {
let mut seq = serializer.serialize_seq(Some(children.len()))?;
for child in children {
if let Some(node_data) = get_document_node_data(child) {
let _ = seq.serialize_element(&node_data)?;
seq.serialize_element(&node_data)?;
}
}
seq.end()

View File

@ -50,7 +50,7 @@ impl AppFlowyDocumentEditor {
.command_sender
.send(Command::ComposeTransaction { transaction, ret })
.await;
let _ = rx.await.map_err(internal_error)??;
rx.await.map_err(internal_error)??;
Ok(())
}
@ -115,7 +115,7 @@ impl DocumentEditor for Arc<AppFlowyDocumentEditor> {
let this = self.clone();
FutureResult::new(async move {
let transaction = DocumentTransaction::from_bytes(data)?;
let _ = this.apply_transaction(transaction.into()).await?;
this.apply_transaction(transaction.into()).await?;
Ok(())
})
}

View File

@ -2,7 +2,7 @@ use crate::errors::ErrorCode;
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
use std::convert::TryInto;
#[derive(PartialEq, Debug, ProtoBuf_Enum, Clone)]
#[derive(PartialEq, Eq, Debug, ProtoBuf_Enum, Clone)]
pub enum ExportType {
Text = 0,
Markdown = 1,
@ -79,7 +79,7 @@ pub struct ExportPayloadPB {
pub document_version: DocumentVersionPB,
}
#[derive(PartialEq, Debug, ProtoBuf_Enum, Clone)]
#[derive(PartialEq, Eq, Debug, ProtoBuf_Enum, Clone)]
pub enum DocumentVersionPB {
/// This version's content of the document is built from `Delta`. It uses
/// `DeltaDocumentEditor`.
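
Several protobuf types here also gain an `Eq` derive next to `PartialEq`, which is what clippy's `derive_partial_eq_without_eq` lint suggests when every field supports full equality. A minimal sketch of why the extra derive is useful, with an illustrative enum (the real types carry `ProtoBuf_Enum` and other derives as well):

```rust
use std::collections::HashSet;

// Illustrative stand-in only.
#[derive(PartialEq, Eq, Hash, Debug, Clone)]
enum ExportKind {
    Text,
    Markdown,
}

fn main() {
    // `Eq` (together with `Hash`) is what allows the type to act as a set or map key.
    let mut kinds = HashSet::new();
    kinds.insert(ExportKind::Text);
    assert!(kinds.contains(&ExportKind::Text));
    assert!(!kinds.contains(&ExportKind::Markdown));
}
```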

View File

@ -26,7 +26,7 @@ pub(crate) async fn apply_edit_handler(
manager: AFPluginState<Arc<DocumentManager>>,
) -> Result<(), FlowyError> {
let params: EditParams = data.into_inner().try_into()?;
let _ = manager.apply_edit(params).await?;
manager.apply_edit(params).await?;
Ok(())
}

View File

@ -109,7 +109,7 @@ impl DocumentManager {
/// Called immediately after the application launched with the user sign in/sign up.
#[tracing::instrument(level = "trace", skip_all, err)]
pub async fn initialize(&self, user_id: &str) -> FlowyResult<()> {
let _ = self.persistence.initialize(user_id)?;
self.persistence.initialize(user_id)?;
listen_ws_state_changed(self.rev_web_socket.clone(), self.editor_map.clone());
Ok(())
}
@ -138,7 +138,7 @@ impl DocumentManager {
pub async fn apply_edit(&self, params: EditParams) -> FlowyResult<()> {
let editor = self.get_document_editor(&params.doc_id).await?;
let _ = editor.compose_local_operations(Bytes::from(params.operations)).await?;
editor.compose_local_operations(Bytes::from(params.operations)).await?;
Ok(())
}
@ -147,7 +147,7 @@ impl DocumentManager {
let db_pool = self.persistence.database.db_pool()?;
// Maybe we could save the document to disk without creating the RevisionManager
let rev_manager = self.make_rev_manager(&doc_id, db_pool)?;
let _ = rev_manager.reset_object(revisions).await?;
rev_manager.reset_object(revisions).await?;
Ok(())
}

View File

@ -79,7 +79,7 @@ impl DeltaDocumentEditor {
ret,
};
let _ = self.edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
rx.await.map_err(internal_error)??;
Ok(())
}
@ -87,7 +87,7 @@ impl DeltaDocumentEditor {
let (ret, rx) = oneshot::channel::<CollaborateResult<()>>();
let msg = EditorCommand::Delete { interval, ret };
let _ = self.edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
rx.await.map_err(internal_error)??;
Ok(())
}
@ -99,7 +99,7 @@ impl DeltaDocumentEditor {
ret,
};
let _ = self.edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
rx.await.map_err(internal_error)??;
Ok(())
}
@ -111,7 +111,7 @@ impl DeltaDocumentEditor {
ret,
};
let _ = self.edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
rx.await.map_err(internal_error)??;
Ok(())
}
@ -133,7 +133,7 @@ impl DeltaDocumentEditor {
let (ret, rx) = oneshot::channel();
let msg = EditorCommand::Undo { ret };
let _ = self.edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
rx.await.map_err(internal_error)??;
Ok(())
}
@ -141,7 +141,7 @@ impl DeltaDocumentEditor {
let (ret, rx) = oneshot::channel();
let msg = EditorCommand::Redo { ret };
let _ = self.edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
rx.await.map_err(internal_error)??;
Ok(())
}
}
@ -193,7 +193,7 @@ impl DocumentEditor for Arc<DeltaDocumentEditor> {
let msg = EditorCommand::ComposeLocalOperations { operations, ret };
let _ = edit_cmd_tx.send(msg).await;
let _ = rx.await.map_err(internal_error)??;
rx.await.map_err(internal_error)??;
Ok(())
})
}

View File

@ -69,7 +69,7 @@ impl EditDocumentQueue {
match command {
EditorCommand::ComposeLocalOperations { operations, ret } => {
let mut document = self.document.write().await;
let _ = document.compose_operations(operations.clone())?;
document.compose_operations(operations.clone())?;
let md5 = document.document_md5();
drop(document);
let _ = self.save_local_operations(operations, md5).await?;
@ -77,14 +77,14 @@ impl EditDocumentQueue {
}
EditorCommand::ComposeRemoteOperation { client_operations, ret } => {
let mut document = self.document.write().await;
let _ = document.compose_operations(client_operations.clone())?;
document.compose_operations(client_operations.clone())?;
let md5 = document.document_md5();
drop(document);
let _ = ret.send(Ok(md5.into()));
}
EditorCommand::ResetOperations { operations, ret } => {
let mut document = self.document.write().await;
let _ = document.set_operations(operations);
document.set_operations(operations);
let md5 = document.document_md5();
drop(document);
let _ = ret.send(Ok(md5.into()));

View File

@ -172,8 +172,7 @@ impl ConflictResolver<DeltaDocumentResolveOperations> for DocumentConflictResolv
let operations = operations.into_inner();
Box::pin(async move {
let (ret, rx) = oneshot::channel();
let _ = tx
.send(EditorCommand::ResetOperations { operations, ret })
tx.send(EditorCommand::ResetOperations { operations, ret })
.await
.map_err(internal_error)?;
let md5 = rx

View File

@ -25,7 +25,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let _ = DeltaRevisionSql::create(revision_records, &*conn)?;
DeltaRevisionSql::create(revision_records, &*conn)?;
Ok(())
}
@ -39,7 +39,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let records = DeltaRevisionSql::read(&self.user_id, object_id, rev_ids, &*conn)?;
let records = DeltaRevisionSql::read(&self.user_id, object_id, rev_ids, &conn)?;
Ok(records)
}
@ -55,7 +55,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = conn.immediate_transaction::<_, FlowyError, _>(|| {
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = DeltaRevisionSql::update(changeset, conn)?;
}
@ -66,7 +66,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
fn delete_revision_records(&self, object_id: &str, rev_ids: Option<Vec<i64>>) -> Result<(), Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = DeltaRevisionSql::delete(object_id, rev_ids, conn)?;
DeltaRevisionSql::delete(object_id, rev_ids, conn)?;
Ok(())
}
@ -78,8 +78,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDeltaDocumentRevisionPersi
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
let _ = DeltaRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
let _ = DeltaRevisionSql::create(inserted_records, &*conn)?;
DeltaRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
DeltaRevisionSql::create(inserted_records, &*conn)?;
Ok(())
})
}

View File

@ -24,7 +24,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let _ = DocumentRevisionSql::create(revision_records, &*conn)?;
DocumentRevisionSql::create(revision_records, &*conn)?;
Ok(())
}
@ -38,7 +38,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let records = DocumentRevisionSql::read(&self.user_id, object_id, rev_ids, &*conn)?;
let records = DocumentRevisionSql::read(&self.user_id, object_id, rev_ids, &conn)?;
Ok(records)
}
@ -54,7 +54,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = conn.immediate_transaction::<_, FlowyError, _>(|| {
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = DocumentRevisionSql::update(changeset, conn)?;
}
@ -65,7 +65,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
fn delete_revision_records(&self, object_id: &str, rev_ids: Option<Vec<i64>>) -> Result<(), Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = DocumentRevisionSql::delete(object_id, rev_ids, conn)?;
DocumentRevisionSql::delete(object_id, rev_ids, conn)?;
Ok(())
}
@ -77,8 +77,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteDocumentRevisionPersistenc
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
let _ = DocumentRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
let _ = DocumentRevisionSql::create(inserted_records, &*conn)?;
DocumentRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
DocumentRevisionSql::create(inserted_records, &*conn)?;
Ok(())
})
}

View File

@ -45,7 +45,7 @@ impl std::convert::From<TrashPB> for TrashRevision {
}
}
}
#[derive(PartialEq, Debug, Default, ProtoBuf, Clone)]
#[derive(PartialEq, Eq, Debug, Default, ProtoBuf, Clone)]
pub struct RepeatedTrashPB {
#[pb(index = 1)]
pub items: Vec<TrashPB>,
@ -158,7 +158,7 @@ impl std::convert::From<Vec<TrashRevision>> for RepeatedTrashIdPB {
}
}
#[derive(PartialEq, ProtoBuf, Default, Debug, Clone)]
#[derive(PartialEq, Eq, ProtoBuf, Default, Debug, Clone)]
pub struct TrashIdPB {
#[pb(index = 1)]
pub id: String,

View File

@ -41,7 +41,7 @@ impl std::convert::From<WorkspaceRevision> for WorkspacePB {
}
}
}
#[derive(PartialEq, Debug, Default, ProtoBuf)]
#[derive(PartialEq, Eq, Debug, Default, ProtoBuf)]
pub struct RepeatedWorkspacePB {
#[pb(index = 1)]
pub items: Vec<WorkspacePB>,

View File

@ -166,7 +166,7 @@ impl FolderManager {
}
tracing::debug!("Initialize folder editor");
let folder_id = FolderId::new(user_id);
let _ = self.persistence.initialize(user_id, &folder_id).await?;
self.persistence.initialize(user_id, &folder_id).await?;
let pool = self.persistence.db_pool()?;
let object_id = folder_id.as_ref();
@ -185,8 +185,8 @@ impl FolderManager {
let folder_editor = FolderEditor::new(user_id, &folder_id, token, rev_manager, self.web_socket.clone()).await?;
*self.folder_editor.write().await = Some(Arc::new(folder_editor));
let _ = self.app_controller.initialize()?;
let _ = self.view_controller.initialize()?;
self.app_controller.initialize()?;
self.view_controller.initialize()?;
write_guard.insert(user_id.to_owned(), true);
Ok(())
}
@ -235,7 +235,7 @@ impl DefaultFolderBuilder {
if index == 0 {
let _ = view_controller.set_latest_view(&view.id);
let layout_type = ViewLayoutTypePB::from(view.layout.clone());
let _ = view_controller
view_controller
.create_view(&view.id, view_data_type, layout_type, view_data)
.await?;
}
@ -243,7 +243,7 @@ impl DefaultFolderBuilder {
}
let folder = FolderPad::new(vec![workspace_rev.clone()], vec![])?;
let folder_id = FolderId::new(user_id);
let _ = persistence.save_folder(user_id, &folder_id, folder).await?;
persistence.save_folder(user_id, &folder_id, folder).await?;
let repeated_workspace = RepeatedWorkspacePB {
items: vec![workspace_rev.into()],
};

View File

@ -50,8 +50,7 @@ impl AppController {
}
pub(crate) async fn create_app_on_local(&self, app: AppRevision) -> Result<AppPB, FlowyError> {
let _ = self
.persistence
self.persistence
.begin_transaction(|transaction| {
let _ = transaction.create_app(app.clone())?;
let _ = notify_apps_changed(&app.workspace_id, self.trash_controller.clone(), &transaction)?;
@ -75,7 +74,7 @@ impl AppController {
Ok(app)
})
.await?;
let _ = self.read_app_on_server(params)?;
self.read_app_on_server(params)?;
Ok(app)
}
@ -86,7 +85,7 @@ impl AppController {
let app: AppPB = self
.persistence
.begin_transaction(|transaction| {
let _ = transaction.update_app(changeset)?;
transaction.update_app(changeset)?;
let app = transaction.read_app(&app_id)?;
Ok(app)
})
@ -95,13 +94,12 @@ impl AppController {
send_dart_notification(&app_id, FolderNotification::AppUpdated)
.payload(app)
.send();
let _ = self.update_app_on_server(params)?;
self.update_app_on_server(params)?;
Ok(())
}
pub(crate) async fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
let _ = self
.persistence
self.persistence
.begin_transaction(|transaction| {
let _ = transaction.move_app(app_id, from, to)?;
let app = transaction.read_app(app_id)?;
@ -211,7 +209,7 @@ async fn handle_trash_event(
.begin_transaction(|transaction| {
for identifier in identifiers.items {
let app = transaction.read_app(&identifier.id)?;
let _ = notify_apps_changed(&app.workspace_id, trash_controller.clone(), &transaction)?;
notify_apps_changed(&app.workspace_id, trash_controller.clone(), &transaction)?;
}
Ok(())
})
@ -229,7 +227,7 @@ async fn handle_trash_event(
}
for notify_id in notify_ids {
let _ = notify_apps_changed(&notify_id, trash_controller.clone(), &transaction)?;
notify_apps_changed(&notify_id, trash_controller.clone(), &transaction)?;
}
Ok(())
})

View File

@ -30,7 +30,7 @@ pub(crate) async fn delete_app_handler(
.map(|app_rev| app_rev.into())
.collect::<Vec<TrashRevision>>();
let _ = trash_controller.add(trash).await?;
trash_controller.add(trash).await?;
Ok(())
}
@ -40,7 +40,7 @@ pub(crate) async fn update_app_handler(
controller: AFPluginState<Arc<AppController>>,
) -> Result<(), FlowyError> {
let params: UpdateAppParams = data.into_inner().try_into()?;
let _ = controller.update_app(params).await?;
controller.update_app(params).await?;
Ok(())
}

View File

@ -90,7 +90,7 @@ impl FolderMigration {
if KV::get_bool(&key) {
return Ok(());
}
let _ = self.migration_folder_rev_struct(folder_id).await?;
self.migration_folder_rev_struct(folder_id).await?;
KV::set_bool(&key, true);
// tracing::info!("Run folder v2 migration");
Ok(())
@ -101,7 +101,7 @@ impl FolderMigration {
if KV::get_bool(&key) {
return Ok(());
}
let _ = self.migration_folder_rev_struct(folder_id).await?;
self.migration_folder_rev_struct(folder_id).await?;
KV::set_bool(&key, true);
tracing::trace!("Run folder v3 migration");
Ok(())

View File

@ -103,8 +103,8 @@ impl FolderPersistence {
self.save_folder(user_id, folder_id, migrated_folder).await?;
}
let _ = migrations.run_v2_migration(folder_id).await?;
let _ = migrations.run_v3_migration(folder_id).await?;
migrations.run_v2_migration(folder_id).await?;
migrations.run_v3_migration(folder_id).await?;
Ok(())
}

View File

@ -22,7 +22,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let _ = FolderRevisionSql::create(revision_records, &*conn)?;
FolderRevisionSql::create(revision_records, &*conn)?;
Ok(())
}
@ -36,7 +36,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let records = FolderRevisionSql::read(&self.user_id, object_id, rev_ids, &*conn)?;
let records = FolderRevisionSql::read(&self.user_id, object_id, rev_ids, &conn)?;
Ok(records)
}
@ -52,7 +52,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = conn.immediate_transaction::<_, FlowyError, _>(|| {
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = FolderRevisionSql::update(changeset, conn)?;
}
@ -63,7 +63,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
fn delete_revision_records(&self, object_id: &str, rev_ids: Option<Vec<i64>>) -> Result<(), Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = FolderRevisionSql::delete(object_id, rev_ids, conn)?;
FolderRevisionSql::delete(object_id, rev_ids, conn)?;
Ok(())
}
@ -75,8 +75,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteFolderRevisionPersistence
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
let _ = FolderRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
let _ = FolderRevisionSql::create(inserted_records, &*conn)?;
FolderRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
FolderRevisionSql::create(inserted_records, &*conn)?;
Ok(())
})
}

View File

@ -15,47 +15,47 @@ pub struct V1Transaction<'a>(pub &'a DBConnection);
impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
fn create_workspace(&self, user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
let _ = WorkspaceTableSql::create_workspace(user_id, workspace_rev, &*self.0)?;
WorkspaceTableSql::create_workspace(user_id, workspace_rev, &*self.0)?;
Ok(())
}
fn read_workspaces(&self, user_id: &str, workspace_id: Option<String>) -> FlowyResult<Vec<WorkspaceRevision>> {
let tables = WorkspaceTableSql::read_workspaces(user_id, workspace_id, &*self.0)?;
let tables = WorkspaceTableSql::read_workspaces(user_id, workspace_id, self.0)?;
let workspaces = tables.into_iter().map(WorkspaceRevision::from).collect::<Vec<_>>();
Ok(workspaces)
}
fn update_workspace(&self, changeset: WorkspaceChangeset) -> FlowyResult<()> {
WorkspaceTableSql::update_workspace(changeset, &*self.0)
WorkspaceTableSql::update_workspace(changeset, self.0)
}
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
WorkspaceTableSql::delete_workspace(workspace_id, &*self.0)
WorkspaceTableSql::delete_workspace(workspace_id, self.0)
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
let _ = AppTableSql::create_app(app_rev, &*self.0)?;
AppTableSql::create_app(app_rev, &*self.0)?;
Ok(())
}
fn update_app(&self, changeset: AppChangeset) -> FlowyResult<()> {
let _ = AppTableSql::update_app(changeset, &*self.0)?;
AppTableSql::update_app(changeset, &*self.0)?;
Ok(())
}
fn read_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app_revision: AppRevision = AppTableSql::read_app(app_id, &*self.0)?.into();
let app_revision: AppRevision = AppTableSql::read_app(app_id, self.0)?.into();
Ok(app_revision)
}
fn read_workspace_apps(&self, workspace_id: &str) -> FlowyResult<Vec<AppRevision>> {
let tables = AppTableSql::read_workspace_apps(workspace_id, &*self.0)?;
let tables = AppTableSql::read_workspace_apps(workspace_id, self.0)?;
let apps = tables.into_iter().map(AppRevision::from).collect::<Vec<_>>();
Ok(apps)
}
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app_revision: AppRevision = AppTableSql::delete_app(app_id, &*self.0)?.into();
let app_revision: AppRevision = AppTableSql::delete_app(app_id, self.0)?.into();
Ok(app_revision)
}
@ -64,29 +64,29 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
let _ = ViewTableSql::create_view(view_rev, &*self.0)?;
ViewTableSql::create_view(view_rev, &*self.0)?;
Ok(())
}
fn read_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, &*self.0)?.into();
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, self.0)?.into();
Ok(view_revision)
}
fn read_views(&self, belong_to_id: &str) -> FlowyResult<Vec<ViewRevision>> {
let tables = ViewTableSql::read_views(belong_to_id, &*self.0)?;
let tables = ViewTableSql::read_views(belong_to_id, self.0)?;
let views = tables.into_iter().map(ViewRevision::from).collect::<Vec<_>>();
Ok(views)
}
fn update_view(&self, changeset: ViewChangeset) -> FlowyResult<()> {
let _ = ViewTableSql::update_view(changeset, &*self.0)?;
ViewTableSql::update_view(changeset, &*self.0)?;
Ok(())
}
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, &*self.0)?.into();
let _ = ViewTableSql::delete_view(view_id, &*self.0)?;
let view_revision: ViewRevision = ViewTableSql::read_view(view_id, self.0)?.into();
ViewTableSql::delete_view(view_id, &*self.0)?;
Ok(view_revision)
}
@ -95,15 +95,15 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
let _ = TrashTableSql::create_trash(trashes, &*self.0)?;
TrashTableSql::create_trash(trashes, &*self.0)?;
Ok(())
}
fn read_trash(&self, trash_id: Option<String>) -> FlowyResult<Vec<TrashRevision>> {
match trash_id {
None => TrashTableSql::read_all(&*self.0),
None => TrashTableSql::read_all(self.0),
Some(trash_id) => {
let trash_revision: TrashRevision = TrashTableSql::read(&trash_id, &*self.0)?.into();
let trash_revision: TrashRevision = TrashTableSql::read(&trash_id, self.0)?.into();
Ok(vec![trash_revision])
}
}
@ -111,10 +111,10 @@ impl<'a> FolderPersistenceTransaction for V1Transaction<'a> {
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
match trash_ids {
None => TrashTableSql::delete_all(&*self.0),
None => TrashTableSql::delete_all(self.0),
Some(trash_ids) => {
for trash_id in &trash_ids {
let _ = TrashTableSql::delete_trash(trash_id, &*self.0)?;
TrashTableSql::delete_trash(trash_id, &*self.0)?;
}
Ok(())
}

View File

@ -56,7 +56,7 @@ impl WorkspaceTableSql {
}
}
#[derive(PartialEq, Clone, Debug, Queryable, Identifiable, Insertable)]
#[derive(PartialEq, Eq, Clone, Debug, Queryable, Identifiable, Insertable)]
#[table_name = "workspace_table"]
pub struct WorkspaceTable {
pub id: String,

View File

@ -9,7 +9,7 @@ use std::sync::Arc;
impl FolderPersistenceTransaction for FolderEditor {
fn create_workspace(&self, _user_id: &str, workspace_rev: WorkspaceRevision) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_workspace(workspace_rev)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
@ -25,21 +25,21 @@ impl FolderPersistenceTransaction for FolderEditor {
.write()
.update_workspace(&changeset.id, changeset.name, changeset.desc)?
{
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
fn delete_workspace(&self, workspace_id: &str) -> FlowyResult<()> {
if let Some(change) = self.folder.write().delete_workspace(workspace_id)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
fn create_app(&self, app_rev: AppRevision) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_app(app_rev)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
@ -50,7 +50,7 @@ impl FolderPersistenceTransaction for FolderEditor {
.write()
.update_app(&changeset.id, changeset.name, changeset.desc)?
{
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
@ -73,21 +73,21 @@ impl FolderPersistenceTransaction for FolderEditor {
fn delete_app(&self, app_id: &str) -> FlowyResult<AppRevision> {
let app = self.folder.read().read_app(app_id)?;
if let Some(change) = self.folder.write().delete_app(app_id)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(app)
}
fn move_app(&self, app_id: &str, from: usize, to: usize) -> FlowyResult<()> {
if let Some(change) = self.folder.write().move_app(app_id, from, to)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
fn create_view(&self, view_rev: ViewRevision) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_view(view_rev)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
@ -108,7 +108,7 @@ impl FolderPersistenceTransaction for FolderEditor {
.write()
.update_view(&changeset.id, changeset.name, changeset.desc, changeset.modified_time)?
{
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
@ -116,21 +116,21 @@ impl FolderPersistenceTransaction for FolderEditor {
fn delete_view(&self, view_id: &str) -> FlowyResult<ViewRevision> {
let view = self.folder.read().read_view(view_id)?;
if let Some(change) = self.folder.write().delete_view(&view.app_id, view_id)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(view)
}
fn move_view(&self, view_id: &str, from: usize, to: usize) -> FlowyResult<()> {
if let Some(change) = self.folder.write().move_view(view_id, from, to)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
fn create_trash(&self, trashes: Vec<TrashRevision>) -> FlowyResult<()> {
if let Some(change) = self.folder.write().create_trash(trashes)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}
@ -142,7 +142,7 @@ impl FolderPersistenceTransaction for FolderEditor {
fn delete_trash(&self, trash_ids: Option<Vec<String>>) -> FlowyResult<()> {
if let Some(change) = self.folder.write().delete_trash(trash_ids)? {
let _ = self.apply_change(change)?;
self.apply_change(change)?;
}
Ok(())
}

View File

@ -39,7 +39,7 @@ impl TrashController {
.persistence
.begin_transaction(|transaction| {
let mut repeated_trash = transaction.read_trash(Some(trash_id.to_owned()))?;
let _ = transaction.delete_trash(Some(vec![trash_id.to_owned()]))?;
transaction.delete_trash(Some(vec![trash_id.to_owned()]))?;
notify_trash_changed(transaction.read_trash(None)?);
if repeated_trash.is_empty() {
@ -54,14 +54,14 @@ impl TrashController {
ty: trash.ty.into(),
};
let _ = self.delete_trash_on_server(RepeatedTrashIdPB {
self.delete_trash_on_server(RepeatedTrashIdPB {
items: vec![identifier.clone()],
delete_all: false,
})?;
tracing::Span::current().record("putback", &format!("{:?}", &identifier).as_str());
let _ = self.notify.send(TrashEvent::Putback(vec![identifier].into(), tx));
let _ = rx.recv().await.unwrap()?;
rx.recv().await.unwrap()?;
Ok(())
}
@ -82,7 +82,7 @@ impl TrashController {
let _ = rx.recv().await;
notify_trash_changed(RepeatedTrashPB { items: vec![] });
let _ = self.delete_all_trash_on_server().await?;
self.delete_all_trash_on_server().await?;
Ok(())
}
@ -94,23 +94,23 @@ impl TrashController {
.await?
.into();
let _ = self.delete_with_identifiers(all_trash_identifiers).await?;
self.delete_with_identifiers(all_trash_identifiers).await?;
notify_trash_changed(RepeatedTrashPB { items: vec![] });
let _ = self.delete_all_trash_on_server().await?;
self.delete_all_trash_on_server().await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub async fn delete(&self, trash_identifiers: RepeatedTrashIdPB) -> FlowyResult<()> {
let _ = self.delete_with_identifiers(trash_identifiers.clone()).await?;
self.delete_with_identifiers(trash_identifiers.clone()).await?;
let trash_revs = self
.persistence
.begin_transaction(|transaction| transaction.read_trash(None))
.await?;
notify_trash_changed(trash_revs);
let _ = self.delete_trash_on_server(trash_identifiers)?;
self.delete_trash_on_server(trash_identifiers)?;
Ok(())
}
@ -128,8 +128,7 @@ impl TrashController {
Err(e) => log::error!("{}", e),
},
}
let _ = self
.persistence
self.persistence
.begin_transaction(|transaction| {
let ids = trash_identifiers
.items
@ -167,8 +166,7 @@ impl TrashController {
.as_str(),
);
let _ = self
.persistence
self.persistence
.begin_transaction(|transaction| {
let _ = transaction.create_trash(trash_revs.clone())?;
let _ = self.create_trash_on_server(trash_revs);
@ -178,7 +176,7 @@ impl TrashController {
})
.await?;
let _ = self.notify.send(TrashEvent::NewTrash(identifiers.into(), tx));
let _ = rx.recv().await.unwrap()?;
rx.recv().await.unwrap()?;
Ok(())
}
@ -196,7 +194,7 @@ impl TrashController {
.map(|trash_rev| trash_rev.into())
.collect();
let _ = self.read_trash_on_server()?;
self.read_trash_on_server()?;
Ok(RepeatedTrashPB { items })
}
@ -255,7 +253,7 @@ impl TrashController {
tracing::debug!("Remote trash count: {}", trash_rev.len());
let result = persistence
.begin_transaction(|transaction| {
let _ = transaction.create_trash(trash_rev.clone())?;
transaction.create_trash(trash_rev.clone())?;
transaction.read_trash(None)
})
.await;

View File

@ -19,7 +19,7 @@ pub(crate) async fn putback_trash_handler(
identifier: AFPluginData<TrashIdPB>,
controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
let _ = controller.putback(&identifier.id).await?;
controller.putback(&identifier.id).await?;
Ok(())
}
@ -28,7 +28,7 @@ pub(crate) async fn delete_trash_handler(
identifiers: AFPluginData<RepeatedTrashIdPB>,
controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
let _ = controller.delete(identifiers.into_inner()).await?;
controller.delete(identifiers.into_inner()).await?;
Ok(())
}
@ -36,7 +36,7 @@ pub(crate) async fn delete_trash_handler(
pub(crate) async fn restore_all_trash_handler(
controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
let _ = controller.restore_all_trash().await?;
controller.restore_all_trash().await?;
Ok(())
}
@ -44,6 +44,6 @@ pub(crate) async fn restore_all_trash_handler(
pub(crate) async fn delete_all_trash_handler(
controller: AFPluginState<Arc<TrashController>>,
) -> Result<(), FlowyError> {
let _ = controller.delete_all_trash().await?;
controller.delete_all_trash().await?;
Ok(())
}

View File

@ -81,18 +81,17 @@ impl ViewController {
params.layout.clone(),
)
.await?;
let _ = self
.create_view(
&params.view_id,
params.data_format.clone(),
params.layout.clone(),
delta_data,
)
.await?;
self.create_view(
&params.view_id,
params.data_format.clone(),
params.layout.clone(),
delta_data,
)
.await?;
};
let view_rev = self.create_view_on_server(params).await?;
let _ = self.create_view_on_local(view_rev.clone()).await?;
self.create_view_on_local(view_rev.clone()).await?;
Ok(view_rev)
}
@ -109,7 +108,7 @@ impl ViewController {
}
let user_id = self.user.user_id()?;
let processor = self.get_data_processor(data_type)?;
let _ = processor.create_view(&user_id, view_id, layout_type, view_data).await?;
processor.create_view(&user_id, view_id, layout_type, view_data).await?;
Ok(())
}
@ -118,8 +117,8 @@ impl ViewController {
self.persistence
.begin_transaction(|transaction| {
let belong_to_id = view_rev.app_id.clone();
let _ = transaction.create_view(view_rev)?;
let _ = notify_views_changed(&belong_to_id, trash_controller, &transaction)?;
transaction.create_view(view_rev)?;
notify_views_changed(&belong_to_id, trash_controller, &transaction)?;
Ok(())
})
.await
@ -196,7 +195,7 @@ impl ViewController {
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn close_view(&self, view_id: &str) -> Result<(), FlowyError> {
let processor = self.get_data_processor_from_view_id(view_id).await?;
let _ = processor.close_view(view_id).await?;
processor.close_view(view_id).await?;
Ok(())
}
@ -231,14 +230,13 @@ impl ViewController {
.send();
let processor = self.get_data_processor_from_view_id(&view_id).await?;
let _ = processor.close_view(&view_id).await?;
processor.close_view(&view_id).await?;
Ok(())
}
#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn move_view(&self, view_id: &str, from: usize, to: usize) -> Result<(), FlowyError> {
let _ = self
.persistence
self.persistence
.begin_transaction(|transaction| {
let _ = transaction.move_view(view_id, from, to)?;
let view = transaction.read_view(view_id)?;
@ -290,13 +288,13 @@ impl ViewController {
let view_rev = self
.persistence
.begin_transaction(|transaction| {
let _ = transaction.update_view(changeset)?;
transaction.update_view(changeset)?;
let view_rev = transaction.read_view(&view_id)?;
let view: ViewPB = view_rev.clone().into();
send_dart_notification(&view_id, FolderNotification::ViewUpdated)
.payload(view)
.send();
let _ = notify_views_changed(&view_rev.app_id, self.trash_controller.clone(), &transaction)?;
notify_views_changed(&view_rev.app_id, self.trash_controller.clone(), &transaction)?;
Ok(view_rev)
})
.await?;
@ -439,7 +437,7 @@ async fn handle_trash_event(
.begin_transaction(|transaction| {
let view_revs = read_local_views_with_transaction(identifiers, &transaction)?;
for view_rev in view_revs {
let _ = notify_views_changed(&view_rev.app_id, trash_can.clone(), &transaction)?;
notify_views_changed(&view_rev.app_id, trash_can.clone(), &transaction)?;
notify_dart(view_rev.into(), FolderNotification::ViewDeleted);
}
Ok(())
@ -452,7 +450,7 @@ async fn handle_trash_event(
.begin_transaction(|transaction| {
let view_revs = read_local_views_with_transaction(identifiers, &transaction)?;
for view_rev in view_revs {
let _ = notify_views_changed(&view_rev.app_id, trash_can.clone(), &transaction)?;
notify_views_changed(&view_rev.app_id, trash_can.clone(), &transaction)?;
notify_dart(view_rev.into(), FolderNotification::ViewRestored);
}
Ok(())
@ -473,7 +471,7 @@ async fn handle_trash_event(
}
}
for notify_id in notify_ids {
let _ = notify_views_changed(&notify_id, trash_can.clone(), &transaction)?;
notify_views_changed(&notify_id, trash_can.clone(), &transaction)?;
}
Ok(views)
})
@ -483,7 +481,7 @@ async fn handle_trash_event(
let data_type = view.data_format.clone().into();
match get_data_processor(data_processors.clone(), &data_type) {
Ok(processor) => {
let _ = processor.close_view(&view.id).await?;
processor.close_view(&view.id).await?;
}
Err(e) => tracing::error!("{}", e),
}

View File

@ -75,7 +75,7 @@ pub(crate) async fn delete_view_handler(
})
.collect::<Vec<TrashPB>>();
let _ = trash_controller.add(trash).await?;
trash_controller.add(trash).await?;
Ok(())
}
@ -85,8 +85,8 @@ pub(crate) async fn set_latest_view_handler(
controller: AFPluginState<Arc<ViewController>>,
) -> Result<(), FlowyError> {
let view_id: ViewIdPB = data.into_inner();
let _ = controller.set_latest_view(&view_id.value)?;
let _ = notify_workspace_setting_did_change(&folder, &view_id).await?;
controller.set_latest_view(&view_id.value)?;
notify_workspace_setting_did_change(&folder, &view_id).await?;
Ok(())
}
@ -95,7 +95,7 @@ pub(crate) async fn close_view_handler(
controller: AFPluginState<Arc<ViewController>>,
) -> Result<(), FlowyError> {
let view_id: ViewIdPB = data.into_inner();
let _ = controller.close_view(&view_id.value).await?;
controller.close_view(&view_id.value).await?;
Ok(())
}
@ -108,10 +108,10 @@ pub(crate) async fn move_item_handler(
let params: MoveFolderItemParams = data.into_inner().try_into()?;
match params.ty {
MoveFolderItemType::MoveApp => {
let _ = app_controller.move_app(&params.item_id, params.from, params.to).await?;
app_controller.move_app(&params.item_id, params.from, params.to).await?;
}
MoveFolderItemType::MoveView => {
let _ = view_controller
view_controller
.move_view(&params.item_id, params.from, params.to)
.await?;
}
@ -125,6 +125,6 @@ pub(crate) async fn duplicate_view_handler(
controller: AFPluginState<Arc<ViewController>>,
) -> Result<(), FlowyError> {
let view: ViewPB = data.into_inner();
let _ = controller.duplicate_view(view).await?;
controller.duplicate_view(view).await?;
Ok(())
}

View File

@ -45,7 +45,7 @@ impl WorkspaceController {
let workspaces = self
.persistence
.begin_transaction(|transaction| {
let _ = transaction.create_workspace(&user_id, workspace.clone())?;
transaction.create_workspace(&user_id, workspace.clone())?;
transaction.read_workspaces(&user_id, None)
})
.await?
@ -67,7 +67,7 @@ impl WorkspaceController {
let workspace = self
.persistence
.begin_transaction(|transaction| {
let _ = transaction.update_workspace(changeset)?;
transaction.update_workspace(changeset)?;
let user_id = self.user.user_id()?;
self.read_local_workspace(workspace_id.clone(), &user_id, &transaction)
})
@ -76,7 +76,7 @@ impl WorkspaceController {
send_dart_notification(&workspace_id, FolderNotification::WorkspaceUpdated)
.payload(workspace)
.send();
let _ = self.update_workspace_on_server(params)?;
self.update_workspace_on_server(params)?;
Ok(())
}
@ -88,14 +88,14 @@ impl WorkspaceController {
let repeated_workspace = self
.persistence
.begin_transaction(|transaction| {
let _ = transaction.delete_workspace(workspace_id)?;
transaction.delete_workspace(workspace_id)?;
self.read_local_workspaces(None, &user_id, &transaction)
})
.await?;
send_dart_notification(&token, FolderNotification::UserDeleteWorkspace)
.payload(repeated_workspace)
.send();
let _ = self.delete_workspace_on_server(workspace_id)?;
self.delete_workspace_on_server(workspace_id)?;
Ok(())
}

View File

@ -117,7 +117,7 @@ fn read_workspaces_on_server(
tokio::spawn(async move {
let workspace_revs = server.read_workspace(&token, params).await?;
let _ = persistence
persistence
.begin_transaction(|transaction| {
for workspace_rev in &workspace_revs {
let m_workspace = workspace_rev.clone();

View File

@ -79,28 +79,33 @@ pub struct CellPB {
#[pb(index = 1)]
pub field_id: String,
#[pb(index = 2)]
pub row_id: String,
/// The data is encoded using the helper struct `CellProtobufBlob`.
/// Check out `CellProtobufBlob` for more information.
#[pb(index = 2)]
#[pb(index = 3)]
pub data: Vec<u8>,
/// the field_type will be None if the field with field_id is not found
#[pb(index = 3, one_of)]
#[pb(index = 4, one_of)]
pub field_type: Option<FieldType>,
}
impl CellPB {
pub fn new(field_id: &str, field_type: FieldType, data: Vec<u8>) -> Self {
pub fn new(field_id: &str, row_id: &str, field_type: FieldType, data: Vec<u8>) -> Self {
Self {
field_id: field_id.to_owned(),
row_id: row_id.to_string(),
data,
field_type: Some(field_type),
}
}
pub fn empty(field_id: &str) -> Self {
pub fn empty(field_id: &str, row_id: &str) -> Self {
Self {
field_id: field_id.to_owned(),
row_id: row_id.to_owned(),
data: vec![],
field_type: None,
}
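
Beyond the clippy cleanup, `CellPB` now carries the `row_id` of the row it belongs to, so a caller that fetches cells in bulk can tell which row each cell came from. A hedged sketch of the new constructors, with simplified stand-in types (the real ones are generated protobuf structs, and the ids are made up):

```rust
// Minimal stand-ins for illustration only.
#[derive(Debug, Clone, PartialEq, Eq)]
enum FieldType {
    RichText,
}

#[derive(Debug)]
struct CellPB {
    field_id: String,
    row_id: String,
    data: Vec<u8>,
    field_type: Option<FieldType>,
}

impl CellPB {
    fn new(field_id: &str, row_id: &str, field_type: FieldType, data: Vec<u8>) -> Self {
        Self {
            field_id: field_id.to_owned(),
            row_id: row_id.to_string(),
            data,
            field_type: Some(field_type),
        }
    }

    fn empty(field_id: &str, row_id: &str) -> Self {
        Self {
            field_id: field_id.to_owned(),
            row_id: row_id.to_owned(),
            data: vec![],
            field_type: None,
        }
    }
}

fn main() {
    let cell = CellPB::new("field_1", "row_42", FieldType::RichText, b"hello".to_vec());
    assert_eq!(cell.row_id, "row_42");

    // A cell whose field cannot be resolved comes back empty but still names its row.
    let missing = CellPB::empty("unknown_field", "row_42");
    assert!(missing.field_type.is_none());
    assert!(missing.data.is_empty());
}
```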

View File

@ -25,14 +25,14 @@ pub struct GroupRowsNotificationPB {
impl std::fmt::Display for GroupRowsNotificationPB {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
for inserted_row in &self.inserted_rows {
let _ = f.write_fmt(format_args!(
f.write_fmt(format_args!(
"Insert: {} row at {:?}",
inserted_row.row.id, inserted_row.index
))?;
}
for deleted_row in &self.deleted_rows {
let _ = f.write_fmt(format_args!("Delete: {} row", deleted_row))?;
f.write_fmt(format_args!("Delete: {} row", deleted_row))?;
}
Ok(())

View File

@ -44,26 +44,26 @@ pub(crate) async fn update_grid_setting_handler(
let editor = manager.get_grid_editor(&params.grid_id).await?;
if let Some(insert_params) = params.insert_group {
let _ = editor.insert_group(insert_params).await?;
editor.insert_group(insert_params).await?;
}
if let Some(delete_params) = params.delete_group {
let _ = editor.delete_group(delete_params).await?;
editor.delete_group(delete_params).await?;
}
if let Some(alter_filter) = params.insert_filter {
let _ = editor.create_or_update_filter(alter_filter).await?;
editor.create_or_update_filter(alter_filter).await?;
}
if let Some(delete_filter) = params.delete_filter {
let _ = editor.delete_filter(delete_filter).await?;
editor.delete_filter(delete_filter).await?;
}
if let Some(alter_sort) = params.alert_sort {
let _ = editor.create_or_update_sort(alter_sort).await?;
}
if let Some(delete_sort) = params.delete_sort {
let _ = editor.delete_sort(delete_sort).await?;
editor.delete_sort(delete_sort).await?;
}
Ok(())
}
@ -100,7 +100,7 @@ pub(crate) async fn update_field_handler(
) -> Result<(), FlowyError> {
let changeset: FieldChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&changeset.grid_id).await?;
let _ = editor.update_field(changeset).await?;
editor.update_field(changeset).await?;
Ok(())
}
@ -112,7 +112,7 @@ pub(crate) async fn update_field_type_option_handler(
let params: TypeOptionChangesetParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&params.grid_id).await?;
let old_field_rev = editor.get_field_rev(&params.field_id).await;
let _ = editor
editor
.update_field_type_option(
&params.grid_id,
&params.field_id,
@ -130,7 +130,7 @@ pub(crate) async fn delete_field_handler(
) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&params.grid_id).await?;
let _ = editor.delete_field(&params.field_id).await?;
editor.delete_field(&params.field_id).await?;
Ok(())
}
@ -154,7 +154,7 @@ pub(crate) async fn switch_to_field_handler(
// Update the type-option data after the field type has been changed
let type_option_data = get_type_option_data(&new_field_rev, &params.field_type).await?;
let _ = editor
editor
.update_field_type_option(&params.grid_id, &new_field_rev.id, type_option_data, old_field_rev)
.await?;
@ -168,7 +168,7 @@ pub(crate) async fn duplicate_field_handler(
) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&params.grid_id).await?;
let _ = editor.duplicate_field(&params.field_id).await?;
editor.duplicate_field(&params.field_id).await?;
Ok(())
}
@ -223,7 +223,7 @@ pub(crate) async fn move_field_handler(
) -> Result<(), FlowyError> {
let params: MoveFieldParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&params.grid_id).await?;
let _ = editor.move_field(params).await?;
editor.move_field(params).await?;
Ok(())
}
@ -263,7 +263,7 @@ pub(crate) async fn delete_row_handler(
) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&params.grid_id).await?;
let _ = editor.delete_row(&params.row_id).await?;
editor.delete_row(&params.row_id).await?;
Ok(())
}
@ -274,7 +274,7 @@ pub(crate) async fn duplicate_row_handler(
) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&params.grid_id).await?;
let _ = editor.duplicate_row(&params.row_id).await?;
editor.duplicate_row(&params.row_id).await?;
Ok(())
}
@ -285,7 +285,7 @@ pub(crate) async fn move_row_handler(
) -> Result<(), FlowyError> {
let params: MoveRowParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&params.view_id).await?;
let _ = editor.move_row(params).await?;
editor.move_row(params).await?;
Ok(())
}
@ -308,7 +308,7 @@ pub(crate) async fn get_cell_handler(
let params: CellPathParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&params.view_id).await?;
match editor.get_cell(&params).await {
None => data_result(CellPB::empty(&params.field_id)),
None => data_result(CellPB::empty(&params.field_id, &params.row_id)),
Some(cell) => data_result(cell),
}
}
@ -320,7 +320,7 @@ pub(crate) async fn update_cell_handler(
) -> Result<(), FlowyError> {
let changeset: CellChangesetPB = data.into_inner();
let editor = manager.get_grid_editor(&changeset.grid_id).await?;
let _ = editor
editor
.update_cell_with_changeset(&changeset.row_id, &changeset.field_id, changeset.type_cell_data)
.await?;
Ok(())
@ -351,7 +351,7 @@ pub(crate) async fn update_select_option_handler(
let changeset: SelectOptionChangeset = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(&changeset.cell_path.view_id).await?;
let field_id = changeset.cell_path.field_id.clone();
let _ = editor
editor
.modify_field_rev(&field_id, |field_rev| {
let mut type_option = select_type_option_from_field_rev(field_rev)?;
let mut cell_changeset_str = None;
@ -445,7 +445,7 @@ pub(crate) async fn update_select_option_cell_handler(
delete_option_ids: params.delete_option_ids,
};
let _ = editor
editor
.update_cell_with_changeset(
&params.cell_identifier.row_id,
&params.cell_identifier.field_id,
@ -469,7 +469,7 @@ pub(crate) async fn update_date_cell_handler(
};
let editor = manager.get_grid_editor(&cell_path.view_id).await?;
let _ = editor
editor
.update_cell(cell_path.row_id, cell_path.field_id, cell_changeset)
.await?;
Ok(())
@ -504,7 +504,7 @@ pub(crate) async fn move_group_handler(
) -> FlowyResult<()> {
let params: MoveGroupParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(params.view_id.as_ref()).await?;
let _ = editor.move_group(params).await?;
editor.move_group(params).await?;
Ok(())
}
@ -515,6 +515,6 @@ pub(crate) async fn move_group_row_handler(
) -> FlowyResult<()> {
let params: MoveGroupRowParams = data.into_inner().try_into()?;
let editor = manager.get_grid_editor(params.view_id.as_ref()).await?;
let _ = editor.move_group_row(params).await?;
editor.move_group_row(params).await?;
Ok(())
}

View File

@ -71,7 +71,7 @@ impl GridManager {
let grid_id = grid_id.as_ref();
let db_pool = self.grid_user.db_pool()?;
let rev_manager = self.make_grid_rev_manager(grid_id, db_pool)?;
let _ = rev_manager.reset_object(revisions).await?;
rev_manager.reset_object(revisions).await?;
Ok(())
}
@ -80,7 +80,7 @@ impl GridManager {
async fn create_grid_view<T: AsRef<str>>(&self, view_id: T, revisions: Vec<Revision>) -> FlowyResult<()> {
let view_id = view_id.as_ref();
let rev_manager = make_grid_view_rev_manager(&self.grid_user, view_id).await?;
let _ = rev_manager.reset_object(revisions).await?;
rev_manager.reset_object(revisions).await?;
Ok(())
}
@ -88,7 +88,7 @@ impl GridManager {
pub async fn create_grid_block<T: AsRef<str>>(&self, block_id: T, revisions: Vec<Revision>) -> FlowyResult<()> {
let block_id = block_id.as_ref();
let rev_manager = make_grid_block_rev_manager(&self.grid_user, block_id)?;
let _ = rev_manager.reset_object(revisions).await?;
rev_manager.reset_object(revisions).await?;
Ok(())
}
@ -196,7 +196,7 @@ pub async fn make_grid_view_data(
let grid_block_delta = make_grid_block_operations(block_meta_data);
let block_delta_data = grid_block_delta.json_bytes();
let revision = Revision::initial_revision(block_id, block_delta_data);
let _ = grid_manager.create_grid_block(&block_id, vec![revision]).await?;
grid_manager.create_grid_block(&block_id, vec![revision]).await?;
}
// Will replace the grid_id with the value returned by the gen_grid_id()
@ -207,7 +207,7 @@ pub async fn make_grid_view_data(
let grid_rev_delta = make_grid_operations(&grid_rev);
let grid_rev_delta_bytes = grid_rev_delta.json_bytes();
let revision = Revision::initial_revision(&grid_id, grid_rev_delta_bytes.clone());
let _ = grid_manager.create_grid(&grid_id, vec![revision]).await?;
grid_manager.create_grid(&grid_id, vec![revision]).await?;
// Create grid view
let grid_view = if grid_view_revision_data.is_empty() {
@ -218,7 +218,7 @@ pub async fn make_grid_view_data(
let grid_view_delta = make_grid_view_operations(&grid_view);
let grid_view_delta_bytes = grid_view_delta.json_bytes();
let revision = Revision::initial_revision(view_id, grid_view_delta_bytes);
let _ = grid_manager.create_grid_view(view_id, vec![revision]).await?;
grid_manager.create_grid_view(view_id, vec![revision]).await?;
Ok(grid_rev_delta_bytes)
}

View File

@ -63,48 +63,45 @@ impl GridBlockRevisionEditor {
) -> FlowyResult<(i32, Option<i32>)> {
let mut row_count = 0;
let mut row_index = None;
let _ = self
.modify(|block_pad| {
if let Some(start_row_id) = prev_row_id.as_ref() {
match block_pad.index_of_row(start_row_id) {
None => {}
Some(index) => row_index = Some(index as i32 + 1),
}
self.modify(|block_pad| {
if let Some(start_row_id) = prev_row_id.as_ref() {
match block_pad.index_of_row(start_row_id) {
None => {}
Some(index) => row_index = Some(index as i32 + 1),
}
}
let change = block_pad.add_row_rev(row, prev_row_id)?;
row_count = block_pad.number_of_rows();
let change = block_pad.add_row_rev(row, prev_row_id)?;
row_count = block_pad.number_of_rows();
if row_index.is_none() {
row_index = Some(row_count - 1);
}
Ok(change)
})
.await?;
if row_index.is_none() {
row_index = Some(row_count - 1);
}
Ok(change)
})
.await?;
Ok((row_count, row_index))
}
pub async fn delete_rows(&self, ids: Vec<Cow<'_, String>>) -> FlowyResult<i32> {
let mut row_count = 0;
let _ = self
.modify(|block_pad| {
let changeset = block_pad.delete_rows(ids)?;
row_count = block_pad.number_of_rows();
Ok(changeset)
})
.await?;
self.modify(|block_pad| {
let changeset = block_pad.delete_rows(ids)?;
row_count = block_pad.number_of_rows();
Ok(changeset)
})
.await?;
Ok(row_count)
}
pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
let _ = self.modify(|block_pad| Ok(block_pad.update_row(changeset)?)).await?;
self.modify(|block_pad| Ok(block_pad.update_row(changeset)?)).await?;
Ok(())
}
pub async fn move_row(&self, row_id: &str, from: usize, to: usize) -> FlowyResult<()> {
let _ = self
.modify(|block_pad| Ok(block_pad.move_row(row_id, from, to)?))
self.modify(|block_pad| Ok(block_pad.move_row(row_id, from, to)?))
.await?;
Ok(())
}
@ -149,10 +146,10 @@ impl GridBlockRevisionEditor {
F: for<'a> FnOnce(&'a mut GridBlockRevisionPad) -> FlowyResult<Option<GridBlockRevisionChangeset>>,
{
let mut write_guard = self.pad.write().await;
match f(&mut *write_guard)? {
match f(&mut write_guard)? {
None => {}
Some(change) => {
let _ = self.apply_change(change).await?;
self.apply_change(change).await?;
}
}
Ok(())

View File

@ -92,7 +92,7 @@ impl GridBlockManager {
#[tracing::instrument(level = "trace", skip(self, start_row_id), err)]
pub(crate) async fn create_row(&self, row_rev: RowRevision, start_row_id: Option<String>) -> FlowyResult<i32> {
let block_id = row_rev.block_id.clone();
let _ = self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
let editor = self.get_block_editor(&row_rev.block_id).await?;
let mut row = InsertedRowPB::from(&row_rev);
@ -111,7 +111,7 @@ impl GridBlockManager {
for (block_id, row_revs) in rows_by_block_id {
let editor = self.get_block_editor(&block_id).await?;
for row_rev in row_revs {
let _ = self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
self.persistence.insert(&row_rev.block_id, &row_rev.id)?;
let mut row = InsertedRowPB::from(&row_rev);
row.index = editor.create_row(row_rev, None).await?.1;
let _ = self.event_notifier.send(GridBlockEvent::InsertRow {
@ -130,7 +130,7 @@ impl GridBlockManager {
pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
let editor = self.get_editor_from_row_id(&changeset.row_id).await?;
let _ = editor.update_row(changeset.clone()).await?;
editor.update_row(changeset.clone()).await?;
match editor.get_row_rev(&changeset.row_id).await? {
None => tracing::error!("Update row failed, can't find the row with id: {}", changeset.row_id),
Some((_, row_rev)) => {
@ -190,7 +190,7 @@ impl GridBlockManager {
// This function will be moved to GridViewRevisionEditor
pub(crate) async fn move_row(&self, row_rev: Arc<RowRevision>, from: usize, to: usize) -> FlowyResult<()> {
let editor = self.get_editor_from_row_id(&row_rev.id).await?;
let _ = editor.move_row(&row_rev.id, from, to).await?;
editor.move_row(&row_rev.id, from, to).await?;
let delete_row_id = row_rev.id.clone();
let insert_row = InsertedRowPB {
@ -218,7 +218,7 @@ impl GridBlockManager {
pub async fn update_cell(&self, changeset: CellChangesetPB) -> FlowyResult<()> {
let row_changeset: RowChangeset = changeset.clone().into();
let _ = self.update_row(row_changeset).await?;
self.update_row(row_changeset).await?;
self.notify_did_update_cell(changeset).await?;
Ok(())
}
@ -228,10 +228,13 @@ impl GridBlockManager {
editor.get_row_rev(row_id).await
}
#[allow(dead_code)]
pub async fn get_row_revs(&self, block_id: &str) -> FlowyResult<Vec<Arc<RowRevision>>> {
let editor = self.get_block_editor(block_id).await?;
editor.get_row_revs::<&str>(None).await
pub async fn get_row_revs(&self) -> FlowyResult<Vec<Arc<RowRevision>>> {
let mut row_revs = vec![];
for iter in self.block_editors.iter() {
let editor = iter.value();
row_revs.extend(editor.get_row_revs::<&str>(None).await?);
}
Ok(row_revs)
}
pub(crate) async fn get_blocks(&self, block_ids: Option<Vec<String>>) -> FlowyResult<Vec<GridBlockRowRevision>> {
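
This `get_row_revs` change backs the "get all cells for a specific field" work mentioned in the commit message: instead of reading rows from a single block, the manager now walks every block editor and concatenates their rows, and a caller can then project out one field's cell from each row. A hedged sketch of that projection step, using simplified stand-in types (the real rows store `CellRevision` values keyed by field id):

```rust
use std::collections::HashMap;

// Simplified stand-in for a row revision.
struct RowRevision {
    id: String,
    cells: HashMap<String, String>,
}

// Collect one field's cell across all rows: (row id, optional cell content).
fn cells_for_field(rows: &[RowRevision], field_id: &str) -> Vec<(String, Option<String>)> {
    rows.iter()
        .map(|row| (row.id.clone(), row.cells.get(field_id).cloned()))
        .collect()
}

fn main() {
    let rows = vec![
        RowRevision {
            id: "row_1".into(),
            cells: HashMap::from([("name".to_string(), "a".to_string())]),
        },
        RowRevision {
            id: "row_2".into(),
            cells: HashMap::new(),
        },
    ];

    let cells = cells_for_field(&rows, "name");
    assert_eq!(cells[0].1.as_deref(), Some("a"));
    assert!(cells[1].1.is_none());
}
```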

View File

@ -1,8 +1,10 @@
use crate::entities::FieldType;
use crate::services::cell::{AtomicCellDataCache, CellProtobufBlob, TypeCellData};
use crate::services::field::*;
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use grid_rev_model::{CellRevision, FieldRevision};
use std::fmt::Debug;
/// Decode the opaque cell data into readable format content
@ -134,8 +136,7 @@ pub fn try_decode_cell_str(
}
/// Returns a string that represents the current field_type's cell data.
/// If the cell data of the `FieldType` doesn't support displaying in String then will return an
/// empty string. For example, The string of the Multi-Select cell will be a list of the option's name
/// For example, The string of the Multi-Select cell will be a list of the option's name
/// separated by a comma.
///
/// # Arguments
@ -214,52 +215,6 @@ pub trait FromCellString {
Self: Sized;
}
/// IntoCellData is a helper struct used to deserialize string into a specific data type that implements
/// the `FromCellString` trait.
///
pub struct IntoCellData<T>(pub Option<T>);
impl<T> IntoCellData<T> {
pub fn try_into_inner(self) -> FlowyResult<T> {
match self.0 {
None => Err(ErrorCode::InvalidData.into()),
Some(data) => Ok(data),
}
}
}
impl<T> std::convert::From<String> for IntoCellData<T>
where
T: FromCellString,
{
fn from(s: String) -> Self {
match T::from_cell_str(&s) {
Ok(inner) => IntoCellData(Some(inner)),
Err(e) => {
tracing::error!("Deserialize Cell Data failed: {}", e);
IntoCellData(None)
}
}
}
}
impl<T> std::convert::From<T> for IntoCellData<T> {
fn from(val: T) -> Self {
IntoCellData(Some(val))
}
}
impl std::convert::From<usize> for IntoCellData<String> {
fn from(n: usize) -> Self {
IntoCellData(Some(n.to_string()))
}
}
impl std::convert::From<IntoCellData<String>> for String {
fn from(p: IntoCellData<String>) -> Self {
p.try_into_inner().unwrap_or_else(|_| String::new())
}
}
/// If the changeset applied to the cell is not a String, it should implement this trait.
/// Deserializes the string into a cell-specific changeset.
pub trait FromCellChangesetString {

View File

@ -1,5 +1,5 @@
use crate::entities::FieldType;
use crate::services::cell::{FromCellString, IntoCellData};
use bytes::Bytes;
use flowy_error::{internal_error, FlowyError, FlowyResult};
use grid_rev_model::CellRevision;
@ -50,15 +50,6 @@ impl std::convert::TryFrom<String> for TypeCellData {
}
}
impl<T> std::convert::From<TypeCellData> for IntoCellData<T>
where
T: FromCellString,
{
fn from(any_call_data: TypeCellData) -> Self {
IntoCellData::from(any_call_data.cell_str)
}
}
impl ToString for TypeCellData {
fn to_string(&self) -> String {
self.cell_str.clone()

View File

@ -22,7 +22,7 @@ where
action(&mut type_option);
let bytes = type_option.protobuf_bytes().to_vec();
let _ = editor
editor
.update_field_type_option(&editor.grid_id, field_id, bytes, old_field_rev)
.await?;
}

View File

@ -63,7 +63,7 @@ impl TypeOptionTransform for CheckboxTypeOptionPB {
_field_rev: &FieldRevision,
) -> Option<<Self as TypeOption>::CellData> {
if decoded_field_type.is_text() {
match CheckboxCellData::from_str(&cell_str) {
match CheckboxCellData::from_str(cell_str) {
Ok(cell_data) => Some(cell_data),
Err(_) => None,
}

View File

@ -11,6 +11,7 @@ use crate::services::field::{
use crate::services::filter::FilterType;
use flowy_error::FlowyResult;
use grid_rev_model::{FieldRevision, TypeOptionDataDeserializer, TypeOptionDataSerializer};
use std::any::Any;
use std::cmp::Ordering;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
@ -49,6 +50,13 @@ pub trait TypeOptionCellDataHandler {
/// cell data.
fn stringify_cell_str(&self, cell_str: String, decoded_field_type: &FieldType, field_rev: &FieldRevision)
-> String;
fn get_cell_data(
&self,
cell_str: String,
decoded_field_type: &FieldType,
field_rev: &FieldRevision,
) -> FlowyResult<BoxCellData>;
}
struct CellDataCacheKey(u64);
@ -192,14 +200,10 @@ where
decoded_field_type: &FieldType,
field_rev: &FieldRevision,
) -> FlowyResult<CellProtobufBlob> {
let cell_data = if self.transformable() {
match self.transform_type_option_cell_str(&cell_str, decoded_field_type, field_rev) {
None => self.get_decoded_cell_data(cell_str, decoded_field_type, field_rev)?,
Some(cell_data) => cell_data,
}
} else {
self.get_decoded_cell_data(cell_str, decoded_field_type, field_rev)?
};
let cell_data = self
.get_cell_data(cell_str, decoded_field_type, field_rev)?
.unbox_or_default::<<Self as TypeOption>::CellData>();
CellProtobufBlob::from(self.convert_to_protobuf(cell_data))
}
@ -263,6 +267,23 @@ where
Err(_) => "".to_string(),
}
}
fn get_cell_data(
&self,
cell_str: String,
decoded_field_type: &FieldType,
field_rev: &FieldRevision,
) -> FlowyResult<BoxCellData> {
let cell_data = if self.transformable() {
match self.transform_type_option_cell_str(&cell_str, decoded_field_type, field_rev) {
None => self.get_decoded_cell_data(cell_str, decoded_field_type, field_rev)?,
Some(cell_data) => cell_data,
}
} else {
self.get_decoded_cell_data(cell_str, decoded_field_type, field_rev)?
};
Ok(BoxCellData::new(cell_data))
}
}
pub struct TypeOptionCellExt<'a> {
@ -293,6 +314,16 @@ impl<'a> TypeOptionCellExt<'a> {
this
}
pub fn get_cells<T>(&self) -> Vec<T> {
let field_type: FieldType = self.field_rev.ty.into();
match self.get_type_option_cell_data_handler(&field_type) {
None => vec![],
Some(_handler) => {
todo!()
}
}
}
pub fn get_type_option_cell_data_handler(
&self,
field_type: &FieldType,
@ -447,3 +478,49 @@ fn get_type_option_transform_handler(
}
}
}
pub struct BoxCellData(Box<dyn Any + Send + Sync + 'static>);
impl BoxCellData {
fn new<T>(value: T) -> Self
where
T: Send + Sync + 'static,
{
Self(Box::new(value))
}
fn unbox_or_default<T>(self) -> T
where
T: Default + 'static,
{
match self.0.downcast::<T>() {
Ok(value) => *value,
Err(_) => T::default(),
}
}
fn downcast_ref<T: 'static>(&self) -> Option<&T> {
self.0.downcast_ref()
}
}
pub struct RowSingleCellData {
pub row_id: String,
pub field_id: String,
pub field_type: FieldType,
pub cell_data: BoxCellData,
}
impl RowSingleCellData {
pub fn get_text_field_cell_data(&self) -> Option<&<RichTextTypeOptionPB as TypeOption>::CellData> {
self.cell_data.downcast_ref()
}
pub fn get_number_field_cell_data(&self) -> Option<&<NumberTypeOptionPB as TypeOption>::CellData> {
self.cell_data.downcast_ref()
}
pub fn get_url_field_cell_data(&self) -> Option<&<URLTypeOptionPB as TypeOption>::CellData> {
self.cell_data.downcast_ref()
}
}
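BoxCellData is a small type-erasure wrapper: the handler boxes whatever concrete `CellData` it decoded, and the typed getters on RowSingleCellData downcast it back, returning None when the field type does not match. A hedged sketch of reading a value back (the `cell` binding is hypothetical):

    // `cell` is assumed to be one RowSingleCellData produced for a RichText field
    if let Some(text) = cell.get_text_field_cell_data() {
        // downcast succeeded; other field types would yield None here
        let _content: &str = text.as_str();
    }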

View File

@ -105,7 +105,7 @@ impl TypeOptionCellDataFilter for URLTypeOptionPB {
return true;
}
filter.is_visible(&cell_data)
filter.is_visible(cell_data)
}
}

View File

@ -80,6 +80,6 @@ impl FromCellString for URLCellData {
impl ToString for URLCellData {
fn to_string(&self) -> String {
self.content.clone()
self.to_json().unwrap()
}
}
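ToString for URLCellData now yields the serialized JSON of the whole cell rather than echoing its raw content. A rough sketch of the new equivalence (constructor usage mirrors the grid test builder; otherwise illustrative):

    let cell = URLCellData::new("https://www.appflowy.io/");
    // previously this returned `cell.content`; now it matches the JSON form
    assert_eq!(cell.to_string(), cell.to_json().unwrap());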

View File

@ -205,7 +205,7 @@ impl FilterController {
notification = Some(FilterChangesetNotificationPB::from_insert(&self.view_id, vec![filter]));
}
if let Some(filter_rev) = self.delegate.get_filter_rev(filter_type.clone()).await {
let _ = self.refresh_filters(vec![filter_rev]).await;
self.refresh_filters(vec![filter_rev]).await;
}
}
@ -222,7 +222,7 @@ impl FilterController {
// Update the corresponding filter in the cache
if let Some(filter_rev) = self.delegate.get_filter_rev(updated_filter_type.new.clone()).await {
let _ = self.refresh_filters(vec![filter_rev]).await;
self.refresh_filters(vec![filter_rev]).await;
}
if let Some(filter_id) = filter_id {
@ -244,8 +244,7 @@ impl FilterController {
self.cell_filter_cache.write().remove(filter_type);
}
let _ = self
.gen_task(FilterEvent::FilterDidChanged, QualityOfService::Background)
self.gen_task(FilterEvent::FilterDidChanged, QualityOfService::Background)
.await;
tracing::trace!("{:?}", notification);
notification

View File

@ -32,7 +32,7 @@ impl TaskHandler for FilterTaskHandler {
let filter_controller = self.filter_controller.clone();
Box::pin(async move {
if let TaskContent::Text(predicate) = content {
let _ = filter_controller
filter_controller
.write()
.await
.process(&predicate)

View File

@ -9,6 +9,7 @@ use crate::services::cell::{
};
use crate::services::field::{
default_type_option_builder_from_type, transform_type_option, type_option_builder_from_bytes, FieldBuilder,
RowSingleCellData, TypeOptionCellExt,
};
use crate::services::filter::FilterType;
@ -134,30 +135,28 @@ impl GridRevisionEditor {
return Ok(());
}
let field_rev = result.unwrap();
let _ = self
.modify(|grid| {
let changeset = grid.modify_field(field_id, |field| {
let deserializer = TypeOptionJsonDeserializer(field_rev.ty.into());
match deserializer.deserialize(type_option_data) {
Ok(json_str) => {
let field_type = field.ty;
field.insert_type_option_str(&field_type, json_str);
}
Err(err) => {
tracing::error!("Deserialize data to type option json failed: {}", err);
}
self.modify(|grid| {
let changeset = grid.modify_field(field_id, |field| {
let deserializer = TypeOptionJsonDeserializer(field_rev.ty.into());
match deserializer.deserialize(type_option_data) {
Ok(json_str) => {
let field_type = field.ty;
field.insert_type_option_str(&field_type, json_str);
}
Ok(Some(()))
})?;
Ok(changeset)
})
.await?;
Err(err) => {
tracing::error!("Deserialize data to type option json failed: {}", err);
}
}
Ok(Some(()))
})?;
Ok(changeset)
})
.await?;
let _ = self
.view_manager
self.view_manager
.did_update_view_field_type_option(field_id, old_field_rev)
.await?;
let _ = self.notify_did_update_grid_field(field_id).await?;
self.notify_did_update_grid_field(field_id).await?;
Ok(())
}
@ -169,8 +168,8 @@ impl GridRevisionEditor {
pub async fn create_new_field_rev(&self, field_rev: FieldRevision) -> FlowyResult<()> {
let field_id = field_rev.id.clone();
let _ = self.modify(|grid| Ok(grid.create_field_rev(field_rev, None)?)).await?;
let _ = self.notify_did_insert_grid_field(&field_id).await?;
self.modify(|grid| Ok(grid.create_field_rev(field_rev, None)?)).await?;
self.notify_did_insert_grid_field(&field_id).await?;
Ok(())
}
@ -185,10 +184,9 @@ impl GridRevisionEditor {
let type_option_builder = type_option_builder_from_bytes(type_option_data, field_type);
field_rev.insert_type_option(type_option_builder.serializer());
}
let _ = self
.modify(|grid| Ok(grid.create_field_rev(field_rev.clone(), None)?))
self.modify(|grid| Ok(grid.create_field_rev(field_rev.clone(), None)?))
.await?;
let _ = self.notify_did_insert_grid_field(&field_rev.id).await?;
self.notify_did_insert_grid_field(&field_rev.id).await?;
Ok(field_rev)
}
@ -199,33 +197,32 @@ impl GridRevisionEditor {
pub async fn update_field(&self, params: FieldChangesetParams) -> FlowyResult<()> {
let field_id = params.field_id.clone();
let _ = self
.modify(|grid| {
let changeset = grid.modify_field(&params.field_id, |field| {
if let Some(name) = params.name {
field.name = name;
}
if let Some(desc) = params.desc {
field.desc = desc;
}
if let Some(field_type) = params.field_type {
field.ty = field_type;
}
if let Some(frozen) = params.frozen {
field.frozen = frozen;
}
if let Some(visibility) = params.visibility {
field.visibility = visibility;
}
if let Some(width) = params.width {
field.width = width;
}
Ok(Some(()))
})?;
Ok(changeset)
})
.await?;
let _ = self.notify_did_update_grid_field(&field_id).await?;
self.modify(|grid| {
let changeset = grid.modify_field(&params.field_id, |field| {
if let Some(name) = params.name {
field.name = name;
}
if let Some(desc) = params.desc {
field.desc = desc;
}
if let Some(field_type) = params.field_type {
field.ty = field_type;
}
if let Some(frozen) = params.frozen {
field.frozen = frozen;
}
if let Some(visibility) = params.visibility {
field.visibility = visibility;
}
if let Some(width) = params.width {
field.width = width;
}
Ok(Some(()))
})?;
Ok(changeset)
})
.await?;
self.notify_did_update_grid_field(&field_id).await?;
Ok(())
}
@ -235,15 +232,14 @@ impl GridRevisionEditor {
{
let mut is_changed = false;
let old_field_rev = self.get_field_rev(field_id).await;
let _ = self
.modify(|grid| {
let changeset = grid.modify_field(field_id, |field_rev| {
f(field_rev).map_err(|e| CollaborateError::internal().context(e))
})?;
is_changed = changeset.is_some();
Ok(changeset)
})
.await?;
self.modify(|grid| {
let changeset = grid.modify_field(field_id, |field_rev| {
f(field_rev).map_err(|e| CollaborateError::internal().context(e))
})?;
is_changed = changeset.is_some();
Ok(changeset)
})
.await?;
if is_changed {
match self
@ -254,21 +250,21 @@ impl GridRevisionEditor {
Ok(_) => {}
Err(e) => tracing::error!("View manager update field failed: {:?}", e),
}
let _ = self.notify_did_update_grid_field(field_id).await?;
self.notify_did_update_grid_field(field_id).await?;
}
Ok(())
}
pub async fn delete_field(&self, field_id: &str) -> FlowyResult<()> {
let _ = self.modify(|grid_pad| Ok(grid_pad.delete_field_rev(field_id)?)).await?;
self.modify(|grid_pad| Ok(grid_pad.delete_field_rev(field_id)?)).await?;
let field_order = FieldIdPB::from(field_id);
let notified_changeset = GridFieldChangesetPB::delete(&self.grid_id, vec![field_order]);
let _ = self.notify_did_update_grid(notified_changeset).await?;
self.notify_did_update_grid(notified_changeset).await?;
Ok(())
}
pub async fn group_by_field(&self, field_id: &str) -> FlowyResult<()> {
let _ = self.view_manager.group_by_field(field_id).await?;
self.view_manager.group_by_field(field_id).await?;
Ok(())
}
@ -297,29 +293,27 @@ impl GridRevisionEditor {
transform_type_option(&new_type_option, new_field_type, old_type_option, old_field_type)
};
let _ = self
.modify(|grid| {
Ok(grid.switch_to_field(
field_id,
new_field_type.clone(),
make_default_type_option,
type_option_transform,
)?)
})
.await?;
self.modify(|grid| {
Ok(grid.switch_to_field(
field_id,
new_field_type.clone(),
make_default_type_option,
type_option_transform,
)?)
})
.await?;
let _ = self.notify_did_update_grid_field(field_id).await?;
self.notify_did_update_grid_field(field_id).await?;
Ok(())
}
pub async fn duplicate_field(&self, field_id: &str) -> FlowyResult<()> {
let duplicated_field_id = gen_field_id();
let _ = self
.modify(|grid| Ok(grid.duplicate_field_rev(field_id, &duplicated_field_id)?))
self.modify(|grid| Ok(grid.duplicate_field_rev(field_id, &duplicated_field_id)?))
.await?;
let _ = self.notify_did_insert_grid_field(&duplicated_field_id).await?;
self.notify_did_insert_grid_field(&duplicated_field_id).await?;
Ok(())
}
@ -348,15 +342,13 @@ impl GridRevisionEditor {
}
pub async fn create_block(&self, block_meta_rev: GridBlockMetaRevision) -> FlowyResult<()> {
let _ = self
.modify(|grid_pad| Ok(grid_pad.create_block_meta_rev(block_meta_rev)?))
self.modify(|grid_pad| Ok(grid_pad.create_block_meta_rev(block_meta_rev)?))
.await?;
Ok(())
}
pub async fn update_block(&self, changeset: GridBlockMetaRevisionChangeset) -> FlowyResult<()> {
let _ = self
.modify(|grid_pad| Ok(grid_pad.update_block_rev(changeset)?))
self.modify(|grid_pad| Ok(grid_pad.update_block_rev(changeset)?))
.await?;
Ok(())
}
@ -374,7 +366,7 @@ impl GridRevisionEditor {
#[tracing::instrument(level = "trace", skip_all, err)]
pub async fn move_group(&self, params: MoveGroupParams) -> FlowyResult<()> {
let _ = self.view_manager.move_group(params).await?;
self.view_manager.move_group(params).await?;
Ok(())
}
@ -391,14 +383,14 @@ impl GridRevisionEditor {
}
let changesets = self.block_manager.insert_row(rows_by_block_id).await?;
for changeset in changesets {
let _ = self.update_block(changeset).await?;
self.update_block(changeset).await?;
}
Ok(row_orders)
}
pub async fn update_row(&self, changeset: RowChangeset) -> FlowyResult<()> {
let row_id = changeset.row_id.clone();
let _ = self.block_manager.update_row(changeset).await?;
self.block_manager.update_row(changeset).await?;
self.view_manager.did_update_cell(&row_id).await;
Ok(())
}
@ -444,11 +436,23 @@ impl GridRevisionEditor {
Ok(())
}
/// Returns the cell data encoded in protobuf.
pub async fn get_cell(&self, params: &CellPathParams) -> Option<CellPB> {
let (field_type, cell_bytes) = self.decode_cell_data_from(params).await?;
Some(CellPB::new(&params.field_id, field_type, cell_bytes.to_vec()))
Some(CellPB::new(
&params.field_id,
&params.row_id,
field_type,
cell_bytes.to_vec(),
))
}
/// Returns a string that represents the current field_type's cell data.
/// For example:
/// Multi-Select: a list of the options' names separated by commas.
/// Number: 123 => $123 if a currency format is set.
/// Date: 1653609600 => May 27, 2022
///
pub async fn get_cell_display_str(&self, params: &CellPathParams) -> String {
let display_str = || async {
let field_rev = self.get_field_rev(&params.field_id).await?;
@ -463,10 +467,10 @@ impl GridRevisionEditor {
))
};
display_str().await.unwrap_or_else(|| "".to_string())
display_str().await.unwrap_or_default()
}
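For callers that only need human-readable text, the display path sidesteps the protobuf round trip, while the renamed get_cell_protobuf keeps the typed payload. A hedged usage sketch (building CellPathParams is omitted; it carries the field id and row id used above):

    // `params: CellPathParams` identifies the target cell by field id and row id
    let display: String = editor.get_cell_display_str(&params).await; // "" when decoding fails
    let blob = editor.get_cell_protobuf(&params).await;               // Option<CellProtobufBlob>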
pub async fn get_cell_bytes(&self, params: &CellPathParams) -> Option<CellProtobufBlob> {
pub async fn get_cell_protobuf(&self, params: &CellPathParams) -> Option<CellProtobufBlob> {
let (_, cell_data) = self.decode_cell_data_from(params).await?;
Some(cell_data)
}
@ -492,6 +496,33 @@ impl GridRevisionEditor {
}
}
pub async fn get_cell_data_for_field(&self, field_id: &str) -> FlowyResult<Vec<RowSingleCellData>> {
let row_revs = self.block_manager.get_row_revs().await?;
let field_rev = self.get_field_rev(field_id).await.unwrap();
let field_type: FieldType = field_rev.ty.into();
let mut cells = vec![];
if let Some(handler) =
TypeOptionCellExt::new_with_cell_data_cache(&field_rev, Some(self.cell_data_cache.clone()))
.get_type_option_cell_data_handler(&field_type)
{
for row_rev in row_revs {
if let Some(cell_rev) = row_rev.cells.get(field_id) {
if let Ok(type_cell_data) = TypeCellData::try_from(cell_rev) {
if let Ok(cell_data) = handler.get_cell_data(type_cell_data.cell_str, &field_type, &field_rev) {
cells.push(RowSingleCellData {
row_id: row_rev.id.clone(),
field_id: field_rev.id.clone(),
field_type: field_type.clone(),
cell_data,
})
}
}
}
}
}
Ok(cells)
}
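This is the entry point for the new "get all cells for a specific field" path: the type-option handler is resolved once, then every row's cell for that field is decoded into type-erased BoxCellData. A minimal usage sketch, mirroring the tests added further below:

    // collect every cell of one text field and read the typed values back
    let cells = editor.get_cell_data_for_field(&text_field.id).await?;
    for cell in &cells {
        if let Some(text) = cell.get_text_field_cell_data() {
            tracing::trace!("row {} => {}", cell.row_id, text.as_str());
        }
    }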
#[tracing::instrument(level = "trace", skip_all, err)]
pub async fn update_cell_with_changeset<T: ToCellChangesetString>(
&self,
@ -516,7 +547,7 @@ impl GridRevisionEditor {
field_id: field_id.to_owned(),
type_cell_data,
};
let _ = self.block_manager.update_cell(cell_changeset).await?;
self.block_manager.update_cell(cell_changeset).await?;
self.view_manager.did_update_cell(row_id).await;
Ok(())
}
@ -558,7 +589,7 @@ impl GridRevisionEditor {
pub async fn delete_rows(&self, block_rows: Vec<GridBlockRow>) -> FlowyResult<()> {
let changesets = self.block_manager.delete_rows(block_rows).await?;
for changeset in changesets {
let _ = self.update_block(changeset).await?;
self.update_block(changeset).await?;
}
Ok(())
}
@ -607,17 +638,17 @@ impl GridRevisionEditor {
}
pub async fn create_or_update_filter(&self, params: AlterFilterParams) -> FlowyResult<()> {
let _ = self.view_manager.create_or_update_filter(params).await?;
self.view_manager.create_or_update_filter(params).await?;
Ok(())
}
pub async fn delete_filter(&self, params: DeleteFilterParams) -> FlowyResult<()> {
let _ = self.view_manager.delete_filter(params).await?;
self.view_manager.delete_filter(params).await?;
Ok(())
}
pub async fn delete_sort(&self, params: DeleteSortParams) -> FlowyResult<()> {
let _ = self.view_manager.delete_sort(params).await?;
self.view_manager.delete_sort(params).await?;
Ok(())
}
@ -642,8 +673,7 @@ impl GridRevisionEditor {
) {
(Some(from_index), Some(to_index)) => {
tracing::trace!("Move row from {} to {}", from_index, to_index);
let _ = self
.block_manager
self.block_manager
.move_row(row_rev.clone(), from_index, to_index)
.await?;
}
@ -704,8 +734,7 @@ impl GridRevisionEditor {
to_index,
} = params;
let _ = self
.modify(|grid_pad| Ok(grid_pad.move_field(&field_id, from_index as usize, to_index as usize)?))
self.modify(|grid_pad| Ok(grid_pad.move_field(&field_id, from_index as usize, to_index as usize)?))
.await?;
if let Some((index, field_rev)) = self.grid_pad.read().await.get_field_rev(&field_id) {
let delete_field_order = FieldIdPB::from(field_id);
@ -717,7 +746,7 @@ impl GridRevisionEditor {
updated_fields: vec![],
};
let _ = self.notify_did_update_grid(notified_changeset).await?;
self.notify_did_update_grid(notified_changeset).await?;
}
Ok(())
}
@ -777,7 +806,7 @@ impl GridRevisionEditor {
// update block row count
let changeset = GridBlockMetaRevisionChangeset::from_row_count(block_id, row_count);
let _ = self.update_block(changeset).await?;
self.update_block(changeset).await?;
Ok(row_pb)
}
@ -786,8 +815,8 @@ impl GridRevisionEditor {
F: for<'a> FnOnce(&'a mut GridRevisionPad) -> FlowyResult<Option<GridRevisionChangeset>>,
{
let mut write_guard = self.grid_pad.write().await;
if let Some(changeset) = f(&mut *write_guard)? {
let _ = self.apply_change(changeset).await?;
if let Some(changeset) = f(&mut write_guard)? {
self.apply_change(changeset).await?;
}
Ok(())
}
@ -811,7 +840,7 @@ impl GridRevisionEditor {
if let Some((index, field_rev)) = self.grid_pad.read().await.get_field_rev(field_id) {
let index_field = IndexFieldPB::from_field_rev(field_rev, index);
let notified_changeset = GridFieldChangesetPB::insert(&self.grid_id, vec![index_field]);
let _ = self.notify_did_update_grid(notified_changeset).await?;
self.notify_did_update_grid(notified_changeset).await?;
}
Ok(())
}
@ -827,7 +856,7 @@ impl GridRevisionEditor {
{
let updated_field = FieldPB::from(field_rev);
let notified_changeset = GridFieldChangesetPB::update(&self.grid_id, vec![updated_field.clone()]);
let _ = self.notify_did_update_grid(notified_changeset).await?;
self.notify_did_update_grid(notified_changeset).await?;
send_dart_notification(field_id, GridDartNotification::DidUpdateField)
.payload(updated_field)

View File

@ -320,7 +320,7 @@ where
let configuration = Arc::make_mut(&mut self.configuration);
let is_changed = mut_configuration_fn(configuration);
if is_changed {
let configuration = (&*self.configuration).clone();
let configuration = (*self.configuration).clone();
let writer = self.writer.clone();
let field_id = self.field_rev.id.clone();
let field_type = self.field_rev.ty;

View File

@ -70,7 +70,7 @@ where
}
// Separates the rows into different groups
let _ = group_controller.fill_groups(&row_revs, &field_rev)?;
group_controller.fill_groups(&row_revs, &field_rev)?;
Ok(group_controller)
}

View File

@ -9,7 +9,7 @@ use flowy_database::{
use flowy_error::{FlowyError, FlowyResult};
use std::sync::Arc;
#[derive(PartialEq, Clone, Debug, Queryable, Identifiable, Insertable, Associations)]
#[derive(PartialEq, Eq, Clone, Debug, Queryable, Identifiable, Insertable, Associations)]
#[table_name = "kv_table"]
#[primary_key(key)]
pub struct KeyValue {

View File

@ -29,7 +29,7 @@ impl GridMigration {
if KV::get_bool(&key) {
return Ok(());
}
let _ = self.migration_grid_rev_struct(grid_id).await?;
self.migration_grid_rev_struct(grid_id).await?;
tracing::trace!("Run grid:{} v1 migration", grid_id);
KV::set_bool(&key, true);
Ok(())

View File

@ -22,7 +22,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let _ = GridMetaRevisionSql::create(revision_records, &*conn)?;
GridMetaRevisionSql::create(revision_records, &*conn)?;
Ok(())
}
@ -36,7 +36,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let records = GridMetaRevisionSql::read(&self.user_id, object_id, rev_ids, &*conn)?;
let records = GridMetaRevisionSql::read(&self.user_id, object_id, rev_ids, &conn)?;
Ok(records)
}
@ -52,7 +52,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = conn.immediate_transaction::<_, FlowyError, _>(|| {
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = GridMetaRevisionSql::update(changeset, conn)?;
}
@ -63,7 +63,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
fn delete_revision_records(&self, object_id: &str, rev_ids: Option<Vec<i64>>) -> Result<(), Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = GridMetaRevisionSql::delete(object_id, rev_ids, conn)?;
GridMetaRevisionSql::delete(object_id, rev_ids, conn)?;
Ok(())
}
@ -75,8 +75,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridBlockRevisionPersisten
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
let _ = GridMetaRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
let _ = GridMetaRevisionSql::create(inserted_records, &*conn)?;
GridMetaRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
GridMetaRevisionSql::create(inserted_records, &*conn)?;
Ok(())
})
}

View File

@ -22,7 +22,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let _ = GridRevisionSql::create(revision_records, &*conn)?;
GridRevisionSql::create(revision_records, &*conn)?;
Ok(())
}
@ -36,7 +36,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let records = GridRevisionSql::read(&self.user_id, object_id, rev_ids, &*conn)?;
let records = GridRevisionSql::read(&self.user_id, object_id, rev_ids, &conn)?;
Ok(records)
}
@ -52,7 +52,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = conn.immediate_transaction::<_, FlowyError, _>(|| {
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = GridRevisionSql::update(changeset, conn)?;
}
@ -63,7 +63,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
fn delete_revision_records(&self, object_id: &str, rev_ids: Option<Vec<i64>>) -> Result<(), Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = GridRevisionSql::delete(object_id, rev_ids, conn)?;
GridRevisionSql::delete(object_id, rev_ids, conn)?;
Ok(())
}
@ -75,8 +75,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridRevisionPersistence {
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
let _ = GridRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
let _ = GridRevisionSql::create(inserted_records, &*conn)?;
GridRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
GridRevisionSql::create(inserted_records, &*conn)?;
Ok(())
})
}

View File

@ -31,7 +31,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
fn create_revision_records(&self, revision_records: Vec<SyncRecord>) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let _ = GridViewRevisionSql::create(revision_records, &*conn)?;
GridViewRevisionSql::create(revision_records, &*conn)?;
Ok(())
}
@ -45,7 +45,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
rev_ids: Option<Vec<i64>>,
) -> Result<Vec<SyncRecord>, Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
let records = GridViewRevisionSql::read(&self.user_id, object_id, rev_ids, &*conn)?;
let records = GridViewRevisionSql::read(&self.user_id, object_id, rev_ids, &conn)?;
Ok(records)
}
@ -61,7 +61,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
fn update_revision_record(&self, changesets: Vec<RevisionChangeset>) -> FlowyResult<()> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = conn.immediate_transaction::<_, FlowyError, _>(|| {
conn.immediate_transaction::<_, FlowyError, _>(|| {
for changeset in changesets {
let _ = GridViewRevisionSql::update(changeset, conn)?;
}
@ -72,7 +72,7 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
fn delete_revision_records(&self, object_id: &str, rev_ids: Option<Vec<i64>>) -> Result<(), Self::Error> {
let conn = &*self.pool.get().map_err(internal_error)?;
let _ = GridViewRevisionSql::delete(object_id, rev_ids, conn)?;
GridViewRevisionSql::delete(object_id, rev_ids, conn)?;
Ok(())
}
@ -84,8 +84,8 @@ impl RevisionDiskCache<Arc<ConnectionPool>> for SQLiteGridViewRevisionPersistenc
) -> Result<(), Self::Error> {
let conn = self.pool.get().map_err(internal_error)?;
conn.immediate_transaction::<_, FlowyError, _>(|| {
let _ = GridViewRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
let _ = GridViewRevisionSql::create(inserted_records, &*conn)?;
GridViewRevisionSql::delete(object_id, deleted_rev_ids, &*conn)?;
GridViewRevisionSql::create(inserted_records, &*conn)?;
Ok(())
})
}

View File

@ -32,7 +32,7 @@ impl TaskHandler for SortTaskHandler {
let sort_controller = self.sort_controller.clone();
Box::pin(async move {
if let TaskContent::Text(predicate) = content {
let _ = sort_controller
sort_controller
.write()
.await
.process(&predicate)

View File

@ -310,8 +310,7 @@ impl GridViewRevisionEditor {
#[tracing::instrument(level = "trace", skip(self), err)]
pub async fn move_view_group(&self, params: MoveGroupParams) -> FlowyResult<()> {
let _ = self
.group_controller
self.group_controller
.write()
.await
.move_group(&params.from_group_id, &params.to_group_id)?;
@ -343,8 +342,7 @@ impl GridViewRevisionEditor {
pub async fn get_view_setting(&self) -> GridSettingPB {
let field_revs = self.delegate.get_field_revs(None).await;
let grid_setting = make_grid_setting(&*self.pad.read().await, &field_revs);
grid_setting
make_grid_setting(&*self.pad.read().await, &field_revs)
}
pub async fn get_all_view_filters(&self) -> Vec<Arc<FilterRevision>> {
@ -364,20 +362,16 @@ impl GridViewRevisionEditor {
///
pub async fn initialize_new_group(&self, params: InsertGroupParams) -> FlowyResult<()> {
if let Some(field_rev) = self.delegate.get_field_rev(&params.field_id).await {
let _ = self
.modify(|pad| {
let configuration = default_group_configuration(&field_rev);
let changeset = pad.insert_or_update_group_configuration(
&params.field_id,
&params.field_type_rev,
configuration,
)?;
Ok(changeset)
})
.await?;
self.modify(|pad| {
let configuration = default_group_configuration(&field_rev);
let changeset =
pad.insert_or_update_group_configuration(&params.field_id, &params.field_type_rev, configuration)?;
Ok(changeset)
})
.await?;
}
if self.group_controller.read().await.field_id() != params.field_id {
let _ = self.group_by_view_field(&params.field_id).await?;
self.group_by_view_field(&params.field_id).await?;
self.notify_did_update_setting().await;
}
Ok(())
@ -441,12 +435,11 @@ impl GridViewRevisionEditor {
.await;
let sort_type = params.sort_type;
let _ = self
.modify(|pad| {
let changeset = pad.delete_sort(&params.sort_id, &sort_type.field_id, sort_type.field_type)?;
Ok(changeset)
})
.await?;
self.modify(|pad| {
let changeset = pad.delete_sort(&params.sort_id, &sort_type.field_id, sort_type.field_type)?;
Ok(changeset)
})
.await?;
self.notify_did_update_sort(changeset).await;
Ok(())
@ -513,12 +506,11 @@ impl GridViewRevisionEditor {
.did_receive_changes(FilterChangeset::from_delete(filter_type.clone()))
.await;
let _ = self
.modify(|pad| {
let changeset = pad.delete_filter(&params.filter_id, &filter_type.field_id, filter_type.field_type)?;
Ok(changeset)
})
.await?;
self.modify(|pad| {
let changeset = pad.delete_filter(&params.filter_id, &filter_type.field_id, filter_type.field_type)?;
Ok(changeset)
})
.await?;
if changeset.is_some() {
self.notify_did_update_filter(changeset.unwrap()).await;
@ -638,10 +630,10 @@ impl GridViewRevisionEditor {
F: for<'a> FnOnce(&'a mut GridViewRevisionPad) -> FlowyResult<Option<GridViewRevisionChangeset>>,
{
let mut write_guard = self.pad.write().await;
match f(&mut *write_guard)? {
match f(&mut write_guard)? {
None => {}
Some(change) => {
let _ = apply_change(&self.user_id, self.rev_manager.clone(), change).await?;
apply_change(&self.user_id, self.rev_manager.clone(), change).await?;
}
}
Ok(())

View File

@ -103,7 +103,7 @@ impl GridViewManager {
pub async fn group_by_field(&self, field_id: &str) -> FlowyResult<()> {
let view_editor = self.get_default_view_editor().await?;
let _ = view_editor.group_by_view_field(field_id).await?;
view_editor.group_by_view_field(field_id).await?;
Ok(())
}
@ -166,7 +166,7 @@ impl GridViewManager {
pub async fn move_group(&self, params: MoveGroupParams) -> FlowyResult<()> {
let view_editor = self.get_default_view_editor().await?;
let _ = view_editor.move_view_group(params).await?;
view_editor.move_view_group(params).await?;
Ok(())
}
@ -212,10 +212,10 @@ impl GridViewManager {
) -> FlowyResult<()> {
let view_editor = self.get_default_view_editor().await?;
if view_editor.group_id().await == field_id {
let _ = view_editor.group_by_view_field(field_id).await?;
view_editor.group_by_view_field(field_id).await?;
}
let _ = view_editor
view_editor
.did_update_view_field_type_option(field_id, old_field_rev)
.await?;
Ok(())

View File

@ -99,7 +99,7 @@ impl GroupConfigurationWriter for GroupConfigurationWriterImpl {
)?;
if let Some(changeset) = changeset {
let _ = apply_change(&user_id, rev_manager, changeset).await?;
apply_change(&user_id, rev_manager, changeset).await?;
}
Ok(())
})

View File

@ -162,7 +162,7 @@ impl GridRowTest {
FieldType::RichText => {
let cell_data = self
.editor
.get_cell_bytes(&cell_id)
.get_cell_protobuf(&cell_id)
.await
.unwrap()
.parser::<TextCellDataParser>()
@ -177,7 +177,7 @@ impl GridRowTest {
.unwrap();
let cell_data = self
.editor
.get_cell_bytes(&cell_id)
.get_cell_protobuf(&cell_id)
.await
.unwrap()
.custom_parser(NumberCellCustomDataParser(number_type_option.format))
@ -187,7 +187,7 @@ impl GridRowTest {
FieldType::DateTime => {
let cell_data = self
.editor
.get_cell_bytes(&cell_id)
.get_cell_protobuf(&cell_id)
.await
.unwrap()
.parser::<DateCellDataParser>()
@ -198,7 +198,7 @@ impl GridRowTest {
FieldType::SingleSelect => {
let cell_data = self
.editor
.get_cell_bytes(&cell_id)
.get_cell_protobuf(&cell_id)
.await
.unwrap()
.parser::<SelectOptionCellDataParser>()
@ -209,7 +209,7 @@ impl GridRowTest {
FieldType::MultiSelect => {
let cell_data = self
.editor
.get_cell_bytes(&cell_id)
.get_cell_protobuf(&cell_id)
.await
.unwrap()
.parser::<SelectOptionCellDataParser>()
@ -228,7 +228,7 @@ impl GridRowTest {
FieldType::Checklist => {
let cell_data = self
.editor
.get_cell_bytes(&cell_id)
.get_cell_protobuf(&cell_id)
.await
.unwrap()
.parser::<SelectOptionCellDataParser>()
@ -246,7 +246,7 @@ impl GridRowTest {
FieldType::Checkbox => {
let cell_data = self
.editor
.get_cell_bytes(&cell_id)
.get_cell_protobuf(&cell_id)
.await
.unwrap()
.parser::<CheckboxCellDataParser>()
@ -256,7 +256,7 @@ impl GridRowTest {
FieldType::URL => {
let cell_data = self
.editor
.get_cell_bytes(&cell_id)
.get_cell_protobuf(&cell_id)
.await
.unwrap()
.parser::<URLCellDataParser>()

View File

@ -3,7 +3,7 @@ use std::sync::Arc;
use flowy_grid::services::field::{
ChecklistTypeOptionPB, DateCellChangeset, MultiSelectTypeOptionPB, SelectOptionPB, SingleSelectTypeOptionPB,
URLCellData,
URLCellChangeset, URLCellData,
};
use flowy_grid::services::row::RowRevisionBuilder;
use grid_rev_model::{FieldRevision, RowRevision};
@ -58,10 +58,9 @@ impl<'a> GridRowTestBuilder<'a> {
checkbox_field.id.clone()
}
pub fn insert_url_cell(&mut self, data: &str) -> String {
pub fn insert_url_cell(&mut self, content: &str) -> String {
let url_field = self.field_rev_with_type(&FieldType::URL);
let url_data = URLCellData::new(data).to_json().unwrap();
self.inner_builder.insert_text_cell(&url_field.id, url_data);
self.inner_builder.insert_url_cell(&url_field.id, content.to_string());
url_field.id.clone()
}

View File

@ -36,7 +36,7 @@ impl GridCellTest {
if is_err {
assert!(result.is_err())
} else {
let _ = result.unwrap();
result.unwrap();
}
} // CellScript::AssertGridRevisionPad => {
// sleep(Duration::from_millis(2 * REVISION_WRITE_INTERVAL_IN_MILLIS)).await;

View File

@ -57,3 +57,38 @@ async fn grid_cell_update() {
test.run_scripts(scripts).await;
}
#[tokio::test]
async fn text_cell_data_test() {
let test = GridCellTest::new().await;
let text_field = test.get_first_field_rev(FieldType::RichText);
let cells = test.editor.get_cell_data_for_field(&text_field.id).await.unwrap();
for (i, cell) in cells.iter().enumerate() {
let text = cell.get_text_field_cell_data().unwrap();
match i {
0 => assert_eq!(text.as_str(), "A"),
1 => assert_eq!(text.as_str(), ""),
2 => assert_eq!(text.as_str(), "C"),
3 => assert_eq!(text.as_str(), "DA"),
4 => assert_eq!(text.as_str(), "AE"),
5 => assert_eq!(text.as_str(), "AE"),
_ => {}
}
}
}
#[tokio::test]
async fn url_cell_data_test() {
let test = GridCellTest::new().await;
let url_field = test.get_first_field_rev(FieldType::URL);
let cells = test.editor.get_cell_data_for_field(&url_field.id).await.unwrap();
for (i, cell) in cells.iter().enumerate() {
let url_cell_data = cell.get_url_field_cell_data().unwrap();
match i {
0 => assert_eq!(url_cell_data.url.as_str(), "https://www.appflowy.io/"),
_ => {}
}
}
}

View File

@ -52,7 +52,7 @@ async fn grid_update_field_with_empty_change() {
let scripts = vec![CreateField { params }];
test.run_scripts(scripts).await;
let field_rev = (&*test.field_revs.clone().pop().unwrap()).clone();
let field_rev = (*test.field_revs.clone().pop().unwrap()).clone();
let changeset = FieldChangesetParams {
field_id: field_rev.id.clone(),
grid_id: test.view_id(),
@ -77,7 +77,7 @@ async fn grid_update_field() {
let create_field_index = test.field_count();
test.run_scripts(scripts).await;
//
let single_select_field = (&*test.field_revs.clone().pop().unwrap()).clone();
let single_select_field = (*test.field_revs.clone().pop().unwrap()).clone();
let mut single_select_type_option = SingleSelectTypeOptionPB::from(&single_select_field);
single_select_type_option.options.push(SelectOptionPB::new("Unknown"));
@ -113,7 +113,7 @@ async fn grid_delete_field() {
let scripts = vec![CreateField { params }];
test.run_scripts(scripts).await;
let text_field_rev = (&*test.field_revs.clone().pop().unwrap()).clone();
let text_field_rev = (*test.field_revs.clone().pop().unwrap()).clone();
let scripts = vec![
DeleteField {
field_rev: text_field_rev,

View File

@ -301,6 +301,7 @@ fn make_test_grid() -> BuildGridContext {
.insert_multi_select_cell(|mut options| vec![options.remove(0), options.remove(0)]),
FieldType::Checklist => row_builder.insert_checklist_cell(|options| options),
FieldType::Checkbox => row_builder.insert_checkbox_cell("true"),
FieldType::URL => row_builder.insert_url_cell("AppFlowy website - https://www.appflowy.io"),
_ => "".to_owned(),
};
}

View File

@ -41,7 +41,7 @@ impl DocumentCloudService for DocumentCloudServiceImpl {
}
pub async fn create_document_request(token: &str, params: CreateDocumentParams, url: &str) -> Result<(), FlowyError> {
let _ = request_builder()
request_builder()
.post(url)
.header(HEADER_TOKEN, token)
.json(params)?
@ -66,7 +66,7 @@ pub async fn read_document_request(
}
pub async fn reset_doc_request(token: &str, params: ResetDocumentParams, url: &str) -> Result<(), FlowyError> {
let _ = request_builder()
request_builder()
.patch(url)
.header(HEADER_TOKEN, token)
.json(params)?

View File

@ -57,7 +57,7 @@ impl FolderCouldServiceV1 for FolderHttpCloudService {
let token = token.to_owned();
let url = self.config.workspace_url();
FutureResult::new(async move {
let _ = update_workspace_request(&token, params, &url).await?;
update_workspace_request(&token, params, &url).await?;
Ok(())
})
}
@ -66,7 +66,7 @@ impl FolderCouldServiceV1 for FolderHttpCloudService {
let token = token.to_owned();
let url = self.config.workspace_url();
FutureResult::new(async move {
let _ = delete_workspace_request(&token, params, &url).await?;
delete_workspace_request(&token, params, &url).await?;
Ok(())
})
}
@ -93,7 +93,7 @@ impl FolderCouldServiceV1 for FolderHttpCloudService {
let token = token.to_owned();
let url = self.config.view_url();
FutureResult::new(async move {
let _ = delete_view_request(&token, params, &url).await?;
delete_view_request(&token, params, &url).await?;
Ok(())
})
}
@ -102,7 +102,7 @@ impl FolderCouldServiceV1 for FolderHttpCloudService {
let token = token.to_owned();
let url = self.config.view_url();
FutureResult::new(async move {
let _ = update_view_request(&token, params, &url).await?;
update_view_request(&token, params, &url).await?;
Ok(())
})
}
@ -129,7 +129,7 @@ impl FolderCouldServiceV1 for FolderHttpCloudService {
let token = token.to_owned();
let url = self.config.app_url();
FutureResult::new(async move {
let _ = update_app_request(&token, params, &url).await?;
update_app_request(&token, params, &url).await?;
Ok(())
})
}
@ -138,7 +138,7 @@ impl FolderCouldServiceV1 for FolderHttpCloudService {
let token = token.to_owned();
let url = self.config.app_url();
FutureResult::new(async move {
let _ = delete_app_request(&token, params, &url).await?;
delete_app_request(&token, params, &url).await?;
Ok(())
})
}
@ -147,7 +147,7 @@ impl FolderCouldServiceV1 for FolderHttpCloudService {
let token = token.to_owned();
let url = self.config.trash_url();
FutureResult::new(async move {
let _ = create_trash_request(&token, params, &url).await?;
create_trash_request(&token, params, &url).await?;
Ok(())
})
}
@ -156,7 +156,7 @@ impl FolderCouldServiceV1 for FolderHttpCloudService {
let token = token.to_owned();
let url = self.config.trash_url();
FutureResult::new(async move {
let _ = delete_trash_request(&token, params, &url).await?;
delete_trash_request(&token, params, &url).await?;
Ok(())
})
}

View File

@ -46,7 +46,7 @@ impl UserCloudService for UserHttpCloudService {
let token = token.to_owned();
let url = self.config.user_profile_url();
FutureResult::new(async move {
let _ = update_user_profile_request(&token, params, &url).await?;
update_user_profile_request(&token, params, &url).await?;
Ok(())
})
}
@ -76,7 +76,7 @@ pub async fn user_sign_in_request(params: SignInParams, url: &str) -> Result<Sig
}
pub async fn user_sign_out_request(token: &str, url: &str) -> Result<(), ServerError> {
let _ = request_builder().delete(url).header(HEADER_TOKEN, token).send().await?;
request_builder().delete(url).header(HEADER_TOKEN, token).send().await?;
Ok(())
}
@ -94,7 +94,7 @@ pub async fn update_user_profile_request(
params: UpdateUserProfileParams,
url: &str,
) -> Result<(), ServerError> {
let _ = request_builder()
request_builder()
.patch(url)
.header(HEADER_TOKEN, token)
.protobuf(params)?

View File

@ -66,7 +66,7 @@ impl FolderCloudPersistence for LocalDocumentCloudPersistence {
let folder_id = folder_id.to_owned();
let storage = self.storage.clone();
Box::pin(async move {
let _ = storage.set_revisions(revisions.clone()).await?;
storage.set_revisions(revisions.clone()).await?;
make_folder_from_revisions_pb(&folder_id, revisions)
})
}
@ -74,7 +74,7 @@ impl FolderCloudPersistence for LocalDocumentCloudPersistence {
fn save_folder_revisions(&self, revisions: Vec<Revision>) -> BoxResultFuture<(), CollaborateError> {
let storage = self.storage.clone();
Box::pin(async move {
let _ = storage.set_revisions(revisions).await?;
storage.set_revisions(revisions).await?;
Ok(())
})
}
@ -93,7 +93,7 @@ impl FolderCloudPersistence for LocalDocumentCloudPersistence {
let storage = self.storage.clone();
let folder_id = folder_id.to_owned();
Box::pin(async move {
let _ = storage.reset_object(&folder_id, revisions).await?;
storage.reset_object(&folder_id, revisions).await?;
Ok(())
})
}
@ -120,7 +120,7 @@ impl DocumentCloudPersistence for LocalDocumentCloudPersistence {
let doc_id = doc_id.to_owned();
let storage = self.storage.clone();
Box::pin(async move {
let _ = storage.set_revisions(revisions.clone()).await?;
storage.set_revisions(revisions.clone()).await?;
make_document_from_revision_pbs(&doc_id, revisions)
})
}
@ -138,7 +138,7 @@ impl DocumentCloudPersistence for LocalDocumentCloudPersistence {
fn save_document_revisions(&self, revisions: Vec<Revision>) -> BoxResultFuture<(), CollaborateError> {
let storage = self.storage.clone();
Box::pin(async move {
let _ = storage.set_revisions(revisions).await?;
storage.set_revisions(revisions).await?;
Ok(())
})
}
@ -147,7 +147,7 @@ impl DocumentCloudPersistence for LocalDocumentCloudPersistence {
let storage = self.storage.clone();
let doc_id = doc_id.to_owned();
Box::pin(async move {
let _ = storage.reset_object(&doc_id, revisions).await?;
storage.reset_object(&doc_id, revisions).await?;
Ok(())
})
}

View File

@ -110,11 +110,11 @@ impl LocalWebSocketRunner {
let client_data = ClientRevisionWSData::try_from(bytes).map_err(internal_error)?;
match message.channel {
WSChannel::Document => {
let _ = self.handle_document_client_data(client_data, "".to_owned()).await?;
self.handle_document_client_data(client_data, "".to_owned()).await?;
Ok(())
}
WSChannel::Folder => {
let _ = self.handle_folder_client_data(client_data, "".to_owned()).await?;
self.handle_folder_client_data(client_data, "".to_owned()).await?;
Ok(())
}
WSChannel::Grid => {
@ -143,10 +143,10 @@ impl LocalWebSocketRunner {
let ty = client_data.ty.clone();
match ty {
ClientRevisionWSDataType::ClientPushRev => {
let _ = self.folder_manager.handle_client_revisions(user, client_data).await?;
self.folder_manager.handle_client_revisions(user, client_data).await?;
}
ClientRevisionWSDataType::ClientPing => {
let _ = self.folder_manager.handle_client_ping(user, client_data).await?;
self.folder_manager.handle_client_ping(user, client_data).await?;
}
}
Ok(())
@ -172,10 +172,10 @@ impl LocalWebSocketRunner {
let ty = client_data.ty.clone();
match ty {
ClientRevisionWSDataType::ClientPushRev => {
let _ = self.doc_manager.handle_client_revisions(user, client_data).await?;
self.doc_manager.handle_client_revisions(user, client_data).await?;
}
ClientRevisionWSDataType::ClientPing => {
let _ = self.doc_manager.handle_client_ping(user, client_data).await?;
self.doc_manager.handle_client_ping(user, client_data).await?;
}
}
Ok(())

View File

@ -63,7 +63,7 @@ impl FlowyWebSocketConnect {
pub async fn start(&self, token: String, user_id: String) -> Result<(), FlowyError> {
let addr = format!("{}/{}", self.addr, &token);
self.inner.stop_connect().await?;
let _ = self.inner.start_connect(addr, user_id).await?;
self.inner.start_connect(addr, user_id).await?;
Ok(())
}
@ -102,7 +102,7 @@ impl FlowyWebSocketConnect {
}
pub fn add_ws_message_receiver(&self, receiver: Arc<dyn WSMessageReceiver>) -> Result<(), FlowyError> {
let _ = self.inner.add_msg_receiver(receiver)?;
self.inner.add_msg_receiver(receiver)?;
Ok(())
}

View File

@ -17,7 +17,7 @@ impl FlowyRawWebSocket for Arc<WSController> {
fn start_connect(&self, addr: String, _user_id: String) -> FutureResult<(), FlowyError> {
let cloned_ws = self.clone();
FutureResult::new(async move {
let _ = cloned_ws.start(addr).await.map_err(internal_error)?;
cloned_ws.start(addr).await.map_err(internal_error)?;
Ok(())
})
}
@ -38,13 +38,13 @@ impl FlowyRawWebSocket for Arc<WSController> {
fn reconnect(&self, count: usize) -> FutureResult<(), FlowyError> {
let cloned_ws = self.clone();
FutureResult::new(async move {
let _ = cloned_ws.retry(count).await.map_err(internal_error)?;
cloned_ws.retry(count).await.map_err(internal_error)?;
Ok(())
})
}
fn add_msg_receiver(&self, receiver: Arc<dyn WSMessageReceiver>) -> Result<(), FlowyError> {
let _ = self.add_ws_message_receiver(receiver).map_err(internal_error)?;
self.add_ws_message_receiver(receiver).map_err(internal_error)?;
Ok(())
}
@ -64,7 +64,7 @@ impl FlowyRawWebSocket for Arc<WSController> {
impl FlowyWebSocket for WSSender {
fn send(&self, msg: WebSocketRawMessage) -> Result<(), FlowyError> {
let _ = self.send_msg(msg).map_err(internal_error)?;
self.send_msg(msg).map_err(internal_error)?;
Ok(())
}
}

View File

@ -43,14 +43,14 @@ where
pub async fn run(&self) -> FlowyResult<()> {
match self.target.read_record() {
None => {
let _ = self.reset_object().await?;
let _ = self.save_migrate_record()?;
self.reset_object().await?;
self.save_migrate_record()?;
}
Some(s) => {
let mut record = MigrationObjectRecord::from_str(&s).map_err(|e| FlowyError::serde().context(e))?;
let rev_str = self.target.default_target_rev_str()?;
if record.len < rev_str.len() {
let _ = self.reset_object().await?;
self.reset_object().await?;
record.len = rev_str.len();
self.target.set_record(record.to_string());
}

View File

@ -87,13 +87,13 @@ where
}
pub async fn ack_revision(&self, rev_id: i64) -> FlowyResult<()> {
let _ = self.rev_sink.ack(rev_id).await?;
self.rev_sink.ack(rev_id).await?;
Ok(())
}
pub async fn send_revisions(&self, range: RevisionRange) -> FlowyResult<()> {
let revisions = self.rev_manager.get_revisions_in_range(range).await?;
let _ = self.rev_sink.send(revisions).await?;
self.rev_sink.send(revisions).await?;
Ok(())
}
@ -123,13 +123,13 @@ where
// // server, and it needs to override the client delta.
let md5 = self.resolver.reset_operations(client_operations).await?;
debug_assert!(md5.is_equal(&revisions.last().unwrap().md5));
let _ = self.rev_manager.reset_object(revisions).await?;
self.rev_manager.reset_object(revisions).await?;
Ok(None)
}
Some(server_operations) => {
let md5 = self.resolver.compose_operations(client_operations.clone()).await?;
for revision in &revisions {
let _ = self.rev_manager.add_remote_revision(revision).await?;
self.rev_manager.add_remote_revision(revision).await?;
}
let (client_revision, server_revision) = make_client_and_server_revision(
&self.user_id,
@ -138,7 +138,7 @@ where
Some(server_operations),
md5,
);
let _ = self.rev_manager.add_remote_revision(&client_revision).await?;
self.rev_manager.add_remote_revision(&client_revision).await?;
Ok(server_revision)
}
}

View File

@ -137,7 +137,7 @@ impl<Connection: 'static> RevisionManager<Connection> {
let current_rev_id = revisions.last().as_ref().map(|revision| revision.rev_id).unwrap_or(0);
match B::deserialize_revisions(&self.object_id, revisions) {
Ok(object) => {
let _ = self.rev_persistence.sync_revision_records(&revision_records).await?;
self.rev_persistence.sync_revision_records(&revision_records).await?;
self.rev_id_counter.set(current_rev_id);
Ok(object)
}
@ -185,7 +185,7 @@ impl<Connection: 'static> RevisionManager<Connection> {
#[tracing::instrument(level = "debug", skip(self, revisions), err)]
pub async fn reset_object(&self, revisions: Vec<Revision>) -> FlowyResult<()> {
let rev_id = pair_rev_id_from_revisions(&revisions).1;
let _ = self.rev_persistence.reset(revisions).await?;
self.rev_persistence.reset(revisions).await?;
self.rev_id_counter.set(rev_id);
Ok(())
}
@ -196,7 +196,7 @@ impl<Connection: 'static> RevisionManager<Connection> {
return Err(FlowyError::internal().context("Remote revisions is empty"));
}
let _ = self.rev_persistence.add_ack_revision(revision).await?;
self.rev_persistence.add_ack_revision(revision).await?;
self.rev_id_counter.set(revision.rev_id);
Ok(())
}

View File

@ -137,8 +137,7 @@ where
state: RevisionState::Sync,
write_to_disk: true,
};
let _ = self
.disk_cache
self.disk_cache
.delete_and_insert_records(&self.object_id, Some(rev_ids), vec![record])?;
}
Ok(())
@ -192,7 +191,7 @@ where
let merged_revision = rev_compress.merge_revisions(&self.user_id, &self.object_id, revisions)?;
let rev_id = merged_revision.rev_id;
tracing::Span::current().record("rev_id", &merged_revision.rev_id);
let _ = sync_seq.recv(merged_revision.rev_id)?;
sync_seq.recv(merged_revision.rev_id)?;
// replace the revisions in range with compact revision
self.compact(&range, merged_revision).await?;
@ -251,10 +250,9 @@ where
})
.collect::<Vec<_>>();
let _ = self
.disk_cache
self.disk_cache
.delete_and_insert_records(&self.object_id, None, records.clone())?;
let _ = self.memory_cache.reset_with_revisions(records).await;
self.memory_cache.reset_with_revisions(records).await;
self.sync_seq.write().await.clear();
Ok(())
}
@ -277,8 +275,7 @@ where
async fn compact(&self, range: &RevisionRange, new_revision: Revision) -> FlowyResult<()> {
self.memory_cache.remove_with_range(range);
let rev_ids = range.to_rev_ids();
let _ = self
.disk_cache
self.disk_cache
.delete_revision_records(&self.object_id, Some(rev_ids))?;
self.add(new_revision, RevisionState::Sync, true).await?;
Ok(())
@ -341,8 +338,7 @@ where
#[allow(dead_code)]
pub fn delete_revisions_from_range(&self, range: RevisionRange) -> FlowyResult<()> {
let _ = self
.disk_cache
self.disk_cache
.delete_revision_records(&self.object_id, Some(range.to_rev_ids()))?;
Ok(())
}
@ -356,7 +352,7 @@ impl<C> RevisionMemoryCacheDelegate for Arc<dyn RevisionDiskCache<C, Error = Flo
"checkpoint_result",
&format!("{} records were saved", records.len()).as_str(),
);
let _ = self.create_revision_records(records)?;
self.create_revision_records(records)?;
}
Ok(())
}
@ -391,7 +387,7 @@ impl DeferSyncSequence {
/// When calling the `compact` method, it returns a list of revision ids starting from
/// the `compact_start_pos` and ending with the `compact_length`.
fn merge_recv(&mut self, new_rev_id: i64) -> FlowyResult<()> {
let _ = self.recv(new_rev_id)?;
self.recv(new_rev_id)?;
self.compact_length += 1;
if self.compact_index.is_none() && !self.rev_ids.is_empty() {

View File

@ -116,7 +116,7 @@ impl RevisionWebSocketManager {
#[tracing::instrument(level = "debug", skip(self, data), err)]
pub async fn receive_ws_data(&self, data: ServerRevisionWSData) -> Result<(), FlowyError> {
let _ = self.ws_passthrough_tx.send(data).await.map_err(|e| {
self.ws_passthrough_tx.send(data).await.map_err(|e| {
let err_msg = format!("{} passthrough error: {}", self.object_id, e);
FlowyError::internal().context(err_msg)
})?;
@ -216,11 +216,11 @@ impl RevisionWSStream {
match payload {
WSRevisionPayload::ServerPushRev { revisions } => {
tracing::trace!("[{}]: new push revision: {}", self, object_id);
let _ = self.consumer.receive_push_revision(revisions).await?;
self.consumer.receive_push_revision(revisions).await?;
}
WSRevisionPayload::ServerPullRev { range } => {
tracing::trace!("[{}]: new pull: {}:{:?}", self, object_id, range);
let _ = self.consumer.pull_revisions_in_range(range).await?;
self.consumer.pull_revisions_in_range(range).await?;
}
WSRevisionPayload::ServerAck { rev_id } => {
tracing::trace!("[{}]: new ack: {}:{}", self, object_id, rev_id);
@ -412,7 +412,7 @@ impl WSDataProvider {
Ok(())
}
Source::Revision => {
let _ = self.data_source.ack_revision(rev_id).await?;
self.data_source.ack_revision(rev_id).await?;
Ok::<(), FlowyError>(())
}
}

View File

@ -265,7 +265,7 @@ impl RevisionMergeable for RevisionMergeableMock {
let mut object = RevisionObjectMock::new("");
for revision in revisions {
if let Ok(other) = RevisionObjectMock::from_bytes(&revision.bytes) {
let _ = object.compose(other)?;
object.compose(other)?;
}
}
Ok(Bytes::from(object.to_bytes()))
@ -326,7 +326,7 @@ impl RevisionObjectDeserializer for RevisionObjectMockSerde {
for revision in revisions {
if let Ok(revision_object) = RevisionObjectMock::from_bytes(&revision.bytes) {
let _ = object.compose(revision_object)?;
object.compose(revision_object)?;
}
}

View File

@ -54,7 +54,7 @@ impl DocumentUser for BlockUserImpl {
let doc_dir = format!("{}/document", dir);
if !Path::new(&doc_dir).exists() {
let _ = std::fs::create_dir_all(&doc_dir)?;
std::fs::create_dir_all(&doc_dir)?;
}
Ok(doc_dir)
}

View File

@ -156,7 +156,7 @@ impl ViewDataProcessor for DocumentViewDataProcessor {
let manager = self.0.clone();
FutureResult::new(async move {
let _ = manager.create_document(view_id, vec![revision]).await?;
manager.create_document(view_id, vec![revision]).await?;
Ok(())
})
}
@ -165,7 +165,7 @@ impl ViewDataProcessor for DocumentViewDataProcessor {
let manager = self.0.clone();
let view_id = view_id.to_string();
FutureResult::new(async move {
let _ = manager.close_document_editor(view_id).await?;
manager.close_document_editor(view_id).await?;
Ok(())
})
}
@ -195,7 +195,7 @@ impl ViewDataProcessor for DocumentViewDataProcessor {
FutureResult::new(async move {
let delta_data = Bytes::from(document_content);
let revision = Revision::initial_revision(&view_id, delta_data.clone());
let _ = manager.create_document(view_id, vec![revision]).await?;
manager.create_document(view_id, vec![revision]).await?;
Ok(delta_data)
})
}
@ -229,7 +229,7 @@ impl ViewDataProcessor for GridViewDataProcessor {
let view_id = view_id.to_string();
let grid_manager = self.0.clone();
FutureResult::new(async move {
let _ = grid_manager.create_grid(view_id, vec![revision]).await?;
grid_manager.create_grid(view_id, vec![revision]).await?;
Ok(())
})
}
@ -238,7 +238,7 @@ impl ViewDataProcessor for GridViewDataProcessor {
let grid_manager = self.0.clone();
let view_id = view_id.to_string();
FutureResult::new(async move {
let _ = grid_manager.close_grid(view_id).await?;
grid_manager.close_grid(view_id).await?;
Ok(())
})
}

View File

@ -265,20 +265,20 @@ async fn _listen_user_status(
match status {
UserStatus::Login { token, user_id } => {
tracing::trace!("User did login");
let _ = folder_manager.initialize(&user_id, &token).await?;
let _ = document_manager.initialize(&user_id).await?;
let _ = grid_manager.initialize(&user_id, &token).await?;
let _ = ws_conn.start(token, user_id).await?;
folder_manager.initialize(&user_id, &token).await?;
document_manager.initialize(&user_id).await?;
grid_manager.initialize(&user_id, &token).await?;
ws_conn.start(token, user_id).await?;
}
UserStatus::Logout { token: _, user_id } => {
tracing::trace!("User did logout");
folder_manager.clear(&user_id).await;
let _ = ws_conn.stop().await;
ws_conn.stop().await;
}
UserStatus::Expired { token: _, user_id } => {
tracing::trace!("User session has been expired");
folder_manager.clear(&user_id).await;
let _ = ws_conn.stop().await;
ws_conn.stop().await;
}
UserStatus::SignUp { profile, ret } => {
tracing::trace!("User did sign up");
@ -287,18 +287,18 @@ async fn _listen_user_status(
DocumentVersionPB::V0 => ViewDataFormatPB::DeltaFormat,
DocumentVersionPB::V1 => ViewDataFormatPB::TreeFormat,
};
let _ = folder_manager
folder_manager
.initialize_with_new_user(&profile.id, &profile.token, view_data_type)
.await?;
let _ = document_manager
document_manager
.initialize_with_new_user(&profile.id, &profile.token)
.await?;
let _ = grid_manager
grid_manager
.initialize_with_new_user(&profile.id, &profile.token)
.await?;
let _ = ws_conn.start(profile.token.clone(), profile.id.clone()).await?;
ws_conn.start(profile.token.clone(), profile.id.clone()).await?;
let _ = ret.send(());
}
}
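
The clean-up above leaves `let _ =` in place where a Result is deliberately ignored rather than propagated, as with `ret.send(())`. A small sketch of that distinction, assuming a tokio oneshot sender (an assumption about the channel type, not confirmed by this diff):

use tokio::sync::oneshot;

// The send result only reports whether the receiver is still alive; the
// caller intentionally ignores it and there is no trailing `?`, so the
// `let _ =` stays to make the discarded Result explicit.
fn notify(ret: oneshot::Sender<()>) {
    let _ = ret.send(());
}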

View File

@ -128,18 +128,18 @@ impl ClientDocument {
pub fn insert<T: ToString>(&mut self, index: usize, data: T) -> Result<DeltaTextOperations, CollaborateError> {
let text = data.to_string();
let interval = Interval::new(index, index);
let _ = validate_interval(&self.operations, &interval)?;
validate_interval(&self.operations, &interval)?;
let operations = self.view.insert(&self.operations, &text, interval)?;
self.compose_operations(operations.clone())?;
Ok(operations)
}
pub fn delete(&mut self, interval: Interval) -> Result<DeltaTextOperations, CollaborateError> {
let _ = validate_interval(&self.operations, &interval)?;
validate_interval(&self.operations, &interval)?;
debug_assert!(!interval.is_empty());
let operations = self.view.delete(&self.operations, interval)?;
if !operations.is_empty() {
let _ = self.compose_operations(operations.clone())?;
self.compose_operations(operations.clone())?;
}
Ok(operations)
}
@ -149,7 +149,7 @@ impl ClientDocument {
interval: Interval,
attribute: AttributeEntry,
) -> Result<DeltaTextOperations, CollaborateError> {
let _ = validate_interval(&self.operations, &interval)?;
validate_interval(&self.operations, &interval)?;
tracing::trace!("format {} with {:?}", interval, attribute);
let operations = self.view.format(&self.operations, attribute, interval).unwrap();
self.compose_operations(operations.clone())?;
@ -161,7 +161,7 @@ impl ClientDocument {
interval: Interval,
data: T,
) -> Result<DeltaTextOperations, CollaborateError> {
let _ = validate_interval(&self.operations, &interval)?;
validate_interval(&self.operations, &interval)?;
let mut operations = DeltaTextOperations::default();
let text = data.to_string();
if !text.is_empty() {

View File

@ -115,7 +115,7 @@ impl std::default::Default for FolderNodePad {
path: folder_path(),
nodes: vec![folder_node],
};
let _ = tree.write().apply_op(operation).unwrap();
tree.write().apply_op(operation).unwrap();
let node_id = tree.read().node_id_at_path(folder_path()).unwrap();
workspaces.node_id = Some(tree.read().node_id_at_path(workspaces_path()).unwrap());
trash.node_id = Some(tree.read().node_id_at_path(trash_path()).unwrap());

View File

@ -47,7 +47,7 @@ impl FolderPad {
let folder_rev = FolderRevision::deserialize(&mut deserializer).map_err(|e| {
tracing::error!("Deserialize folder from {} failed", content);
return CollaborateError::internal().context(format!("Deserialize operations to folder failed: {}", e));
CollaborateError::internal().context(format!("Deserialize operations to folder failed: {}", e))
})?;
Ok(Self { folder_rev, operations })
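
A minimal sketch of the closure change above, with `parse_count` and a `String` error as hypothetical stand-ins for the `FolderRevision` deserialization: the explicit `return` in the closure's final position is replaced by the same value as a tail expression, the conventional form that avoids a needless-return style warning.

// `parse_count` is a hypothetical stand-in for the deserialization above.
fn parse_count(s: &str) -> Result<i32, String> {
    s.parse::<i32>().map_err(|e| {
        // Before: `return format!("parse failed: {}", e);`
        // The value behaves the same as a plain tail expression.
        format!("parse failed: {}", e)
    })
}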

View File

@ -30,7 +30,7 @@ pub fn set_attributes_str_value(
old: old_attributes,
},
};
let _ = tree.write().apply_op(update_operation)?;
tree.write().apply_op(update_operation)?;
Ok(())
}

View File

@ -41,7 +41,7 @@ impl GridRevisionPad {
.blocks
.iter()
.map(|block| {
let mut duplicated_block = (&**block).clone();
let mut duplicated_block = (**block).clone();
duplicated_block.block_id = gen_block_id();
duplicated_block
})
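
A sketch of the `(&**block).clone()` to `(**block).clone()` change, assuming the blocks sit behind `Arc` as the surrounding code suggests: the old form borrowed the inner value only to call `.clone()` through the reference, so the extra borrow was redundant.

use std::sync::Arc;

#[derive(Clone)]
struct Block {
    block_id: String,
}

// `Block` is a hypothetical stand-in for the grid block revision type.
fn duplicate(blocks: &[Arc<Block>]) -> Vec<Block> {
    // `**block` reaches the value inside the Arc; cloning it directly is
    // equivalent to the old `(&**block).clone()` without the extra `&`.
    blocks.iter().map(|block| (**block).clone()).collect()
}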

View File

@ -91,7 +91,7 @@ impl ServerDocumentManager {
Ok(())
}
Some(handler) => {
let _ = handler.apply_revisions(user, client_data.revisions).await?;
handler.apply_revisions(user, client_data.revisions).await?;
Ok(())
}
};
@ -117,7 +117,7 @@ impl ServerDocumentManager {
Ok(())
}
Some(handler) => {
let _ = handler.apply_ping(rev_id, user).await?;
handler.apply_ping(rev_id, user).await?;
Ok(())
}
}
@ -136,7 +136,7 @@ impl ServerDocumentManager {
Ok(())
}
Some(handler) => {
let _ = handler.apply_document_reset(revisions).await?;
handler.apply_document_reset(revisions).await?;
Ok(())
}
}
@ -234,29 +234,25 @@ impl OpenDocumentHandler {
self.users.insert(user.user_id(), user.clone());
let msg = DocumentCommand::ApplyRevisions { user, revisions, ret };
let result = self.send(msg, rx).await?;
result
self.send(msg, rx).await?
}
async fn apply_ping(&self, rev_id: i64, user: Arc<dyn RevisionUser>) -> Result<(), CollaborateError> {
let (ret, rx) = oneshot::channel();
self.users.insert(user.user_id(), user.clone());
let msg = DocumentCommand::Ping { user, rev_id, ret };
let result = self.send(msg, rx).await?;
result
self.send(msg, rx).await?
}
#[tracing::instrument(level = "debug", skip(self, revisions), err)]
async fn apply_document_reset(&self, revisions: Vec<Revision>) -> Result<(), CollaborateError> {
let (ret, rx) = oneshot::channel();
let msg = DocumentCommand::Reset { revisions, ret };
let result = self.send(msg, rx).await?;
result
self.send(msg, rx).await?
}
async fn send<T>(&self, msg: DocumentCommand, rx: oneshot::Receiver<T>) -> CollaborateResult<T> {
let _ = self
.sender
self.sender
.send(msg)
.await
.map_err(|e| CollaborateError::internal().context(format!("Send document command failed: {}", e)))?;
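
A sketch of the `let result = ...; result` clean-up in the `apply_*` methods above, with `send_command` standing in for `self.send(msg, rx).await?` (which yields an inner `Result` once the outer one is unwrapped by `?`): binding a value only to return it on the next line is the shape clippy reports as `let_and_return`.

// `send_command` is a hypothetical stand-in for the command-channel round trip.
fn send_command() -> Result<Result<(), String>, String> {
    Ok(Ok(()))
}

fn apply() -> Result<(), String> {
    // Before:
    //   let result = send_command()?;
    //   result
    // After: the same expression as the tail of the function.
    send_command()?
}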

View File

@ -88,7 +88,7 @@ impl ServerFolderManager {
Ok(())
}
Some(handler) => {
let _ = handler.apply_revisions(user, client_data.revisions).await?;
handler.apply_revisions(user, client_data.revisions).await?;
Ok(())
}
};
@ -115,7 +115,7 @@ impl ServerFolderManager {
Ok(())
}
Some(handler) => {
let _ = handler.apply_ping(rev_id, user).await?;
handler.apply_ping(rev_id, user).await?;
Ok(())
}
}
@ -215,8 +215,7 @@ impl OpenFolderHandler {
}
async fn send<T>(&self, msg: FolderCommand, rx: oneshot::Receiver<T>) -> CollaborateResult<T> {
let _ = self
.sender
self.sender
.send(msg)
.await
.map_err(|e| CollaborateError::internal().context(format!("Send folder command failed: {}", e)))?;

View File

@ -116,9 +116,9 @@ where
if server_base_rev_id == first_revision.base_rev_id || server_rev_id == first_revision.rev_id {
// The rev is in the right order, just compose it.
for revision in revisions.iter() {
let _ = self.compose_revision(revision)?;
self.compose_revision(revision)?;
}
let _ = self.persistence.save_revisions(revisions).await?;
self.persistence.save_revisions(revisions).await?;
} else {
// The server ops are outdated, pull the missing revision from the client.
let range = RevisionRange {
@ -139,7 +139,7 @@ where
// ops.
let from_rev_id = first_revision.rev_id;
let to_rev_id = server_base_rev_id;
let _ = self.push_revisions_to_user(user, from_rev_id, to_rev_id).await;
self.push_revisions_to_user(user, from_rev_id, to_rev_id).await;
}
}
Ok(())
@ -162,7 +162,7 @@ where
let from_rev_id = client_rev_id;
let to_rev_id = server_rev_id;
tracing::trace!("Push revisions to user");
let _ = self.push_revisions_to_user(user, from_rev_id, to_rev_id).await;
self.push_revisions_to_user(user, from_rev_id, to_rev_id).await;
}
}
Ok(())
@ -174,7 +174,7 @@ where
tracing::Span::current().record("object_id", &object_id.as_str());
let (_, rev_id) = pair_rev_id_from_revision_pbs(&revisions);
let operations = make_operations_from_revisions(revisions.clone())?;
let _ = self.persistence.reset_object(&object_id, revisions).await?;
self.persistence.reset_object(&object_id, revisions).await?;
self.object.write().set_operations(operations);
let _ = self.rev_id.fetch_update(SeqCst, SeqCst, |_e| Some(rev_id));
Ok(())
@ -186,7 +186,7 @@ where
fn compose_revision(&self, revision: &Revision) -> Result<(), CollaborateError> {
let operations = RevisionOperations::<Attribute>::from_bytes(&revision.bytes)?;
let _ = self.compose_operations(operations)?;
self.compose_operations(operations)?;
let _ = self.rev_id.fetch_update(SeqCst, SeqCst, |_e| Some(revision.rev_id));
Ok(())
}
@ -209,7 +209,7 @@ where
match self.object.try_write_for(Duration::from_millis(300)) {
None => log::error!("Failed to acquire write lock of object"),
Some(mut write_guard) => {
let _ = write_guard.compose(&operations)?;
write_guard.compose(&operations)?;
}
}
Ok(())

View File

@ -171,7 +171,7 @@ pub fn cal_diff<T: OperationAttributes>(old: String, new: String) -> Option<Delt
delta_builder = delta_builder.delete(OTString::from(*s).utf16_len());
}
Chunk::Insert(s) => {
delta_builder = delta_builder.insert(*s);
delta_builder = delta_builder.insert(s);
}
}
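
A sketch of the `insert(*s)` to `insert(s)` change, on the assumption that the delta builder's `insert` takes a `&str`: with `s` bound as `&&str` in the match, the explicit `*` is redundant because `&&str` deref-coerces to `&str` at the call site. `TextBuilder` below is a hypothetical stand-in, not the real delta builder API.

// Hypothetical builder with an `insert(&str)` method.
struct TextBuilder(String);

impl TextBuilder {
    fn insert(mut self, text: &str) -> Self {
        self.0.push_str(text);
        self
    }
}

fn build(chunks: &[&str]) -> TextBuilder {
    let mut builder = TextBuilder(String::new());
    for s in chunks {
        // `s` is `&&str` here; passing it directly coerces to `&str`.
        builder = builder.insert(s);
    }
    builder
}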
}

View File

@ -23,7 +23,7 @@ impl FolderNodePadTest {
pub fn new() -> FolderNodePadTest {
let mut folder_pad = FolderNodePad::default();
let workspace = WorkspaceNode::new(folder_pad.tree.clone(), "1".to_string(), "workspace name".to_string());
let _ = folder_pad.workspaces.add_workspace(workspace).unwrap();
folder_pad.workspaces.add_workspace(workspace).unwrap();
Self { folder_pad }
}
@ -54,7 +54,7 @@ impl FolderNodePadTest {
FolderNodePadScript::CreateApp { id, name } => {
let app_node = AppNode::new(self.folder_pad.tree.clone(), id, name);
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();
let _ = workspace_node.add_app(app_node).unwrap();
workspace_node.add_app(app_node).unwrap();
}
FolderNodePadScript::DeleteApp { id } => {
let workspace_node = self.folder_pad.get_mut_workspace("1").unwrap();

View File

@ -56,7 +56,7 @@ impl TaskQueue {
let head = self.queue.pop()?;
let result = {
let mut ref_head = head.borrow_mut();
f(&mut *ref_head)
f(&mut ref_head)
};
if !head.borrow().tasks.is_empty() {
self.queue.push(head);
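
A sketch of the `f(&mut *ref_head)` to `f(&mut ref_head)` change, assuming the closure takes a `&mut` to the queued item: a `&mut RefMut<T>` deref-coerces to `&mut T` at the call site, so the explicit reborrow through `*` is unnecessary. `TaskStore` is a hypothetical stand-in for the queue entry type.

use std::cell::RefCell;

// Hypothetical queued item.
struct TaskStore {
    tasks: Vec<String>,
}

fn with_head<T>(cell: &RefCell<TaskStore>, f: impl FnOnce(&mut TaskStore) -> T) -> T {
    let mut ref_head = cell.borrow_mut();
    // `&mut ref_head` is `&mut RefMut<TaskStore>`, which coerces to
    // `&mut TaskStore` because the closure's parameter type is known.
    f(&mut ref_head)
}

fn example() -> usize {
    let cell = RefCell::new(TaskStore { tasks: vec![] });
    with_head(&cell, |store| {
        store.tasks.push("task".to_string());
        store.tasks.len()
    })
}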

View File

@ -9,7 +9,7 @@ use std::{convert::TryInto, sync::Arc};
#[tracing::instrument(level = "debug", skip(session))]
pub async fn init_user_handler(session: AFPluginState<Arc<UserSession>>) -> Result<(), FlowyError> {
let _ = session.init_user().await?;
session.init_user().await?;
Ok(())
}
@ -29,7 +29,7 @@ pub async fn get_user_profile_handler(
#[tracing::instrument(level = "debug", name = "sign_out", skip(session))]
pub async fn sign_out(session: AFPluginState<Arc<UserSession>>) -> Result<(), FlowyError> {
let _ = session.sign_out().await?;
session.sign_out().await?;
Ok(())
}

View File

@ -88,7 +88,7 @@ impl UserSession {
} else {
let resp = self.cloud_service.sign_in(params).await?;
let session: Session = resp.clone().into();
let _ = self.set_session(Some(session))?;
self.set_session(Some(session))?;
let user_table = self.save_user(resp.into()).await?;
let user_profile: UserProfilePB = user_table.into();
self.notifier.notify_login(&user_profile.token, &user_profile.id);
@ -103,7 +103,7 @@ impl UserSession {
} else {
let resp = self.cloud_service.sign_up(params).await?;
let session: Session = resp.clone().into();
let _ = self.set_session(Some(session))?;
self.set_session(Some(session))?;
let user_table = self.save_user(resp.into()).await?;
let user_profile: UserProfilePB = user_table.into();
let (ret, mut tx) = mpsc::channel(1);
@ -119,10 +119,10 @@ impl UserSession {
let session = self.get_session()?;
let _ =
diesel::delete(dsl::user_table.filter(dsl::id.eq(&session.user_id))).execute(&*(self.db_connection()?))?;
let _ = self.database.close_user_db(&session.user_id)?;
let _ = self.set_session(None)?;
self.database.close_user_db(&session.user_id)?;
self.set_session(None)?;
self.notifier.notify_logout(&session.token, &session.user_id);
let _ = self.sign_out_on_server(&session.token).await?;
self.sign_out_on_server(&session.token).await?;
Ok(())
}
@ -137,7 +137,7 @@ impl UserSession {
dart_notify(&session.token, UserNotification::UserProfileUpdated)
.payload(user_profile)
.send();
let _ = self.update_user_on_server(&session.token, params).await?;
self.update_user_on_server(&session.token, params).await?;
Ok(())
}
@ -152,7 +152,7 @@ impl UserSession {
.filter(user_table::id.eq(&user_id))
.first::<UserTable>(&*(self.db_connection()?))?;
let _ = self.read_user_profile_on_server(&token)?;
self.read_user_profile_on_server(&token)?;
Ok(user.into())
}
@ -162,7 +162,7 @@ impl UserSession {
.filter(user_table::id.eq(&user_id))
.first::<UserTable>(&*(self.db_connection()?))?;
let _ = self.read_user_profile_on_server(&token)?;
self.read_user_profile_on_server(&token)?;
Ok(user.into())
}
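
One `let _ =` in the sign-out path above survives the clean-up: diesel's `execute` returns the number of affected rows, so a value remains even after `?`, and the binding records that the count is deliberately unused. A sketch of the distinction with a hypothetical `delete_user_row` in place of the diesel call:

// Hypothetical stand-in for `diesel::delete(...).execute(...)`.
fn delete_user_row() -> Result<usize, String> {
    Ok(1)
}

fn sign_out() -> Result<(), String> {
    // The `?` handles the error; the remaining row count is intentionally
    // dropped, which the `let _ =` makes explicit.
    let _ = delete_user_row()?;
    Ok(())
}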

View File

@ -67,8 +67,8 @@ impl Builder {
// let _ = set_global_default(subscriber).map_err(|e| format!("{:?}", e))?;
// }
let _ = set_global_default(subscriber).map_err(|e| format!("{:?}", e))?;
let _ = LogTracer::builder()
set_global_default(subscriber).map_err(|e| format!("{:?}", e))?;
LogTracer::builder()
.with_max_level(LevelFilter::Trace)
.init()
.map_err(|e| format!("{:?}", e))?;
@ -84,7 +84,7 @@ mod tests {
// run cargo test --features="use_bunyan" or cargo test
#[test]
fn test_log() {
let _ = Builder::new("flowy", ".").env_filter("debug").build().unwrap();
Builder::new("flowy", ".").env_filter("debug").build().unwrap();
tracing::info!("😁 tracing::info call");
log::debug!("😁 log::debug call");