Feat: import and export csv (#2617)

* feat: import csv

* feat: export

* chore: implement import export csv in the backend

* chore: patch
Nathan.fooo 2023-05-25 23:22:23 +08:00 committed by GitHub
parent 2746666123
commit 70bb7f2ad6
30 changed files with 555 additions and 150 deletions


@@ -1387,6 +1387,27 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "csv"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b015497079b9a9d69c02ad25de6c0a6edef051ea6360a327d0bd05802ef64ad"
dependencies = [
"csv-core",
"itoa 1.0.6",
"ryu",
"serde",
]
[[package]]
name = "csv-core"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
dependencies = [
"memchr",
]
[[package]]
name = "ctor"
version = "0.1.26"
@@ -1829,6 +1850,7 @@ dependencies = [
"chrono-tz 0.8.2",
"collab",
"collab-database",
"csv",
"dashmap",
"fancy-regex 0.10.0",
"flowy-codegen",


@@ -34,12 +34,12 @@ default = ["custom-protocol"]
custom-protocol = ["tauri/custom-protocol"]
[patch.crates-io]
-collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
-collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
-collab-persistence = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+collab-persistence = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
-collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
-collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
-appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
#collab = { path = "../../AppFlowy-Collab/collab" }
#collab-folder = { path = "../../AppFlowy-Collab/collab-folder" }


@@ -85,7 +85,7 @@ checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4"
[[package]]
name = "appflowy-integrate"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"anyhow",
"collab",
@@ -886,7 +886,7 @@ dependencies = [
[[package]]
name = "collab"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"anyhow",
"bytes",
@@ -903,7 +903,7 @@ dependencies = [
[[package]]
name = "collab-client-ws"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"bytes",
"collab-sync",
@@ -921,7 +921,7 @@ dependencies = [
[[package]]
name = "collab-database"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"anyhow",
"async-trait",
@@ -946,7 +946,7 @@ dependencies = [
[[package]]
name = "collab-derive"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"proc-macro2",
"quote",
@@ -958,7 +958,7 @@ dependencies = [
[[package]]
name = "collab-document"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"anyhow",
"collab",
@@ -975,7 +975,7 @@ dependencies = [
[[package]]
name = "collab-folder"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"anyhow",
"collab",
@@ -993,7 +993,7 @@ dependencies = [
[[package]]
name = "collab-persistence"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"bincode",
"chrono",
@@ -1013,7 +1013,7 @@ dependencies = [
[[package]]
name = "collab-plugins"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"anyhow",
"async-trait",
@@ -1043,7 +1043,7 @@ dependencies = [
[[package]]
name = "collab-sync"
version = "0.1.0"
-source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=6052d5#6052d509982f705c6d43a859c937a722a8c3358b"
+source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=d6af3a#d6af3a7662a57202e4e5b0a297c28757a18f3372"
dependencies = [
"bytes",
"collab",
@@ -1212,6 +1212,27 @@ dependencies = [
"typenum",
]
[[package]]
name = "csv"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b015497079b9a9d69c02ad25de6c0a6edef051ea6360a327d0bd05802ef64ad"
dependencies = [
"csv-core",
"itoa",
"ryu",
"serde",
]
[[package]]
name = "csv-core"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90"
dependencies = [
"memchr",
]
[[package]]
name = "cxx"
version = "1.0.94"
@@ -1609,6 +1630,7 @@ dependencies = [
"chrono-tz 0.8.2",
"collab",
"collab-database",
"csv",
"dashmap",
"fancy-regex 0.10.0",
"flowy-codegen",


@@ -33,11 +33,11 @@ opt-level = 3
incremental = false
[patch.crates-io]
-collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
-collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
-collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
-collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
-appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "6052d5" }
+appflowy-integrate = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "d6af3a" }
#collab = { path = "../AppFlowy-Collab/collab" }
#collab-folder = { path = "../AppFlowy-Collab/collab-folder" }


@@ -39,6 +39,7 @@ rayon = "1.6.1"
nanoid = "0.4.0"
chrono-tz = "0.8.1"
async-trait = "0.1"
csv = "1.1.6"
strum = "0.21"
strum_macros = "0.21"
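Note (editorial, not part of the diff): the csv dependency added above is the stock Rust csv crate. A minimal sketch of the write-into-memory pattern that the exporter added later in this commit builds on; the field names and values here are made up:

```rust
// Write records into an in-memory buffer, then turn the buffer into a String.
fn write_sample_csv() -> Result<String, csv::Error> {
    let mut wtr = csv::Writer::from_writer(vec![]);
    wtr.write_record(["Name", "Status"])?; // header row
    wtr.write_record(["Write docs", "Done"])?; // one data row
    let bytes = wtr.into_inner().expect("csv writer flush");
    Ok(String::from_utf8(bytes).expect("csv output is valid UTF-8"))
}
```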


@@ -12,6 +12,7 @@ mod view_entities;
#[macro_use]
mod macros;
mod share_entities;
mod type_option_entities;
pub use calendar_entities::*;
@@ -22,6 +23,7 @@ pub use filter_entities::*;
pub use group_entities::*;
pub use row_entities::*;
pub use setting_entities::*;
pub use share_entities::*;
pub use sort_entities::*;
pub use type_option_entities::*;
pub use view_entities::*;


@@ -0,0 +1,24 @@
use flowy_derive::{ProtoBuf, ProtoBuf_Enum};
#[derive(Clone, Debug, ProtoBuf_Enum)]
pub enum ImportTypePB {
CSV = 0,
}
impl Default for ImportTypePB {
fn default() -> Self {
Self::CSV
}
}
#[derive(Clone, Debug, ProtoBuf, Default)]
pub struct DatabaseImportPB {
#[pb(index = 1, one_of)]
pub data: Option<String>,
#[pb(index = 2, one_of)]
pub uri: Option<String>,
#[pb(index = 3)]
pub import_type: ImportTypePB,
}
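Note (editorial, not part of the diff): a sketch of how a caller might fill in this payload for a CSV import, given the branching in import_data_handler later in this commit; the CSV literal is illustrative, and exactly one of data / uri is expected to be set.

```rust
// Hypothetical helper; assumes the generated DatabaseImportPB / ImportTypePB above are in scope.
fn csv_import_payload(csv: String) -> DatabaseImportPB {
    DatabaseImportPB {
        data: Some(csv), // inline CSV content...
        uri: None,       // ...or a file location instead, but not both
        import_type: ImportTypePB::CSV,
    }
}
```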


@@ -3,7 +3,7 @@ use std::sync::Arc;
use collab_database::rows::RowId;
use collab_database::views::DatabaseLayout;
-use flowy_error::{FlowyError, FlowyResult};
+use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
use crate::entities::*;
@@ -19,7 +19,7 @@ pub(crate) async fn get_database_data_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<DatabasePB, FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
-let database_editor = manager.get_database(view_id.as_ref()).await?;
+let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let data = database_editor.get_database_data(view_id.as_ref()).await;
data_result_ok(data)
}
@@ -30,7 +30,7 @@ pub(crate) async fn get_database_setting_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<DatabaseViewSettingPB, FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
-let database_editor = manager.get_database(view_id.as_ref()).await?;
+let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let data = database_editor
.get_database_view_setting(view_id.as_ref())
.await?;
@@ -43,7 +43,7 @@ pub(crate) async fn update_database_setting_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: DatabaseSettingChangesetParams = data.into_inner().try_into()?;
-let editor = manager.get_database(&params.view_id).await?;
+let editor = manager.get_database_with_view_id(&params.view_id).await?;
if let Some(insert_params) = params.insert_group {
editor.insert_group(insert_params).await?;
@@ -76,7 +76,7 @@ pub(crate) async fn get_all_filters_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<RepeatedFilterPB, FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
-let database_editor = manager.get_database(view_id.as_ref()).await?;
+let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let filters = database_editor.get_all_filters(view_id.as_ref()).await;
data_result_ok(filters)
}
@@ -87,7 +87,7 @@ pub(crate) async fn get_all_sorts_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<RepeatedSortPB, FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
-let database_editor = manager.get_database(view_id.as_ref()).await?;
+let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
let sorts = database_editor.get_all_sorts(view_id.as_ref()).await;
data_result_ok(sorts)
}
@@ -98,7 +98,7 @@ pub(crate) async fn delete_all_sorts_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let view_id: DatabaseViewIdPB = data.into_inner();
-let database_editor = manager.get_database(view_id.as_ref()).await?;
+let database_editor = manager.get_database_with_view_id(view_id.as_ref()).await?;
database_editor.delete_all_sorts(view_id.as_ref()).await;
Ok(())
}
@@ -109,7 +109,7 @@ pub(crate) async fn get_fields_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<RepeatedFieldPB, FlowyError> {
let params: GetFieldParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let fields = database_editor
.get_fields(&params.view_id, params.field_ids)
.into_iter()
@@ -125,7 +125,7 @@ pub(crate) async fn update_field_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: FieldChangesetParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor.update_field(params).await?;
Ok(())
}
@@ -136,7 +136,7 @@ pub(crate) async fn update_field_type_option_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: TypeOptionChangesetParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
if let Some(old_field) = database_editor.get_field(&params.field_id) {
let field_type = FieldType::from(old_field.field_type);
let type_option_data =
@@ -159,7 +159,7 @@ pub(crate) async fn delete_field_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor.delete_field(&params.field_id).await?;
Ok(())
}
@@ -170,7 +170,7 @@ pub(crate) async fn switch_to_field_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: EditFieldParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let old_field = database_editor.get_field(&params.field_id);
database_editor
.switch_to_field_type(&params.field_id, &params.field_type)
@@ -205,7 +205,7 @@ pub(crate) async fn duplicate_field_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: FieldIdParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor
.duplicate_field(&params.view_id, &params.field_id)
.await?;
@@ -219,7 +219,7 @@ pub(crate) async fn get_field_type_option_data_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<TypeOptionPB, FlowyError> {
let params: TypeOptionPathParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
if let Some((field, data)) = database_editor
.get_field_type_option_data(&params.field_id)
.await
@@ -242,7 +242,7 @@ pub(crate) async fn create_field_type_option_data_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<TypeOptionPB, FlowyError> {
let params: CreateFieldParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let (field, data) = database_editor
.create_field_with_type_option(&params.view_id, &params.field_type, params.type_option_data)
.await;
@@ -261,7 +261,7 @@ pub(crate) async fn move_field_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: MoveFieldParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor
.move_field(
&params.view_id,
@@ -279,7 +279,7 @@ pub(crate) async fn get_row_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<OptionalRowPB, FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let row = database_editor.get_row(&params.row_id).map(RowPB::from);
data_result_ok(OptionalRowPB { row })
}
@@ -290,7 +290,7 @@ pub(crate) async fn delete_row_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor.delete_row(&params.row_id).await;
Ok(())
}
@@ -301,7 +301,7 @@ pub(crate) async fn duplicate_row_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor
.duplicate_row(&params.view_id, &params.row_id)
.await;
@@ -314,7 +314,7 @@ pub(crate) async fn move_row_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: MoveRowParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor
.move_row(&params.view_id, params.from_row_id, params.to_row_id)
.await;
@@ -327,7 +327,7 @@ pub(crate) async fn create_row_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<RowPB, FlowyError> {
let params: CreateRowParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
match database_editor.create_row(params).await? {
None => Err(FlowyError::internal().context("Create row fail")),
Some(row) => data_result_ok(RowPB::from(row)),
@@ -340,7 +340,7 @@ pub(crate) async fn get_cell_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<CellPB, FlowyError> {
let params: CellIdParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let cell = database_editor
.get_cell(&params.field_id, params.row_id)
.await;
@@ -353,7 +353,7 @@ pub(crate) async fn update_cell_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params: CellChangesetPB = data.into_inner();
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor
.update_cell_with_changeset(
&params.view_id,
@@ -371,7 +371,7 @@ pub(crate) async fn new_select_option_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<SelectOptionPB, FlowyError> {
let params: CreateSelectOptionParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let result = database_editor
.create_select_option(&params.field_id, params.option_name)
.await;
@@ -389,7 +389,7 @@ pub(crate) async fn insert_or_update_select_option_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params = data.into_inner();
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor
.insert_select_options(
&params.view_id,
@@ -407,7 +407,7 @@ pub(crate) async fn delete_select_option_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> Result<(), FlowyError> {
let params = data.into_inner();
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor
.delete_select_options(
&params.view_id,
@@ -425,7 +425,7 @@ pub(crate) async fn get_select_option_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<SelectOptionCellDataPB, FlowyError> {
let params: CellIdParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let options = database_editor
.get_select_options(params.row_id, &params.field_id)
.await;
@@ -439,7 +439,7 @@ pub(crate) async fn update_select_option_cell_handler(
) -> Result<(), FlowyError> {
let params: SelectOptionCellChangesetParams = data.into_inner().try_into()?;
let database_editor = manager
-.get_database(&params.cell_identifier.view_id)
+.get_database_with_view_id(&params.cell_identifier.view_id)
.await?;
let changeset = SelectOptionCellChangeset {
insert_option_ids: params.insert_option_ids,
@@ -469,7 +469,7 @@ pub(crate) async fn update_date_cell_handler(
include_time: data.include_time,
timezone_id: data.timezone_id,
};
-let database_editor = manager.get_database(&cell_id.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&cell_id.view_id).await?;
database_editor
.update_cell_with_changeset(
&cell_id.view_id,
@@ -487,7 +487,7 @@ pub(crate) async fn get_groups_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<RepeatedGroupPB, FlowyError> {
let params: DatabaseViewIdPB = data.into_inner();
-let database_editor = manager.get_database(params.as_ref()).await?;
+let database_editor = manager.get_database_with_view_id(params.as_ref()).await?;
let groups = database_editor.load_groups(params.as_ref()).await?;
data_result_ok(groups)
}
@@ -498,7 +498,7 @@ pub(crate) async fn get_group_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<GroupPB, FlowyError> {
let params: DatabaseGroupIdParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let group = database_editor
.get_group(&params.view_id, &params.group_id)
.await?;
@@ -511,7 +511,7 @@ pub(crate) async fn move_group_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> FlowyResult<()> {
let params: MoveGroupParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor
.move_group(&params.view_id, &params.from_group_id, &params.to_group_id)
.await?;
@@ -524,7 +524,7 @@ pub(crate) async fn move_group_row_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> FlowyResult<()> {
let params: MoveGroupRowParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
database_editor
.move_group_row(
&params.view_id,
@@ -550,7 +550,7 @@ pub(crate) async fn set_layout_setting_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> FlowyResult<()> {
let params: LayoutSettingChangeset = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let layout_params = LayoutSettingParams {
calendar: params.calendar,
};
@@ -565,7 +565,7 @@ pub(crate) async fn get_layout_setting_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<LayoutSettingPB, FlowyError> {
let params: DatabaseLayoutId = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let layout_setting_pb = database_editor
.get_layout_setting(&params.view_id, params.layout)
.await
@@ -580,7 +580,7 @@ pub(crate) async fn get_calendar_events_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<RepeatedCalendarEventPB, FlowyError> {
let params: CalendarEventRequestParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let events = database_editor
.get_all_calendar_events(&params.view_id)
.await;
@@ -593,7 +593,7 @@ pub(crate) async fn get_calendar_event_handler(
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> DataResult<CalendarEventPB, FlowyError> {
let params: RowIdParams = data.into_inner().try_into()?;
-let database_editor = manager.get_database(&params.view_id).await?;
+let database_editor = manager.get_database_with_view_id(&params.view_id).await?;
let event = database_editor
.get_calendar_event(&params.view_id, params.row_id)
.await;
@@ -602,3 +602,27 @@ pub(crate) async fn get_calendar_event_handler(
Some(event) => data_result_ok(event),
}
}
#[tracing::instrument(level = "debug", skip(data, manager), err)]
pub(crate) async fn import_data_handler(
data: AFPluginData<DatabaseImportPB>,
manager: AFPluginState<Arc<DatabaseManager2>>,
) -> FlowyResult<()> {
let params = data.into_inner();
match params.import_type {
ImportTypePB::CSV => {
if let Some(data) = params.data {
manager.import_csv(data).await?;
} else if let Some(uri) = params.uri {
manager.import_csv_data_from_uri(uri).await?;
} else {
return Err(FlowyError::new(
ErrorCode::InvalidData,
"No data or uri provided",
));
}
},
}
Ok(())
}


@@ -58,7 +58,9 @@ pub fn init(database_manager: Arc<DatabaseManager2>) -> AFPlugin {
.event(DatabaseEvent::GetCalendarEvent, get_calendar_event_handler)
// Layout setting
.event(DatabaseEvent::SetLayoutSetting, set_layout_setting_handler)
-.event(DatabaseEvent::GetLayoutSetting, get_layout_setting_handler);
+.event(DatabaseEvent::GetLayoutSetting, get_layout_setting_handler)
// import
.event(DatabaseEvent::ImportCSV, import_data_handler);
plugin
}
@@ -259,4 +261,7 @@ pub enum DatabaseEvent {
#[event(input = "MoveCalendarEventPB")]
MoveCalendarEvent = 119,
#[event(input = "DatabaseImportPB")]
ImportCSV = 120,
}


@@ -11,11 +11,12 @@ use collab_database::views::{CreateDatabaseParams, CreateViewParams};
use parking_lot::Mutex;
use tokio::sync::RwLock;
-use flowy_error::{FlowyError, FlowyResult};
+use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_task::TaskDispatcher;
use crate::entities::{DatabaseDescriptionPB, DatabaseLayoutPB, RepeatedDatabaseDescriptionPB};
use crate::services::database::{DatabaseEditor, MutexDatabase};
use crate::services::share::csv::{CSVImporter, ExportStyle};
pub trait DatabaseUser2: Send + Sync {
fn user_id(&self) -> Result<i64, FlowyError>;
@@ -77,14 +78,17 @@ impl DatabaseManager2 {
}
}
-pub async fn get_database(&self, view_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
+pub async fn get_database_with_view_id(&self, view_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
let database_id = self.with_user_database(Err(FlowyError::internal()), |database| {
database
.get_database_id_with_view_id(view_id)
.ok_or_else(FlowyError::record_not_found)
})?;
self.get_database(&database_id).await
}
-if let Some(editor) = self.editors.read().await.get(&database_id) {
+pub async fn get_database(&self, database_id: &str) -> FlowyResult<Arc<DatabaseEditor>> {
if let Some(editor) = self.editors.read().await.get(database_id) {
return Ok(editor.clone());
}
@@ -93,7 +97,7 @@ impl DatabaseManager2 {
Err(FlowyError::record_not_found()),
|database| {
database
-.get_database(&database_id)
+.get_database(database_id)
.ok_or_else(FlowyError::record_not_found)
},
)?);
@@ -191,6 +195,24 @@ impl DatabaseManager2 {
Ok(())
}
pub async fn import_csv(&self, content: String) -> FlowyResult<String> {
let params = tokio::task::spawn_blocking(move || CSVImporter.import_csv_from_string(content))
.await
.map_err(internal_error)??;
let database_id = params.database_id.clone();
self.create_database_with_params(params).await?;
Ok(database_id)
}
pub async fn import_csv_data_from_uri(&self, _uri: String) -> FlowyResult<()> {
Ok(())
}
pub async fn export_csv(&self, view_id: &str, style: ExportStyle) -> FlowyResult<String> {
let database = self.get_database_with_view_id(view_id).await?;
database.export_csv(style).await
}
fn with_user_database<F, Output>(&self, default_value: Output, f: F) -> Output
where
F: FnOnce(&InnerUserDatabase) -> Output,
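Note (editorial, not part of the diff): a rough sketch of how the new manager methods compose, assuming an existing DatabaseManager2 instance and a valid view id; the names are placeholders.

```rust
// Export a view to CSV, then re-import the result as a brand-new database.
async fn csv_round_trip(manager: &DatabaseManager2, view_id: &str) -> FlowyResult<()> {
    // ExportStyle::SIMPLE keeps only the plain cell text; META keeps field metadata too.
    let csv = manager.export_csv(view_id, ExportStyle::SIMPLE).await?;
    // import_csv returns the id of the freshly created database.
    let new_database_id = manager.import_csv(csv).await?;
    tracing::debug!("imported database {}", new_database_id);
    Ok(())
}
```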


@@ -74,7 +74,7 @@ pub fn apply_cell_changeset<C: ToCellChangeset>(
}
}
-pub fn get_type_cell_protobuf(
+pub fn get_cell_protobuf(
cell: &Cell,
field: &Field,
cell_cache: Option<CellCache>,
@@ -101,25 +101,6 @@ pub fn get_type_cell_protobuf(
}
}
pub fn get_type_cell_data<Output>(
cell: &Cell,
field: &Field,
cell_data_cache: Option<CellCache>,
) -> Option<Output>
where
Output: Default + 'static,
{
let from_field_type = get_field_type_from_cell(cell)?;
let to_field_type = FieldType::from(field.field_type);
try_decode_cell_to_cell_data(
cell,
&from_field_type,
&to_field_type,
field,
cell_data_cache,
)
}
/// Decode the opaque cell data from one field type to another using the corresponding `TypeOption`
///
/// The cell data might become an empty string depends on the to_field_type's `TypeOption`
@@ -174,22 +155,22 @@ pub fn try_decode_cell_to_cell_data<T: Default + 'static>(
///
/// * `cell_str`: the opaque cell string that can be decoded by corresponding structs that implement the
/// `FromCellString` trait.
-/// * `decoded_field_type`: the field_type of the cell_str
+/// * `to_field_type`: the cell will be decoded to this field type's cell data.
-/// * `field_type`: use this field type's `TypeOption` to stringify this cell_str
+/// * `from_field_type`: the original field type of the passed-in cell data.
/// * `field_rev`: used to get the corresponding TypeOption for the specified field type.
///
/// returns: String
pub fn stringify_cell_data(
cell: &Cell,
-decoded_field_type: &FieldType,
+to_field_type: &FieldType,
-field_type: &FieldType,
+from_field_type: &FieldType,
field: &Field,
) -> String {
match TypeOptionCellExt::new_with_cell_data_cache(field, None)
-.get_type_option_cell_data_handler(field_type)
+.get_type_option_cell_data_handler(from_field_type)
{
None => "".to_string(),
-Some(handler) => handler.stringify_cell_str(cell, decoded_field_type, field),
+Some(handler) => handler.stringify_cell_str(cell, to_field_type, field),
}
}
@@ -312,17 +293,17 @@ where
// }
// }
-pub struct CellBuilder {
+pub struct CellBuilder<'a> {
cells: Cells,
-field_maps: HashMap<String, Field>,
+field_maps: HashMap<String, &'a Field>,
}
-impl CellBuilder {
+impl<'a> CellBuilder<'a> {
-pub fn with_cells(cell_by_field_id: HashMap<String, String>, fields: Vec<Field>) -> Self {
+pub fn with_cells(cell_by_field_id: HashMap<String, String>, fields: &'a [Field]) -> Self {
let field_maps = fields
.into_iter()
.map(|field| (field.id.clone(), field))
-.collect::<HashMap<String, Field>>();
+.collect::<HashMap<String, &Field>>();
let mut cells = Cells::new();
for (field_id, cell_str) in cell_by_field_id {


@@ -10,7 +10,7 @@ use collab_database::views::{DatabaseLayout, DatabaseView, LayoutSetting};
use parking_lot::Mutex;
use tokio::sync::{broadcast, RwLock};
-use flowy_error::{FlowyError, FlowyResult};
+use flowy_error::{internal_error, FlowyError, FlowyResult};
use flowy_task::TaskDispatcher;
use lib_infra::future::{to_fut, Fut};
@@ -23,8 +23,7 @@ use crate::entities::{
};
use crate::notification::{send_notification, DatabaseNotification};
use crate::services::cell::{
-apply_cell_changeset, get_type_cell_protobuf, AnyTypeCache, CellBuilder, CellCache,
+apply_cell_changeset, get_cell_protobuf, AnyTypeCache, CellBuilder, CellCache, ToCellChangeset,
ToCellChangeset,
};
use crate::services::database::util::database_view_setting_pb_from_view;
use crate::services::database_view::{DatabaseViewChanged, DatabaseViewData, DatabaseViews};
@@ -36,6 +35,7 @@ use crate::services::field::{
};
use crate::services::filter::Filter;
use crate::services::group::{default_group_setting, GroupSetting, RowChangeset};
use crate::services::share::csv::{CSVExport, ExportStyle};
use crate::services::sort::Sort;
#[derive(Clone)]
@@ -303,7 +303,7 @@ impl DatabaseEditor {
pub async fn create_row(&self, params: CreateRowParams) -> FlowyResult<Option<Row>> {
let fields = self.database.lock().get_fields(&params.view_id, None);
let mut cells =
-CellBuilder::with_cells(params.cell_data_by_field_id.unwrap_or_default(), fields).build();
+CellBuilder::with_cells(params.cell_data_by_field_id.unwrap_or_default(), &fields).build();
for view in self.database_views.editors().await {
view.v_will_create_row(&mut cells, &params.group_id).await;
}
@@ -434,7 +434,7 @@ impl DatabaseEditor {
match (field, cell) {
(Some(field), Some(cell)) => {
let field_type = FieldType::from(field.field_type);
-let cell_bytes = get_type_cell_protobuf(&cell, &field, Some(self.cell_cache.clone()));
+let cell_bytes = get_cell_protobuf(&cell, &field, Some(self.cell_cache.clone()));
CellPB {
field_id: field_id.to_string(),
row_id: row_id.into(),
@@ -808,6 +808,18 @@ impl DatabaseEditor {
rows,
}
}
pub async fn export_csv(&self, style: ExportStyle) -> FlowyResult<String> {
let database = self.database.clone();
let csv = tokio::task::spawn_blocking(move || {
let database_guard = database.lock();
let csv = CSVExport.export_database(&database_guard, style)?;
Ok::<String, FlowyError>(csv)
})
.await
.map_err(internal_error)??;
Ok(csv)
}
}
pub(crate) async fn notify_did_update_cell(changesets: Vec<CellChangesetNotifyPB>) {


@@ -40,10 +40,10 @@ impl TypeOptionTransform for CheckboxTypeOption {
fn transform_type_option_cell(
&self,
cell: &Cell,
-_decoded_field_type: &FieldType,
+transformed_field_type: &FieldType,
_field: &Field,
) -> Option<<Self as TypeOption>::CellData> {
-if _decoded_field_type.is_text() {
+if transformed_field_type.is_text() {
Some(CheckboxCellData::from(cell))
} else {
None


@@ -95,11 +95,14 @@ where
fn transform_type_option_cell(
&self,
cell: &Cell,
-_decoded_field_type: &FieldType,
+transformed_field_type: &FieldType,
_field: &Field,
) -> Option<<Self as TypeOption>::CellData> {
-match _decoded_field_type {
+match transformed_field_type {
-FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Checklist => None,
+FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Checklist => {
// If the transformed field type is SingleSelect, MultiSelect or Checklist, Do nothing.
None
},
FieldType::Checkbox => {
let cell_content = CheckboxCellData::from(cell).to_string();
let mut transformed_ids = Vec::new();


@@ -64,19 +64,19 @@ impl TypeOptionTransform for RichTextTypeOption {
fn transform_type_option_cell(
&self,
cell: &Cell,
-_decoded_field_type: &FieldType,
+transformed_field_type: &FieldType,
_field: &Field,
) -> Option<<Self as TypeOption>::CellData> {
-if _decoded_field_type.is_date()
+if transformed_field_type.is_date()
-|| _decoded_field_type.is_single_select()
+|| transformed_field_type.is_single_select()
-|| _decoded_field_type.is_multi_select()
+|| transformed_field_type.is_multi_select()
-|| _decoded_field_type.is_number()
+|| transformed_field_type.is_number()
-|| _decoded_field_type.is_url()
+|| transformed_field_type.is_url()
{
Some(StrCellData::from(stringify_cell_data(
cell,
-_decoded_field_type,
+transformed_field_type,
-_decoded_field_type,
+transformed_field_type,
_field,
)))
} else {


@@ -99,13 +99,13 @@ pub trait TypeOptionTransform: TypeOption {
/// # Arguments
///
/// * `cell_str`: the cell string of the current field type
-/// * `decoded_field_type`: the field type of the cell data that's going to be transformed into
+/// * `transformed_field_type`: the cell will be transformed to the is field type's cell data.
/// current `TypeOption` field type.
///
fn transform_type_option_cell(
&self,
_cell: &Cell,
-_decoded_field_type: &FieldType,
+_transformed_field_type: &FieldType,
_field: &Field,
) -> Option<<Self as TypeOption>::CellData> {
None


@@ -21,6 +21,7 @@ use crate::services::field::{
pub const CELL_DATA: &str = "data";
/// Each [FieldType] has its own [TypeOptionCellDataHandler].
/// A helper trait that used to erase the `Self` of `TypeOption` trait to make it become a Object-safe trait
/// Only object-safe traits can be made into trait objects.
/// > Object-safe traits are traits with methods that follow these two rules:
@@ -46,19 +47,20 @@ pub trait TypeOptionCellDataHandler: Send + Sync + 'static {
fn handle_cell_filter(&self, field_type: &FieldType, field: &Field, cell: &Cell) -> bool;
-/// Decode the cell_str to corresponding cell data, and then return the display string of the
-/// cell data.
-fn stringify_cell_str(
-&self,
-cell: &Cell,
-decoded_field_type: &FieldType,
-field: &Field,
-) -> String;
+/// Format the cell to string using the passed-in [FieldType] and [Field].
+/// The [Cell] is generic, so we need to know the [FieldType] and [Field] to format the cell.
+///
+/// For example, the field type of the [TypeOptionCellDataHandler] is [FieldType::Date], and
+/// the if field_type is [FieldType::RichText], then the string would be something like "Mar 14, 2022".
+///
+fn stringify_cell_str(&self, cell: &Cell, field_type: &FieldType, field: &Field) -> String;
+/// Format the cell to [BoxCellData] using the passed-in [FieldType] and [Field].
+/// The caller can get the cell data by calling [BoxCellData::unbox_or_none].
fn get_cell_data(
&self,
cell: &Cell,
-decoded_field_type: &FieldType,
+field_type: &FieldType,
field: &Field,
) -> FlowyResult<BoxCellData>;
}
@@ -252,14 +254,16 @@ where
perform_filter().unwrap_or(true)
}
-fn stringify_cell_str(
-&self,
-cell: &Cell,
-decoded_field_type: &FieldType,
-field: &Field,
-) -> String {
+/// Stringify [Cell] to string
+/// if the [TypeOptionCellDataHandler] supports transform, it will try to transform the [Cell] to
+/// the passed-in field type [Cell].
+/// For example, the field type of the [TypeOptionCellDataHandler] is [FieldType::MultiSelect], the field_type
+/// is [FieldType::RichText], then the string will be transformed to a string that separated by comma with the
+/// option's name.
+///
+fn stringify_cell_str(&self, cell: &Cell, field_type: &FieldType, field: &Field) -> String {
if self.transformable() {
-let cell_data = self.transform_type_option_cell(cell, decoded_field_type, field);
+let cell_data = self.transform_type_option_cell(cell, field_type, field);
if let Some(cell_data) = cell_data {
return self.decode_cell_data_to_str(cell_data);
}
@@ -270,17 +274,17 @@ where
fn get_cell_data(
&self,
cell: &Cell,
-decoded_field_type: &FieldType,
+field_type: &FieldType,
field: &Field,
) -> FlowyResult<BoxCellData> {
// tracing::debug!("get_cell_data: {:?}", std::any::type_name::<Self>());
let cell_data = if self.transformable() {
-match self.transform_type_option_cell(cell, decoded_field_type, field) {
+match self.transform_type_option_cell(cell, field_type, field) {
-None => self.get_decoded_cell_data(cell, decoded_field_type, field)?,
+None => self.get_decoded_cell_data(cell, field_type, field)?,
Some(cell_data) => cell_data,
}
} else {
-self.get_decoded_cell_data(cell, decoded_field_type, field)?
+self.get_decoded_cell_data(cell, field_type, field)?
};
Ok(BoxCellData::new(cell_data))
}


@@ -10,7 +10,7 @@ use serde::Serialize;
use flowy_error::FlowyResult;
use crate::entities::{FieldType, GroupChangesetPB, GroupRowsNotificationPB, InsertedRowPB};
-use crate::services::cell::{get_type_cell_protobuf, CellProtobufBlobParser, DecodedCellData};
+use crate::services::cell::{get_cell_protobuf, CellProtobufBlobParser, DecodedCellData};
use crate::services::group::action::{
DidMoveGroupRowResult, DidUpdateGroupRowResult, GroupControllerActions, GroupCustomize,
};
@@ -228,7 +228,7 @@ where
if let Some(cell) = cell {
let mut grouped_rows: Vec<GroupedRow> = vec![];
-let cell_bytes = get_type_cell_protobuf(&cell, field, None);
+let cell_bytes = get_cell_protobuf(&cell, field, None);
let cell_data = cell_bytes.parser::<P>()?;
for group in self.group_ctx.groups() {
if self.can_group(&group.filter_content, &cell_data) {
@@ -311,7 +311,7 @@ where
row_changesets: vec![],
};
if let Some(cell) = row.cells.get(&self.grouping_field_id) {
-let cell_bytes = get_type_cell_protobuf(cell, field, None);
+let cell_bytes = get_cell_protobuf(cell, field, None);
let cell_data = cell_bytes.parser::<P>()?;
if !cell_data.is_empty() {
tracing::error!("did_delete_delete_row {:?}", cell);
@@ -349,7 +349,7 @@ where
};
if let Some(cell) = cell_rev {
-let cell_bytes = get_type_cell_protobuf(&cell, context.field, None);
+let cell_bytes = get_cell_protobuf(&cell, context.field, None);
let cell_data = cell_bytes.parser::<P>()?;
result.deleted_group = self.delete_group_when_move_row(context.row, &cell_data);
result.row_changesets = self.move_row(&cell_data, context);
@@ -374,6 +374,6 @@ fn get_cell_data_from_row<P: CellProtobufBlobParser>(
field: &Field,
) -> Option<P::Object> {
let cell = row.and_then(|row| row.cells.get(&field.id))?;
-let cell_bytes = get_type_cell_protobuf(cell, field, None);
+let cell_bytes = get_cell_protobuf(cell, field, None);
cell_bytes.parser::<P>().ok()
}

View File

@ -5,4 +5,5 @@ pub mod field;
pub mod filter;
pub mod group;
pub mod setting;
+ pub mod share;
pub mod sort;

View File

@ -0,0 +1,68 @@
use crate::entities::FieldType;
use crate::services::cell::stringify_cell_data;
use collab_database::database::Database;
use flowy_error::{FlowyError, FlowyResult};
use std::collections::HashMap;
pub enum ExportStyle {
/// The exported data is plain cell data, without any metadata.
/// The field type information will be lost.
SIMPLE,
/// The exported data contains metadata, such as the field type.
/// It can be used to fully restore the database.
META,
}
pub struct CSVExport;
impl CSVExport {
pub fn export_database(&self, database: &Database, style: ExportStyle) -> FlowyResult<String> {
let mut wtr = csv::Writer::from_writer(vec![]);
let inline_view_id = database.get_inline_view_id();
let fields = database.get_fields(&inline_view_id, None);
// Write fields
let field_records = fields
.iter()
.map(|field| match &style {
ExportStyle::SIMPLE => field.name.clone(),
ExportStyle::META => serde_json::to_string(&field).unwrap(),
})
.collect::<Vec<String>>();
wtr
.write_record(&field_records)
.map_err(|e| FlowyError::internal().context(e))?;
// Write rows
let field_by_field_id = fields
.into_iter()
.map(|field| (field.id.clone(), field))
.collect::<HashMap<_, _>>();
let rows = database.get_rows_for_view(&inline_view_id);
for row in rows {
let cells = field_by_field_id
.iter()
.map(|(field_id, field)| match row.cells.get(field_id) {
None => "".to_string(),
Some(cell) => {
let field_type = FieldType::from(field.field_type);
match style {
ExportStyle::SIMPLE => stringify_cell_data(cell, &field_type, &field_type, field),
ExportStyle::META => serde_json::to_string(cell).unwrap_or_else(|_| "".to_string()),
}
},
})
.collect::<Vec<_>>();
if let Err(e) = wtr.write_record(&cells) {
tracing::warn!("CSV failed to write record: {}", e);
}
}
let data = wtr
.into_inner()
.map_err(|e| FlowyError::internal().context(e))?;
let csv = String::from_utf8(data).map_err(|e| FlowyError::internal().context(e))?;
Ok(csv)
}
}
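
For context, a minimal sketch of how this exporter might be driven from calling code; only CSVExport, ExportStyle, and export_database come from this file, while the export_to_csv wrapper and the assumption that a Database handle is already available are illustrative:

// Hypothetical caller; obtaining the `Database` handle is not part of this commit.
use collab_database::database::Database;
use flowy_database2::services::share::csv::{CSVExport, ExportStyle};
use flowy_error::FlowyResult;

fn export_to_csv(database: &Database) -> FlowyResult<String> {
  // SIMPLE writes the field names as the header row and plain cell strings below;
  // META serializes fields and cells as JSON so the database can be rebuilt on import.
  CSVExport.export_database(database, ExportStyle::SIMPLE)
}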

View File

@ -0,0 +1,157 @@
use crate::entities::FieldType;
use crate::services::cell::CellBuilder;
use crate::services::field::default_type_option_data_for_type;
use collab_database::database::{gen_database_id, gen_database_view_id, gen_field_id, gen_row_id};
use collab_database::fields::Field;
use collab_database::rows::CreateRowParams;
use collab_database::views::{CreateDatabaseParams, DatabaseLayout};
use flowy_error::{FlowyError, FlowyResult};
use rayon::prelude::*;
use std::collections::HashMap;
use std::{fs::File, io::prelude::*};
#[derive(Default)]
pub struct CSVImporter;
impl CSVImporter {
pub fn import_csv_from_file(&self, path: &str) -> FlowyResult<CreateDatabaseParams> {
let mut file = File::open(path)?;
let mut content = String::new();
file.read_to_string(&mut content)?;
let fields_with_rows = self.get_fields_and_rows(content)?;
let database_data = database_from_fields_and_rows(fields_with_rows);
Ok(database_data)
}
pub fn import_csv_from_string(&self, content: String) -> FlowyResult<CreateDatabaseParams> {
let fields_with_rows = self.get_fields_and_rows(content)?;
let database_data = database_from_fields_and_rows(fields_with_rows);
Ok(database_data)
}
fn get_fields_and_rows(&self, content: String) -> Result<FieldsRows, FlowyError> {
let mut fields: Vec<String> = vec![];
if content.is_empty() {
return Err(FlowyError::invalid_data().context("Import content is empty"));
}
let mut reader = csv::Reader::from_reader(content.as_bytes());
if let Ok(headers) = reader.headers() {
for header in headers {
fields.push(header.to_string());
}
} else {
return Err(FlowyError::invalid_data().context("Header not found"));
}
let rows = reader
.records()
.into_iter()
.flat_map(|r| r.ok())
.map(|record| {
record
.into_iter()
.map(|s| s.to_string())
.collect::<Vec<String>>()
})
.collect();
Ok(FieldsRows { fields, rows })
}
}
fn database_from_fields_and_rows(fields_and_rows: FieldsRows) -> CreateDatabaseParams {
let (fields, rows) = fields_and_rows.split();
let view_id = gen_database_view_id();
let database_id = gen_database_id();
let fields = fields
.into_iter()
.enumerate()
.map(
|(index, field_str)| match serde_json::from_str(&field_str) {
Ok(field) => field,
Err(_) => {
let field_type = FieldType::RichText;
let type_option_data = default_type_option_data_for_type(&field_type);
let is_primary = index == 0;
Field::new(
gen_field_id(),
field_str,
field_type.clone().into(),
is_primary,
)
.with_type_option_data(field_type, type_option_data)
},
},
)
.collect::<Vec<Field>>();
let created_rows = rows
.par_iter()
.map(|row| {
let mut cell_by_field_id = HashMap::new();
let mut params = CreateRowParams::new(gen_row_id());
for (index, cell) in row.iter().enumerate() {
if let Some(field) = fields.get(index) {
cell_by_field_id.insert(field.id.clone(), cell.to_string());
}
}
params.cells = CellBuilder::with_cells(cell_by_field_id, &fields).build();
params
})
.collect::<Vec<CreateRowParams>>();
CreateDatabaseParams {
database_id,
view_id,
name: "".to_string(),
layout: DatabaseLayout::Grid,
layout_settings: Default::default(),
filters: vec![],
groups: vec![],
sorts: vec![],
created_rows,
fields,
}
}
struct FieldsRows {
fields: Vec<String>,
rows: Vec<Vec<String>>,
}
impl FieldsRows {
fn split(self) -> (Vec<String>, Vec<Vec<String>>) {
(self.fields, self.rows)
}
}
#[cfg(test)]
mod tests {
use crate::services::share::csv::CSVImporter;
#[test]
fn test_import_csv_from_str() {
let s = r#"Name,Tags,Number,Date,Checkbox,URL
1,tag 1,1,"May 26, 2023",Yes,appflowy.io
2,tag 2,2,"May 22, 2023",No,
,,,,Yes,"#;
let importer = CSVImporter;
let result = importer.import_csv_from_string(s.to_string()).unwrap();
assert_eq!(result.created_rows.len(), 3);
assert_eq!(result.fields.len(), 6);
assert_eq!(result.fields[0].name, "Name");
assert_eq!(result.fields[1].name, "Tags");
assert_eq!(result.fields[2].name, "Number");
assert_eq!(result.fields[3].name, "Date");
assert_eq!(result.fields[4].name, "Checkbox");
assert_eq!(result.fields[5].name, "URL");
assert_eq!(result.created_rows[0].cells.len(), 6);
assert_eq!(result.created_rows[1].cells.len(), 6);
assert_eq!(result.created_rows[2].cells.len(), 6);
println!("{:?}", result);
}
}
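
A minimal sketch of driving the importer from a file on disk rather than an in-memory string; the import_from_file wrapper and the printed summary are illustrative, while CSVImporter, import_csv_from_file, and the CreateDatabaseParams fields come from this file:

use flowy_database2::services::share::csv::CSVImporter;
use flowy_error::FlowyResult;

// Hypothetical caller: parse a CSV file into CreateDatabaseParams and report its size.
fn import_from_file(path: &str) -> FlowyResult<()> {
  // The header row becomes the fields (the first column is the primary field);
  // every remaining record becomes one CreateRowParams.
  let params = CSVImporter.import_csv_from_file(path)?;
  println!(
    "imported {} fields and {} rows",
    params.fields.len(),
    params.created_rows.len()
  );
  Ok(())
}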

View File

@ -0,0 +1,5 @@
mod export;
mod import;
pub use export::*;
pub use import::*;

View File

@ -0,0 +1 @@
pub mod csv;

View File

@ -62,7 +62,7 @@ impl DatabaseEditorTest {
let editor = sdk
.database_manager
- .get_database(&test.child_view.id)
+ .get_database_with_view_id(&test.child_view.id)
.await
.unwrap();
let fields = editor
@ -223,21 +223,34 @@ impl DatabaseEditorTest {
let cell_changeset = SelectOptionCellChangeset::from_insert_option_id(option_id);
self.update_cell(&field.id, row_id, cell_changeset).await
}
+ pub async fn import(&self, s: String) -> String {
+ self.sdk.database_manager.import_csv(s).await.unwrap()
+ }
+ pub async fn get_database(&self, database_id: &str) -> Option<Arc<DatabaseEditor>> {
+ self
+ .sdk
+ .database_manager
+ .get_database(database_id)
+ .await
+ .ok()
+ }
}
- pub struct TestRowBuilder {
+ pub struct TestRowBuilder<'a> {
row_id: RowId,
- fields: Vec<Field>,
- cell_build: CellBuilder,
+ fields: &'a [Field],
+ cell_build: CellBuilder<'a>,
}
- impl TestRowBuilder {
- pub fn new(row_id: RowId, fields: Vec<Field>) -> Self {
- let inner_builder = CellBuilder::with_cells(Default::default(), fields.clone());
+ impl<'a> TestRowBuilder<'a> {
+ pub fn new(row_id: RowId, fields: &'a [Field]) -> Self {
+ let cell_build = CellBuilder::with_cells(Default::default(), fields);
Self {
row_id,
fields,
- cell_build: inner_builder,
+ cell_build,
}
}

View File

@ -111,7 +111,7 @@ pub fn make_test_board() -> DatabaseData {
// We have many assumptions base on the number of the rows, so do not change the number of the loop.
for i in 0..5 {
- let mut row_builder = TestRowBuilder::new(i.into(), fields.clone());
+ let mut row_builder = TestRowBuilder::new(i.into(), &fields);
match i {
0 => {
for field_type in FieldType::iter() {

View File

@ -40,7 +40,7 @@ pub fn make_test_calendar() -> DatabaseData {
let calendar_setting: LayoutSetting = CalendarLayoutSetting::new(date_field_id).into();
for i in 0..5 {
- let mut row_builder = TestRowBuilder::new(i.into(), fields.clone());
+ let mut row_builder = TestRowBuilder::new(i.into(), &fields);
match i {
0 => {
for field_type in FieldType::iter() {

View File

@ -111,7 +111,7 @@ pub fn make_test_grid() -> DatabaseData {
}
for i in 0..6 {
- let mut row_builder = TestRowBuilder::new(i.into(), fields.clone());
+ let mut row_builder = TestRowBuilder::new(i.into(), &fields);
match i {
0 => {
for field_type in FieldType::iter() {

View File

@ -7,3 +7,4 @@ mod layout_test;
mod sort_test;
mod mock_data;
+ mod share_test;

View File

@ -0,0 +1,36 @@
use crate::database::database_editor::DatabaseEditorTest;
use flowy_database2::services::share::csv::ExportStyle;
#[tokio::test]
async fn export_and_then_import_test() {
let test = DatabaseEditorTest::new_grid().await;
let database = test.editor.clone();
let csv_1 = database.export_csv(ExportStyle::SIMPLE).await.unwrap();
let imported_database_id = test.import(csv_1.clone()).await;
let csv_2 = test
.get_database(&imported_database_id)
.await
.unwrap()
.export_csv(ExportStyle::SIMPLE)
.await
.unwrap();
let mut reader = csv::Reader::from_reader(csv_1.as_bytes());
let export_csv_records_1 = reader.records();
let mut reader = csv::Reader::from_reader(csv_2.as_bytes());
let export_csv_records_2 = reader.records();
let mut a = export_csv_records_1
.map(|v| v.unwrap())
.flat_map(|v| v.iter().map(|v| v.to_string()).collect::<Vec<_>>())
.collect::<Vec<String>>();
let mut b = export_csv_records_2
.map(|v| v.unwrap())
.flat_map(|v| v.iter().map(|v| v.to_string()).collect::<Vec<_>>())
.collect::<Vec<String>>();
a.sort();
b.sort();
assert_eq!(a, b);
}
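
The flatten-and-sort normalization above could be pulled into a small helper; a sketch using the same csv crate API, not part of this commit:

// Hypothetical helper: flatten a CSV string into a sorted list of cell values
// so two exports can be compared independently of row ordering.
fn sorted_cells(csv_str: &str) -> Vec<String> {
  let mut reader = csv::Reader::from_reader(csv_str.as_bytes());
  let mut cells: Vec<String> = reader
    .records()
    .filter_map(|record| record.ok())
    .flat_map(|record| record.iter().map(|v| v.to_string()).collect::<Vec<_>>())
    .collect();
  cells.sort();
  cells
}

With it, the assertion reduces to assert_eq!(sorted_cells(&csv_1), sorted_cells(&csv_2)).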

View File

@ -0,0 +1 @@
mod export_test;