chore: import history database (#2638)

Nathan.fooo 2023-05-27 21:29:18 +08:00 committed by GitHub
parent 6935653e15
commit 45d0d41830
10 changed files with 302 additions and 106 deletions

View File

@ -606,24 +606,13 @@ impl_into_field_type!(u8);
impl From<FieldType> for i64 {
fn from(ty: FieldType) -> Self {
match ty {
FieldType::RichText => 0,
FieldType::Number => 1,
FieldType::DateTime => 2,
FieldType::SingleSelect => 3,
FieldType::MultiSelect => 4,
FieldType::Checkbox => 5,
FieldType::URL => 6,
FieldType::Checklist => 7,
FieldType::UpdatedAt => 8,
FieldType::CreatedAt => 9,
}
(ty as u8) as i64
}
}
impl From<&FieldType> for i64 {
fn from(ty: &FieldType) -> Self {
ty.clone() as i64
i64::from(ty.clone())
}
}
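
The new `From` impl relies on FieldType carrying explicit discriminants that match the removed match arms, so `(ty as u8) as i64` yields the same values. A minimal standalone sketch (local enum, not the AppFlowy definition):

#[derive(Clone)]
enum FieldType {
    RichText = 0,
    Number = 1,
    DateTime = 2,
    SingleSelect = 3,
    MultiSelect = 4,
    Checkbox = 5,
    URL = 6,
    Checklist = 7,
    UpdatedAt = 8,
    CreatedAt = 9,
}

impl From<FieldType> for i64 {
    fn from(ty: FieldType) -> Self {
        // Equivalent to the removed match as long as the discriminants above hold.
        (ty as u8) as i64
    }
}

impl From<&FieldType> for i64 {
    fn from(ty: &FieldType) -> Self {
        i64::from(ty.clone())
    }
}

fn main() {
    assert_eq!(i64::from(FieldType::Checklist), 7);
    assert_eq!(i64::from(&FieldType::CreatedAt), 9);
}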

View File

@ -12,6 +12,7 @@ use crate::manager::DatabaseManager2;
use crate::services::field::{
type_option_data_from_pb_or_default, DateCellChangeset, SelectOptionCellChangeset,
};
use crate::services::share::csv::CSVFormat;
#[tracing::instrument(level = "trace", skip(data, manager), err)]
pub(crate) async fn get_database_data_handler(
@ -613,9 +614,9 @@ pub(crate) async fn import_data_handler(
match params.import_type {
ImportTypePB::CSV => {
if let Some(data) = params.data {
manager.import_csv(data).await?;
manager.import_csv(data, CSVFormat::META).await?;
} else if let Some(uri) = params.uri {
manager.import_csv_data_from_uri(uri).await?;
manager.import_csv_from_uri(uri, CSVFormat::META).await?;
} else {
return Err(FlowyError::new(
ErrorCode::InvalidData,

View File

@ -16,7 +16,7 @@ use flowy_task::TaskDispatcher;
use crate::entities::{DatabaseDescriptionPB, DatabaseLayoutPB, RepeatedDatabaseDescriptionPB};
use crate::services::database::{DatabaseEditor, MutexDatabase};
use crate::services::share::csv::{CSVImporter, ExportStyle};
use crate::services::share::csv::{CSVFormat, CSVImporter, ImportResult};
pub trait DatabaseUser2: Send + Sync {
fn user_id(&self) -> Result<i64, FlowyError>;
@ -195,20 +195,24 @@ impl DatabaseManager2 {
Ok(())
}
pub async fn import_csv(&self, content: String) -> FlowyResult<String> {
let params = tokio::task::spawn_blocking(move || CSVImporter.import_csv_from_string(content))
.await
.map_err(internal_error)??;
let database_id = params.database_id.clone();
pub async fn import_csv(&self, content: String, format: CSVFormat) -> FlowyResult<ImportResult> {
let params =
tokio::task::spawn_blocking(move || CSVImporter.import_csv_from_string(content, format))
.await
.map_err(internal_error)??;
let result = ImportResult {
database_id: params.database_id.clone(),
view_id: params.view_id.clone(),
};
self.create_database_with_params(params).await?;
Ok(database_id)
Ok(result)
}
pub async fn import_csv_data_from_uri(&self, _uri: String) -> FlowyResult<()> {
pub async fn import_csv_from_uri(&self, _uri: String, _format: CSVFormat) -> FlowyResult<()> {
Ok(())
}
pub async fn export_csv(&self, view_id: &str, style: ExportStyle) -> FlowyResult<String> {
pub async fn export_csv(&self, view_id: &str, style: CSVFormat) -> FlowyResult<String> {
let database = self.get_database_with_view_id(view_id).await?;
database.export_csv(style).await
}
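
Note on the `??` after `spawn_blocking` in `import_csv`: the outer `?` propagates the task's JoinError (mapped through `internal_error`), the inner `?` propagates the importer's own error. A standalone sketch of that pattern using only tokio and std types (the names below are illustrative, not the flowy APIs):

use tokio::task;

// Stand-in for CSVImporter::import_csv_from_string: a blocking parse that
// returns its own Result.
fn parse_blocking(content: String) -> Result<usize, String> {
    Ok(content.lines().count())
}

async fn import(content: String) -> Result<usize, String> {
    // Outer `?`: JoinError from the blocking task; inner `?`: the parser's error.
    let rows = task::spawn_blocking(move || parse_blocking(content))
        .await
        .map_err(|e| e.to_string())??;
    Ok(rows)
}

#[tokio::main]
async fn main() {
    assert_eq!(import("a,b\n1,2\n".to_string()).await.unwrap(), 2);
}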

View File

@ -35,7 +35,7 @@ use crate::services::field::{
};
use crate::services::filter::Filter;
use crate::services::group::{default_group_setting, GroupSetting, RowChangeset};
use crate::services::share::csv::{CSVExport, ExportStyle};
use crate::services::share::csv::{CSVExport, CSVFormat};
use crate::services::sort::Sort;
#[derive(Clone)]
@ -833,7 +833,7 @@ impl DatabaseEditor {
}
}
pub async fn export_csv(&self, style: ExportStyle) -> FlowyResult<String> {
pub async fn export_csv(&self, style: CSVFormat) -> FlowyResult<String> {
let database = self.database.clone();
let csv = tokio::task::spawn_blocking(move || {
let database_guard = database.lock();

View File

@ -36,7 +36,7 @@ impl TypeOption for DateTypeOption {
impl From<TypeOptionData> for DateTypeOption {
fn from(data: TypeOptionData) -> Self {
let date_format = data
.get_i64_value("data_format")
.get_i64_value("date_format")
.map(DateFormat::from)
.unwrap_or_default();
let time_format = data
@ -58,7 +58,7 @@ impl From<TypeOptionData> for DateTypeOption {
impl From<DateTypeOption> for TypeOptionData {
fn from(data: DateTypeOption) -> Self {
TypeOptionDataBuilder::new()
.insert_i64_value("data_format", data.date_format.value())
.insert_i64_value("date_format", data.date_format.value())
.insert_i64_value("time_format", data.time_format.value())
.insert_i64_value("field_type", data.field_type.value())
.build()
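
This key rename matters for the history import: the serialized type options in old exports (see the fixture in the test file below) store the value under "date_format", so reading it back under the misspelled "data_format" silently fell back to the default format. Illustrated with a plain HashMap standing in for TypeOptionData:

use std::collections::HashMap;

fn main() {
    // Imported history data carries the value under "date_format".
    let mut data: HashMap<&str, i64> = HashMap::new();
    data.insert("date_format", 1);

    // Old lookup key (typo): the value is missed and the default (0) is used.
    let before = data.get("data_format").copied().unwrap_or_default();
    // Fixed lookup key: the stored value is found.
    let after = data.get("date_format").copied().unwrap_or_default();

    assert_eq!(before, 0);
    assert_eq!(after, 1);
}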

View File

@ -49,7 +49,9 @@ impl ToCellChangeset for DateCellChangeset {
#[derive(Default, Clone, Debug, Serialize)]
pub struct DateCellData {
pub timestamp: Option<i64>,
#[serde(default)]
pub include_time: bool,
#[serde(default)]
pub timezone_id: String,
}
@ -61,7 +63,6 @@ impl From<&Cell> for DateCellData {
let include_time = cell.get_bool_value("include_time").unwrap_or_default();
let timezone_id = cell.get_str_value("timezone_id").unwrap_or_default();
Self {
timestamp,
include_time,
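
The `#[serde(default)]` attributes let payloads that predate `include_time` and `timezone_id` deserialize instead of failing with a missing-field error. A minimal sketch with a local struct (Deserialize is added here purely for demonstration; the hunk above only shows Serialize):

use serde::Deserialize;

#[derive(Default, Debug, Deserialize)]
struct DateCellData {
    timestamp: Option<i64>,
    #[serde(default)]
    include_time: bool,
    #[serde(default)]
    timezone_id: String,
}

fn main() {
    // An old payload that predates the two newer fields.
    let cell: DateCellData = serde_json::from_str(r#"{"timestamp":1647251762}"#).unwrap();
    assert_eq!(cell.timestamp, Some(1647251762));
    assert!(!cell.include_time);
    assert!(cell.timezone_id.is_empty());
}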

View File

@ -3,12 +3,13 @@ use crate::services::cell::stringify_cell_data;
use collab_database::database::Database;
use flowy_error::{FlowyError, FlowyResult};
use std::collections::HashMap;
use indexmap::IndexMap;
pub enum ExportStyle {
#[derive(Debug, Clone, Copy)]
pub enum CSVFormat {
/// The exported data is plain data, without any metadata.
/// The field type information will be lost.
SIMPLE,
Original,
/// The exported data contains metadata, such as the field type.
/// It can be used to fully restore the database.
META,
@ -16,7 +17,7 @@ pub enum ExportStyle {
pub struct CSVExport;
impl CSVExport {
pub fn export_database(&self, database: &Database, style: ExportStyle) -> FlowyResult<String> {
pub fn export_database(&self, database: &Database, style: CSVFormat) -> FlowyResult<String> {
let mut wtr = csv::Writer::from_writer(vec![]);
let inline_view_id = database.get_inline_view_id();
let fields = database.get_fields(&inline_view_id, None);
@ -25,8 +26,8 @@ impl CSVExport {
let field_records = fields
.iter()
.map(|field| match &style {
ExportStyle::SIMPLE => field.name.clone(),
ExportStyle::META => serde_json::to_string(&field).unwrap(),
CSVFormat::Original => field.name.clone(),
CSVFormat::META => serde_json::to_string(&field).unwrap(),
})
.collect::<Vec<String>>();
wtr
@ -34,10 +35,10 @@ impl CSVExport {
.map_err(|e| FlowyError::internal().context(e))?;
// Write rows
let field_by_field_id = fields
.into_iter()
.map(|field| (field.id.clone(), field))
.collect::<HashMap<_, _>>();
let mut field_by_field_id = IndexMap::new();
fields.into_iter().for_each(|field| {
field_by_field_id.insert(field.id.clone(), field);
});
let rows = database.get_rows_for_view(&inline_view_id);
for row in rows {
let cells = field_by_field_id
@ -47,8 +48,8 @@ impl CSVExport {
Some(cell) => {
let field_type = FieldType::from(field.field_type);
match style {
ExportStyle::SIMPLE => stringify_cell_data(cell, &field_type, &field_type, field),
ExportStyle::META => serde_json::to_string(cell).unwrap_or_else(|_| "".to_string()),
CSVFormat::Original => stringify_cell_data(cell, &field_type, &field_type, field),
CSVFormat::META => serde_json::to_string(cell).unwrap_or_else(|_| "".to_string()),
}
},
})
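
The two formats differ only in what is written into each CSV cell: CSVFormat::Original emits display names and stringified cell values, while CSVFormat::META emits serialized field and cell definitions so a later import can restore the schema. The swap from HashMap to IndexMap also keeps the column order stable, since IndexMap iterates in insertion order. A rough illustration with the csv crate and hand-written field JSON (not AppFlowy's real Field serialization):

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Original: human-readable header plus stringified cells.
    let mut wtr = csv::Writer::from_writer(vec![]);
    wtr.write_record(&["Name", "Price"])?;
    wtr.write_record(&["A", "$1"])?;
    let original = String::from_utf8(wtr.into_inner()?)?;
    assert!(original.starts_with("Name,Price"));

    // META: each header cell is a JSON blob describing the field.
    let mut wtr = csv::Writer::from_writer(vec![]);
    wtr.write_record(&[
        r#"{"id":"f1","name":"Name","field_type":0}"#,
        r#"{"id":"f2","name":"Price","field_type":1}"#,
    ])?;
    let meta = String::from_utf8(wtr.into_inner()?)?;
    assert!(meta.contains("field_type"));
    Ok(())
}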

View File

@ -1,31 +1,38 @@
use crate::entities::FieldType;
use crate::services::cell::CellBuilder;
use crate::services::field::default_type_option_data_from_type;
use crate::services::field::{default_type_option_data_from_type, CELL_DATA};
use crate::services::share::csv::CSVFormat;
use collab_database::database::{gen_database_id, gen_database_view_id, gen_field_id, gen_row_id};
use collab_database::fields::Field;
use collab_database::rows::CreateRowParams;
use collab_database::rows::{new_cell_builder, Cell, CreateRowParams};
use collab_database::views::{CreateDatabaseParams, DatabaseLayout};
use flowy_error::{FlowyError, FlowyResult};
use rayon::prelude::*;
use std::collections::HashMap;
use std::{fs::File, io::prelude::*};
#[derive(Default)]
pub struct CSVImporter;
impl CSVImporter {
pub fn import_csv_from_file(&self, path: &str) -> FlowyResult<CreateDatabaseParams> {
pub fn import_csv_from_file(
&self,
path: &str,
style: CSVFormat,
) -> FlowyResult<CreateDatabaseParams> {
let mut file = File::open(path)?;
let mut content = String::new();
file.read_to_string(&mut content)?;
let fields_with_rows = self.get_fields_and_rows(content)?;
let database_data = database_from_fields_and_rows(fields_with_rows);
let database_data = database_from_fields_and_rows(fields_with_rows, &style);
Ok(database_data)
}
pub fn import_csv_from_string(&self, content: String) -> FlowyResult<CreateDatabaseParams> {
pub fn import_csv_from_string(
&self,
content: String,
format: CSVFormat,
) -> FlowyResult<CreateDatabaseParams> {
let fields_with_rows = self.get_fields_and_rows(content)?;
let database_data = database_from_fields_and_rows(fields_with_rows);
let database_data = database_from_fields_and_rows(fields_with_rows, &format);
Ok(database_data)
}
@ -60,7 +67,10 @@ impl CSVImporter {
}
}
fn database_from_fields_and_rows(fields_and_rows: FieldsRows) -> CreateDatabaseParams {
fn database_from_fields_and_rows(
fields_and_rows: FieldsRows,
format: &CSVFormat,
) -> CreateDatabaseParams {
let (fields, rows) = fields_and_rows.split();
let view_id = gen_database_view_id();
let database_id = gen_database_id();
@ -68,36 +78,47 @@ fn database_from_fields_and_rows(fields_and_rows: FieldsRows) -> CreateDatabaseP
let fields = fields
.into_iter()
.enumerate()
.map(
|(index, field_str)| match serde_json::from_str(&field_str) {
Ok(field) => field,
Err(_) => {
let field_type = FieldType::RichText;
let type_option_data = default_type_option_data_from_type(&field_type);
let is_primary = index == 0;
Field::new(
gen_field_id(),
field_str,
field_type.clone().into(),
is_primary,
)
.with_type_option_data(field_type, type_option_data)
},
.map(|(index, field_meta)| match format {
CSVFormat::Original => default_field(field_meta, index == 0),
CSVFormat::META => {
// Each META header cell is a serialized Field; fall back to a plain
// text field if it can't be parsed.
match serde_json::from_str(&field_meta) {
Ok(field) => field,
Err(e) => {
dbg!(e);
default_field(field_meta, index == 0)
},
}
},
)
})
.collect::<Vec<Field>>();
let created_rows = rows
.par_iter()
.map(|row| {
let mut cell_by_field_id = HashMap::new();
.iter()
.map(|cells| {
let mut params = CreateRowParams::new(gen_row_id());
for (index, cell) in row.iter().enumerate() {
for (index, cell_content) in cells.iter().enumerate() {
if let Some(field) = fields.get(index) {
cell_by_field_id.insert(field.id.clone(), cell.to_string());
let field_type = FieldType::from(field.field_type);
// Build the cell according to the chosen import format.
let cell = match format {
CSVFormat::Original => new_cell_builder(field_type)
.insert_str_value(CELL_DATA, cell_content.to_string())
.build(),
CSVFormat::META => match serde_json::from_str::<Cell>(cell_content) {
Ok(cell) => cell,
Err(_) => new_cell_builder(field_type)
.insert_str_value(CELL_DATA, "".to_string())
.build(),
},
};
params.cells.insert(field.id.clone(), cell);
}
}
params.cells = CellBuilder::with_cells(cell_by_field_id, &fields).build();
params
})
.collect::<Vec<CreateRowParams>>();
@ -116,6 +137,18 @@ fn database_from_fields_and_rows(fields_and_rows: FieldsRows) -> CreateDatabaseP
}
}
fn default_field(field_str: String, is_primary: bool) -> Field {
let field_type = FieldType::RichText;
let type_option_data = default_type_option_data_from_type(&field_type);
Field::new(
gen_field_id(),
field_str,
field_type.clone().into(),
is_primary,
)
.with_type_option_data(field_type, type_option_data)
}
struct FieldsRows {
fields: Vec<String>,
rows: Vec<Vec<String>>,
@ -126,9 +159,14 @@ impl FieldsRows {
}
}
pub struct ImportResult {
pub database_id: String,
pub view_id: String,
}
#[cfg(test)]
mod tests {
use crate::services::share::csv::CSVImporter;
use crate::services::share::csv::{CSVFormat, CSVImporter};
#[test]
fn test_import_csv_from_str() {
@ -137,7 +175,9 @@ mod tests {
2,tag 2,2,"May 22, 2023",No,
,,,,Yes,"#;
let importer = CSVImporter;
let result = importer.import_csv_from_string(s.to_string()).unwrap();
let result = importer
.import_csv_from_string(s.to_string(), CSVFormat::Original)
.unwrap();
assert_eq!(result.created_rows.len(), 3);
assert_eq!(result.fields.len(), 6);
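
The META import path parses each cell as JSON and degrades to an empty cell when parsing fails, so a single corrupt cell does not abort the import. A sketch of that fallback using plain serde_json values instead of collab_database's Cell and new_cell_builder:

use serde_json::{json, Value};

// Stand-in for the META branch in the field/cell loop above.
fn cell_from_meta(content: &str) -> Value {
    serde_json::from_str::<Value>(content).unwrap_or_else(|_| json!({ "data": "" }))
}

fn main() {
    let ok = cell_from_meta(r#"{"data":"A","field_type":0}"#);
    assert_eq!(ok["data"], "A");

    let broken = cell_from_meta("not json");
    assert_eq!(broken["data"], "");
}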

View File

@ -12,6 +12,7 @@ use flowy_database2::services::field::{
CheckboxTypeOption, ChecklistTypeOption, DateCellChangeset, MultiSelectTypeOption, SelectOption,
SelectOptionCellChangeset, SingleSelectTypeOption,
};
use flowy_database2::services::share::csv::{CSVFormat, ImportResult};
use flowy_error::FlowyResult;
use flowy_test::folder_event::ViewTest;
use flowy_test::FlowyCoreTest;
@ -224,8 +225,13 @@ impl DatabaseEditorTest {
self.update_cell(&field.id, row_id, cell_changeset).await
}
pub async fn import(&self, s: String) -> String {
self.sdk.database_manager.import_csv(s).await.unwrap()
pub async fn import(&self, s: String, format: CSVFormat) -> ImportResult {
self
.sdk
.database_manager
.import_csv(s, format)
.await
.unwrap()
}
pub async fn get_database(&self, database_id: &str) -> Option<Arc<DatabaseEditor>> {

View File

@ -1,36 +1,190 @@
use crate::database::database_editor::DatabaseEditorTest;
use flowy_database2::services::share::csv::ExportStyle;
use flowy_database2::entities::FieldType;
use flowy_database2::services::cell::stringify_cell_data;
use flowy_database2::services::field::CHECK;
use flowy_database2::services::share::csv::CSVFormat;
#[tokio::test]
async fn export_and_then_import_test() {
async fn export_meta_csv_test() {
let test = DatabaseEditorTest::new_grid().await;
let database = test.editor.clone();
let csv_1 = database.export_csv(ExportStyle::SIMPLE).await.unwrap();
let s = database.export_csv(CSVFormat::META).await.unwrap();
let mut reader = csv::Reader::from_reader(s.as_bytes());
for header in reader.headers() {
dbg!(header);
}
let imported_database_id = test.import(csv_1.clone()).await;
let csv_2 = test
.get_database(&imported_database_id)
.await
.unwrap()
.export_csv(ExportStyle::SIMPLE)
.await
.unwrap();
let mut reader = csv::Reader::from_reader(csv_1.as_bytes());
let export_csv_records_1 = reader.records();
let mut reader = csv::Reader::from_reader(csv_2.as_bytes());
let export_csv_records_2 = reader.records();
let mut a = export_csv_records_1
.map(|v| v.unwrap())
.flat_map(|v| v.iter().map(|v| v.to_string()).collect::<Vec<_>>())
.collect::<Vec<String>>();
let mut b = export_csv_records_2
.map(|v| v.unwrap())
.flat_map(|v| v.iter().map(|v| v.to_string()).collect::<Vec<_>>())
.collect::<Vec<String>>();
a.sort();
b.sort();
assert_eq!(a, b);
let export_csv_records = reader.records();
for record in export_csv_records {
let record = record.unwrap();
dbg!(record);
}
}
#[tokio::test]
async fn export_csv_test() {
let test = DatabaseEditorTest::new_grid().await;
let database = test.editor.clone();
let s = database.export_csv(CSVFormat::Original).await.unwrap();
let expected = r#"Name,Price,Time,Status,Platform,is urgent,link,TODO,Updated At,Created At
A,$1,2022/03/14,,"Google,Facebook",Yes,AppFlowy website - https://www.appflowy.io,"Wake up at 6:00 am,Get some coffee,Start working",2022/03/14,2022/03/14
,$2,2022/03/14,,"Google,Twitter",Yes,,,2022/03/14,2022/03/14
C,$3,2022/03/14,Completed,Facebook,No,,,2022/03/14,2022/03/14
DA,$14,2022/11/17,Completed,,No,,,2022/11/17,2022/11/17
AE,,2022/11/13,Planned,,No,,,2022/11/13,2022/11/13
AE,$5,2022/12/24,Planned,,Yes,,,2022/12/24,2022/12/24
"#;
assert_eq!(s, expected);
}
#[tokio::test]
async fn export_and_then_import_meta_csv_test() {
let test = DatabaseEditorTest::new_grid().await;
let database = test.editor.clone();
let format = CSVFormat::META;
let csv_1 = database.export_csv(format).await.unwrap();
let result = test.import(csv_1.clone(), format).await;
let database = test.get_database(&result.database_id).await.unwrap();
let fields = database.get_fields(&result.view_id, None);
let rows = database.get_rows(&result.view_id).await.unwrap();
assert_eq!(fields[0].field_type, 0);
assert_eq!(fields[1].field_type, 1);
assert_eq!(fields[2].field_type, 2);
assert_eq!(fields[3].field_type, 3);
assert_eq!(fields[4].field_type, 4);
assert_eq!(fields[5].field_type, 5);
assert_eq!(fields[6].field_type, 6);
assert_eq!(fields[7].field_type, 7);
assert_eq!(fields[8].field_type, 8);
assert_eq!(fields[9].field_type, 9);
for field in fields {
for (index, row) in rows.iter().enumerate() {
if let Some(cell) = row.cells.get(&field.id) {
let field_type = FieldType::from(field.field_type);
let s = stringify_cell_data(cell, &field_type, &field_type, &field);
match &field_type {
FieldType::RichText => {
if index == 0 {
assert_eq!(s, "A");
}
},
FieldType::Number => {
if index == 0 {
assert_eq!(s, "$1");
}
},
FieldType::DateTime => {
if index == 0 {
assert_eq!(s, "2022/03/14");
}
},
FieldType::SingleSelect => {
if index == 0 {
assert_eq!(s, "");
}
},
FieldType::MultiSelect => {
if index == 0 {
assert_eq!(s, "Google,Facebook");
}
},
FieldType::Checkbox => {},
FieldType::URL => {},
FieldType::Checklist => {},
FieldType::UpdatedAt => {},
FieldType::CreatedAt => {},
}
} else {
panic!(
"Can not found the cell with id: {} in {:?}",
field.id, row.cells
);
}
}
}
}
#[tokio::test]
async fn history_database_import_test() {
let format = CSVFormat::META;
let test = DatabaseEditorTest::new_grid().await;
let csv = r#""{""id"":""TJCxFc"",""name"":""Name"",""field_type"":0,""visibility"":true,""width"":100,""type_options"":{""0"":{""data"":""""}},""is_primary"":true}","{""id"":""XbMTxa"",""name"":""Price"",""field_type"":1,""visibility"":true,""width"":100,""type_options"":{""1"":{""format"":1,""name"":""Number"",""scale"":0,""symbol"":""$""}},""is_primary"":false}","{""id"":""cPgMsM"",""name"":""Time"",""field_type"":2,""visibility"":true,""width"":100,""type_options"":{""2"":{""date_format"":1,""field_type"":2,""time_format"":1}},""is_primary"":false}","{""id"":""vCelOS"",""name"":""Status"",""field_type"":3,""visibility"":true,""width"":100,""type_options"":{""3"":{""content"":""{\""options\"":[{\""id\"":\""c_-f\"",\""name\"":\""Completed\"",\""color\"":\""Purple\""},{\""id\"":\""wQpG\"",\""name\"":\""Planned\"",\""color\"":\""Purple\""},{\""id\"":\""VLHf\"",\""name\"":\""Paused\"",\""color\"":\""Purple\""}],\""disable_color\"":false}""}},""is_primary"":false}","{""id"":""eQEcry"",""name"":""Platform"",""field_type"":4,""visibility"":true,""width"":100,""type_options"":{""4"":{""content"":""{\""options\"":[{\""id\"":\""edpw\"",\""name\"":\""Google\"",\""color\"":\""Purple\""},{\""id\"":\""cx0O\"",\""name\"":\""Facebook\"",\""color\"":\""Purple\""},{\""id\"":\""EsFR\"",\""name\"":\""Twitter\"",\""color\"":\""Purple\""}],\""disable_color\"":false}""}},""is_primary"":false}","{""id"":""KGlcPi"",""name"":""is urgent"",""field_type"":5,""visibility"":true,""width"":100,""type_options"":{""5"":{""is_selected"":false}},""is_primary"":false}","{""id"":""SBpJNI"",""name"":""link"",""field_type"":6,""visibility"":true,""width"":100,""type_options"":{""6"":{""data"":""""}},""is_primary"":false}","{""id"":""orSsPm"",""name"":""TODO"",""field_type"":7,""visibility"":true,""width"":100,""type_options"":{""7"":{""content"":""{\""options\"":[{\""id\"":\""HLXi\"",\""name\"":\""Wake up at 6:00 am\"",\""color\"":\""Purple\""},{\""id\"":\""CsGr\"",\""name\"":\""Get some coffee\"",\""color\"":\""Purple\""},{\""id\"":\""4WqN\"",\""name\"":\""Start working\"",\""color\"":\""Purple\""}],\""disable_color\"":false}""}},""is_primary"":false}"
"{""data"":""A"",""field_type"":0}","{""data"":""1"",""field_type"":1}","{""data"":""1647251762"",""field_type"":2}","{""data"":"""",""field_type"":3}","{""data"":""edpw,cx0O"",""field_type"":4}","{""data"":""Yes"",""field_type"":5}","{""data"":""AppFlowy website - https://www.appflowy.io"",""field_type"":6}","{""data"":""HLXi,CsGr,4WqN"",""field_type"":7}"
"{""data"":"""",""field_type"":0}","{""data"":""2"",""field_type"":1}","{""data"":""1647251762"",""field_type"":2}","{""data"":"""",""field_type"":3}","{""data"":""edpw,EsFR"",""field_type"":4}","{""data"":""Yes"",""field_type"":5}","{""data"":"""",""field_type"":6}","{""data"":"""",""field_type"":7}"
"{""data"":""C"",""field_type"":0}","{""data"":""3"",""field_type"":1}","{""data"":""1647251762"",""field_type"":2}","{""data"":""c_-f"",""field_type"":3}","{""data"":""cx0O"",""field_type"":4}","{""data"":""No"",""field_type"":5}","{""data"":"""",""field_type"":6}","{""data"":"""",""field_type"":7}"
"{""data"":""DA"",""field_type"":0}","{""data"":""14"",""field_type"":1}","{""data"":""1668704685"",""field_type"":2}","{""data"":""c_-f"",""field_type"":3}","{""data"":"""",""field_type"":4}","{""data"":""No"",""field_type"":5}","{""data"":"""",""field_type"":6}","{""data"":"""",""field_type"":7}"
"{""data"":""AE"",""field_type"":0}","{""data"":"""",""field_type"":1}","{""data"":""1668359085"",""field_type"":2}","{""data"":""wQpG"",""field_type"":3}","{""data"":"""",""field_type"":4}","{""data"":""No"",""field_type"":5}","{""data"":"""",""field_type"":6}","{""data"":"""",""field_type"":7}"
"{""data"":""AE"",""field_type"":0}","{""data"":""5"",""field_type"":1}","{""data"":""1671938394"",""field_type"":2}","{""data"":""wQpG"",""field_type"":3}","{""data"":"""",""field_type"":4}","{""data"":""Yes"",""field_type"":5}","{""data"":"""",""field_type"":6}","{""data"":"""",""field_type"":7}"
"#;
let result = test.import(csv.to_string(), format).await;
let database = test.get_database(&result.database_id).await.unwrap();
let fields = database.get_fields(&result.view_id, None);
let rows = database.get_rows(&result.view_id).await.unwrap();
assert_eq!(fields[0].field_type, 0);
assert_eq!(fields[1].field_type, 1);
assert_eq!(fields[2].field_type, 2);
assert_eq!(fields[3].field_type, 3);
assert_eq!(fields[4].field_type, 4);
assert_eq!(fields[5].field_type, 5);
assert_eq!(fields[6].field_type, 6);
assert_eq!(fields[7].field_type, 7);
for field in fields {
for (index, row) in rows.iter().enumerate() {
if let Some(cell) = row.cells.get(&field.id) {
let field_type = FieldType::from(field.field_type);
let s = stringify_cell_data(cell, &field_type, &field_type, &field);
match &field_type {
FieldType::RichText => {
if index == 0 {
assert_eq!(s, "A");
}
},
FieldType::Number => {
if index == 0 {
assert_eq!(s, "$1");
}
},
FieldType::DateTime => {
if index == 0 {
assert_eq!(s, "2022/03/14");
}
},
FieldType::SingleSelect => {
if index == 0 {
assert_eq!(s, "");
}
},
FieldType::MultiSelect => {
if index == 0 {
assert_eq!(s, "Google,Facebook");
}
},
FieldType::Checkbox => {
if index == 0 {
assert_eq!(s, CHECK);
}
},
FieldType::URL => {
if index == 0 {
assert_eq!(s, "AppFlowy website - https://www.appflowy.io");
}
},
FieldType::Checklist => {},
FieldType::UpdatedAt => {},
FieldType::CreatedAt => {},
}
} else {
panic!(
"Can not found the cell with id: {} in {:?}",
field.id, row.cells
);
}
}
}
}
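
For reference, the doubled quotes in the fixture above are ordinary RFC 4180 escaping: a quote inside a quoted CSV field is written as two quotes, so the csv crate reads each META cell back as plain JSON. A small standalone check:

fn main() {
    // One quoted CSV field containing an embedded JSON object.
    let raw = r#""{""data"":""A"",""field_type"":0}""#;
    let mut reader = csv::ReaderBuilder::new()
        .has_headers(false)
        .from_reader(raw.as_bytes());
    let record = reader.records().next().unwrap().unwrap();
    assert_eq!(&record[0], r#"{"data":"A","field_type":0}"#);
}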