feat: async load database row, async filter, async sort (#6068)

* chore: display date when convert text to date

* chore: filter & sort

* chore: fix filter and sort

* chore: fix test

* chore: clippy

* chore: fix test
This commit is contained in:
Nathan.fooo 2024-08-26 09:46:16 +08:00 committed by GitHub
parent 242faee2f5
commit 62f0307289
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
59 changed files with 703 additions and 667 deletions

View File

@@ -1,6 +1,6 @@
import 'package:appflowy/plugins/database/application/row/row_service.dart'; import 'package:appflowy/plugins/database/application/row/row_service.dart';
import 'package:appflowy/plugins/database/domain/row_listener.dart'; import 'package:appflowy/plugins/database/domain/row_listener.dart';
import 'package:appflowy_backend/protobuf/flowy-database2/row_entities.pb.dart'; import 'package:appflowy_backend/protobuf/flowy-database2/protobuf.dart';
import 'package:flutter/material.dart'; import 'package:flutter/material.dart';
import '../cell/cell_cache.dart'; import '../cell/cell_cache.dart';
@@ -39,6 +39,9 @@ class RowController {
Future<void> initialize() async { Future<void> initialize() async {
await _rowBackendSvc.initRow(rowMeta.id); await _rowBackendSvc.initRow(rowMeta.id);
_rowListener.start( _rowListener.start(
onRowFetched: (DidFetchRowPB row) {
_rowCache.setRowMeta(row.meta);
},
onMetaChanged: (newRowMeta) { onMetaChanged: (newRowMeta) {
if (_isDisposed) { if (_isDisposed) {
return; return;

View File

@@ -1,3 +1,5 @@
import 'dart:math';
import 'package:appflowy/generated/locale_keys.g.dart'; import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/plugins/database/application/row/row_service.dart'; import 'package:appflowy/plugins/database/application/row/row_service.dart';
import 'package:appflowy/plugins/database/grid/presentation/widgets/calculations/calculations_row.dart'; import 'package:appflowy/plugins/database/grid/presentation/widgets/calculations/calculations_row.dart';
@@ -305,22 +307,26 @@ class _GridRowsState extends State<_GridRows> {
buildWhen: (previous, current) => previous.fields != current.fields, buildWhen: (previous, current) => previous.fields != current.fields,
builder: (context, state) { builder: (context, state) {
return Flexible( return Flexible(
child: _WrapScrollView( child: LayoutBuilder(
scrollController: widget.scrollController, builder: (BuildContext context, BoxConstraints layoutConstraits) {
contentWidth: GridLayout.headerWidth(state.fields), return _WrapScrollView(
child: BlocConsumer<GridBloc, GridState>( scrollController: widget.scrollController,
listenWhen: (previous, current) => contentWidth: GridLayout.headerWidth(state.fields),
previous.rowCount != current.rowCount, child: BlocConsumer<GridBloc, GridState>(
listener: (context, state) => _evaluateFloatingCalculations(), listenWhen: (previous, current) =>
builder: (context, state) { previous.rowCount != current.rowCount,
return ScrollConfiguration( listener: (context, state) => _evaluateFloatingCalculations(),
behavior: ScrollConfiguration.of(context).copyWith( builder: (context, state) {
scrollbars: false, return ScrollConfiguration(
), behavior: ScrollConfiguration.of(context).copyWith(
child: _renderList(context, state), scrollbars: false,
); ),
}, child: _renderList(context, state, layoutConstraits),
), );
},
),
);
},
), ),
); );
}, },
@@ -330,19 +336,19 @@ class _GridRowsState extends State<_GridRows> {
Widget _renderList( Widget _renderList(
BuildContext context, BuildContext context,
GridState state, GridState state,
BoxConstraints layoutConstraints,
) { ) {
// 1. GridRowBottomBar // 1. GridRowBottomBar
// 2. GridCalculationsRow // 2. GridCalculationsRow
// 3. Footer Padding // 3. Footer Padding
final itemCount = state.rowInfos.length + 3; final itemCount = state.rowInfos.length + 3;
return Stack( return Stack(
children: [ children: [
Positioned.fill( Positioned.fill(
child: ReorderableListView.builder( child: ReorderableListView.builder(
/// This is a workaround related to /// This is a workaround related to
/// https://github.com/flutter/flutter/issues/25652 /// https://github.com/flutter/flutter/issues/25652
cacheExtent: 600, cacheExtent: max(layoutConstraints.maxHeight * 2, 500),
scrollController: widget.scrollController.verticalController, scrollController: widget.scrollController.verticalController,
physics: const ClampingScrollPhysics(), physics: const ClampingScrollPhysics(),
buildDefaultDragHandles: false, buildDefaultDragHandles: false,

View File

@@ -57,7 +57,7 @@ class _DatabaseViewSettingContent extends StatelessWidget {
builder: (context, state) { builder: (context, state) {
return Padding( return Padding(
padding: EdgeInsets.symmetric( padding: EdgeInsets.symmetric(
horizontal: GridSize.horizontalHeaderPadding + 40, horizontal: GridSize.horizontalHeaderPadding,
), ),
child: DecoratedBox( child: DecoratedBox(
decoration: BoxDecoration( decoration: BoxDecoration(

View File

@@ -964,7 +964,7 @@ dependencies = [
[[package]] [[package]]
name = "collab" name = "collab"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
@@ -989,7 +989,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-database" name = "collab-database"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@@ -1018,7 +1018,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-document" name = "collab-document"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
@@ -1038,7 +1038,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-entity" name = "collab-entity"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@@ -1057,7 +1057,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-folder" name = "collab-folder"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
@@ -1100,7 +1100,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-plugins" name = "collab-plugins"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-stream", "async-stream",
@@ -1180,7 +1180,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-user" name = "collab-user"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"collab", "collab",

View File

@@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"]
# To switch to the local path, run: # To switch to the local path, run:
# scripts/tool/update_collab_source.sh # scripts/tool/update_collab_source.sh
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
# Working directory: frontend # Working directory: frontend
# To update the commit ID, run: # To update the commit ID, run:

View File

@@ -947,7 +947,7 @@ dependencies = [
[[package]] [[package]]
name = "collab" name = "collab"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
@@ -972,7 +972,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-database" name = "collab-database"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@@ -1001,7 +1001,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-document" name = "collab-document"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
@@ -1021,7 +1021,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-entity" name = "collab-entity"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@@ -1040,7 +1040,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-folder" name = "collab-folder"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
@@ -1083,7 +1083,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-plugins" name = "collab-plugins"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-stream", "async-stream",
@@ -1163,7 +1163,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-user" name = "collab-user"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"collab", "collab",

View File

@@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"]
# To switch to the local path, run: # To switch to the local path, run:
# scripts/tool/update_collab_source.sh # scripts/tool/update_collab_source.sh
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
# Working directory: frontend # Working directory: frontend
# To update the commit ID, run: # To update the commit ID, run:

View File

@@ -825,7 +825,7 @@ dependencies = [
[[package]] [[package]]
name = "collab" name = "collab"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
@@ -850,7 +850,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-database" name = "collab-database"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@@ -879,7 +879,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-document" name = "collab-document"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
@@ -899,7 +899,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-entity" name = "collab-entity"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@@ -918,7 +918,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-folder" name = "collab-folder"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"arc-swap", "arc-swap",
@@ -961,7 +961,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-plugins" name = "collab-plugins"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-stream", "async-stream",
@@ -1041,7 +1041,7 @@ dependencies = [
[[package]] [[package]]
name = "collab-user" name = "collab-user"
version = "0.2.0" version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"collab", "collab",

View File

@@ -136,13 +136,13 @@ rocksdb = { git = "https://github.com/rust-rocksdb/rust-rocksdb", rev = "1710120
# To switch to the local path, run: # To switch to the local path, run:
# scripts/tool/update_collab_source.sh # scripts/tool/update_collab_source.sh
# ⚠️⚠️⚠️️ # ⚠️⚠️⚠️️
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" } collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
# Working directory: frontend # Working directory: frontend
# To update the commit ID, run: # To update the commit ID, run:

View File

@@ -107,7 +107,7 @@ impl AppFlowyCore {
let store_preference = Arc::new(KVStorePreferences::new(&config.storage_path).unwrap()); let store_preference = Arc::new(KVStorePreferences::new(&config.storage_path).unwrap());
info!("🔥{:?}", &config); info!("🔥{:?}", &config);
let task_scheduler = TaskDispatcher::new(Duration::from_secs(2)); let task_scheduler = TaskDispatcher::new(Duration::from_secs(10));
let task_dispatcher = Arc::new(RwLock::new(task_scheduler)); let task_dispatcher = Arc::new(RwLock::new(task_scheduler));
runtime.spawn(TaskRunner::run(task_dispatcher.clone())); runtime.spawn(TaskRunner::run(task_dispatcher.clone()));

View File

@@ -86,6 +86,18 @@ impl From<RowOrder> for RowMetaPB {
} }
} }
impl From<Row> for RowMetaPB {
fn from(data: Row) -> Self {
Self {
id: data.id.into_inner(),
document_id: None,
icon: None,
cover: None,
is_document_empty: None,
}
}
}
impl std::convert::From<RowDetail> for RowMetaPB { impl std::convert::From<RowDetail> for RowMetaPB {
fn from(row_detail: RowDetail) -> Self { fn from(row_detail: RowDetail) -> Self {
Self { Self {

View File

@@ -36,7 +36,9 @@ pub(crate) async fn get_database_data_handler(
.get_database_id_with_view_id(view_id.as_ref()) .get_database_id_with_view_id(view_id.as_ref())
.await?; .await?;
let database_editor = manager.get_database_editor(&database_id).await?; let database_editor = manager.get_database_editor(&database_id).await?;
let data = database_editor.open_database(view_id.as_ref()).await?; let data = database_editor
.async_open_database_view(view_id.as_ref())
.await?;
trace!( trace!(
"layout: {:?}, rows: {}, fields: {}", "layout: {:?}, rows: {}, fields: {}",
data.layout_type, data.layout_type,
@@ -57,9 +59,7 @@ pub(crate) async fn get_all_rows_handler(
.get_database_id_with_view_id(view_id.as_ref()) .get_database_id_with_view_id(view_id.as_ref())
.await?; .await?;
let database_editor = manager.get_database_editor(&database_id).await?; let database_editor = manager.get_database_editor(&database_id).await?;
let row_details = database_editor let row_details = database_editor.get_all_rows(view_id.as_ref()).await?;
.get_all_row_details(view_id.as_ref())
.await?;
let rows = row_details let rows = row_details
.into_iter() .into_iter()
.map(|detail| RowMetaPB::from(detail.as_ref().clone())) .map(|detail| RowMetaPB::from(detail.as_ref().clone()))

View File

@@ -20,12 +20,15 @@ use crate::services::share::csv::{CSVExport, CSVFormat};
use crate::services::sort::Sort; use crate::services::sort::Sort;
use crate::utils::cache::AnyTypeCache; use crate::utils::cache::AnyTypeCache;
use crate::DatabaseUser; use crate::DatabaseUser;
use arc_swap::ArcSwap;
use async_trait::async_trait; use async_trait::async_trait;
use collab_database::database::Database; use collab_database::database::Database;
use collab_database::entity::DatabaseView; use collab_database::entity::DatabaseView;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cell, Cells, Row, RowCell, RowDetail, RowId}; use collab_database::rows::{Cell, Cells, Row, RowCell, RowDetail, RowId};
use collab_database::views::{DatabaseLayout, FilterMap, LayoutSetting, OrderObjectPosition}; use collab_database::views::{
DatabaseLayout, FilterMap, LayoutSetting, OrderObjectPosition, RowOrder,
};
use collab_entity::CollabType; use collab_entity::CollabType;
use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig}; use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig};
use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult}; use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult};
@@ -35,11 +38,15 @@ use lib_infra::priority_task::TaskDispatcher;
use lib_infra::util::timestamp; use lib_infra::util::timestamp;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::{broadcast, RwLock}; use std::time::Duration;
use tokio::sync::{broadcast, oneshot, RwLock};
use tokio_util::sync::CancellationToken; use tokio_util::sync::CancellationToken;
use tracing::{debug, error, event, info, instrument, trace, warn}; use tracing::{debug, error, event, info, instrument, trace, warn};
type OpenDatabaseResult = oneshot::Sender<FlowyResult<DatabasePB>>;
pub struct DatabaseEditor { pub struct DatabaseEditor {
database_id: String,
pub(crate) database: Arc<RwLock<Database>>, pub(crate) database: Arc<RwLock<Database>>,
pub cell_cache: CellCache, pub cell_cache: CellCache,
pub(crate) database_views: Arc<DatabaseViews>, pub(crate) database_views: Arc<DatabaseViews>,
@@ -48,6 +55,8 @@ pub struct DatabaseEditor {
notification_sender: Arc<DebounceNotificationSender>, notification_sender: Arc<DebounceNotificationSender>,
user: Arc<dyn DatabaseUser>, user: Arc<dyn DatabaseUser>,
collab_builder: Arc<AppFlowyCollabBuilder>, collab_builder: Arc<AppFlowyCollabBuilder>,
is_opening: ArcSwap<bool>,
opening_ret_txs: Arc<RwLock<Vec<OpenDatabaseResult>>>,
database_cancellation: Arc<RwLock<Option<CancellationToken>>>, database_cancellation: Arc<RwLock<Option<CancellationToken>>>,
} }
@@ -101,12 +110,15 @@ impl DatabaseEditor {
database.clone(), database.clone(),
)?; )?;
let this = Arc::new(Self { let this = Arc::new(Self {
database_id: database_id.clone(),
user, user,
database, database,
cell_cache, cell_cache,
database_views, database_views,
notification_sender, notification_sender,
collab_builder, collab_builder,
is_opening: Default::default(),
opening_ret_txs: Arc::new(Default::default()),
database_cancellation, database_cancellation,
}); });
observe_block_event(&database_id, &this).await; observe_block_event(&database_id, &this).await;
@@ -509,7 +521,7 @@ impl DatabaseEditor {
} }
pub async fn duplicate_row(&self, view_id: &str, row_id: &RowId) -> FlowyResult<()> { pub async fn duplicate_row(&self, view_id: &str, row_id: &RowId) -> FlowyResult<()> {
let (row_detail, index) = { let (row, index) = {
let mut database = self.database.write().await; let mut database = self.database.write().await;
let params = database let params = database
@@ -524,22 +536,12 @@ impl DatabaseEditor {
index, index,
row_order row_order
); );
let row_detail = database.get_row_detail(&row_order.id).await; let row = database.get_row(&row_order.id).await;
(row_detail, index) (row, index)
}; };
match row_detail { for view in self.database_views.editors().await {
None => { view.v_did_create_row(&row, index).await;
error!(
"Failed to duplicate row: {:?}. Row is not exist before duplicating",
row_id
);
},
Some(row_detail) => {
for view in self.database_views.editors().await {
view.v_did_create_row(&row_detail, index).await;
}
},
} }
Ok(()) Ok(())
@@ -596,7 +598,7 @@ impl DatabaseEditor {
if let Some(row_detail) = row_detail { if let Some(row_detail) = row_detail {
trace!("created row: {:?} at {}", row_detail, index); trace!("created row: {:?} at {}", row_detail, index);
for view in self.database_views.editors().await { for view in self.database_views.editors().await {
view.v_did_create_row(&row_detail, index).await; view.v_did_create_row(&row_detail.row, index).await;
} }
return Ok(Some(row_detail)); return Ok(Some(row_detail));
} }
@@ -677,9 +679,14 @@ impl DatabaseEditor {
Ok(()) Ok(())
} }
pub async fn get_all_row_details(&self, view_id: &str) -> FlowyResult<Vec<Arc<RowDetail>>> { pub async fn get_all_rows(&self, view_id: &str) -> FlowyResult<Vec<Arc<Row>>> {
let view_editor = self.database_views.get_view_editor(view_id).await?; let view_editor = self.database_views.get_view_editor(view_id).await?;
Ok(view_editor.v_get_all_row_details().await) Ok(view_editor.v_get_all_rows().await)
}
pub async fn get_all_row_orders(&self, view_id: &str) -> FlowyResult<Vec<RowOrder>> {
let orders = self.database.read().await.get_row_orders_for_view(view_id);
Ok(orders)
} }
pub async fn get_row(&self, view_id: &str, row_id: &RowId) -> Option<Row> { pub async fn get_row(&self, view_id: &str, row_id: &RowId) -> Option<Row> {
@@ -900,7 +907,7 @@ impl DatabaseEditor {
new_cell: Cell, new_cell: Cell,
) -> FlowyResult<()> { ) -> FlowyResult<()> {
// Get the old row before updating the cell. It would be better to get the old cell // Get the old row before updating the cell. It would be better to get the old cell
let old_row = self.get_row_detail(view_id, row_id).await; let old_row = self.get_row(view_id, row_id).await;
self self
.database .database
.write() .write()
@@ -923,7 +930,7 @@ impl DatabaseEditor {
pub async fn clear_cell(&self, view_id: &str, row_id: RowId, field_id: &str) -> FlowyResult<()> { pub async fn clear_cell(&self, view_id: &str, row_id: RowId, field_id: &str) -> FlowyResult<()> {
// Get the old row before updating the cell. It would be better to get the old cell // Get the old row before updating the cell. It would be better to get the old cell
let old_row = self.get_row_detail(view_id, &row_id).await; let old_row = self.get_row(view_id, &row_id).await;
self self
.database .database
@ -948,13 +955,13 @@ impl DatabaseEditor {
view_id: &str, view_id: &str,
row_id: &RowId, row_id: &RowId,
field_id: &str, field_id: &str,
old_row: Option<RowDetail>, old_row: Option<Row>,
) { ) {
let option_row = self.get_row_detail(view_id, row_id).await; let option_row = self.get_row(view_id, row_id).await;
if let Some(new_row_detail) = option_row { if let Some(row) = option_row {
for view in self.database_views.editors().await { for view in self.database_views.editors().await {
view view
.v_did_update_row(&old_row, &new_row_detail, Some(field_id.to_owned())) .v_did_update_row(&old_row, &row, Some(field_id.to_owned()))
.await; .await;
} }
} }
@ -1153,16 +1160,19 @@ impl DatabaseEditor {
let view = self.database_views.get_view_editor(view_id).await?; let view = self.database_views.get_view_editor(view_id).await?;
let mut row_changeset = RowChangeset::new(row_detail.row.id.clone()); let mut row_changeset = RowChangeset::new(row_detail.row.id.clone());
view view
.v_move_group_row(&row_detail, &mut row_changeset, to_group, to_row.clone()) .v_move_group_row(
&row_detail.row,
&mut row_changeset,
to_group,
to_row.clone(),
)
.await; .await;
let to_row = if to_row.is_some() { let to_row = if to_row.is_some() {
to_row to_row
} else { } else {
let row_details = self.get_all_row_details(view_id).await?; let row_details = self.get_all_rows(view_id).await?;
row_details row_details.last().map(|row| row.id.clone())
.last()
.map(|row_detail| row_detail.row.id.clone())
}; };
if let Some(row_id) = to_row.clone() { if let Some(row_id) = to_row.clone() {
self.move_row(view_id, from_row.clone(), row_id).await?; self.move_row(view_id, from_row.clone(), row_id).await?;
@ -1283,11 +1293,15 @@ impl DatabaseEditor {
.read() .read()
.await .await
.get_view(view_id) .get_view(view_id)
.ok_or_else(|| FlowyError::record_not_found().with_context("Can't find the database view"))?; .ok_or_else(|| {
FlowyError::record_not_found()
.with_context(format!("Can't find the database view:{}", view_id))
})?;
Ok(database_view_setting_pb_from_view(view)) Ok(database_view_setting_pb_from_view(view))
} }
pub async fn close_database(&self) { pub async fn close_database(&self) {
info!("Close database: {}", self.database_id);
let cancellation = self.database_cancellation.read().await; let cancellation = self.database_cancellation.read().await;
if let Some(cancellation) = &*cancellation { if let Some(cancellation) = &*cancellation {
info!("Cancel database operation"); info!("Cancel database operation");
@ -1295,59 +1309,152 @@ impl DatabaseEditor {
} }
} }
pub async fn open_database(&self, view_id: &str) -> FlowyResult<DatabasePB> { // Only used in test
let view_layout = self.database.read().await.get_database_view_layout(view_id); #[cfg(debug_assertions)]
let new_token = CancellationToken::new(); pub async fn open_database_view(&self, view_id: &str) -> FlowyResult<DatabasePB> {
let rows = self
if let Some(old_token) = self .get_all_rows(view_id)
.database_cancellation
.write()
.await
.replace(new_token.clone())
{
old_token.cancel();
}
let row_details = self
.database_views
.get_view_editor(view_id)
.await? .await?
.v_get_all_row_details()
.await;
let (database_id, fields, is_linked) = {
let database = self.database.read().await;
let database_id = database.get_database_id();
let fields = database
.get_all_field_orders()
.into_iter()
.map(FieldIdPB::from)
.collect::<Vec<_>>();
let is_linked = database.is_inline_view(view_id);
(database_id, fields, is_linked)
};
let rows = row_details
.into_iter() .into_iter()
.map(|order| RowMetaPB::from(order.as_ref().clone())) .map(|order| RowMetaPB::from(order.as_ref().clone()))
.collect::<Vec<RowMetaPB>>(); .collect::<Vec<RowMetaPB>>();
let view_layout = self.database.read().await.get_database_view_layout(view_id);
trace!( let fields = self
"database: {}, num fields: {}, num row: {}", .database
database_id, .read()
fields.len(), .await
rows.len() .get_all_field_orders()
); .into_iter()
self.database_cancellation.write().await.take(); .map(FieldIdPB::from)
.collect::<Vec<_>>();
Ok(DatabasePB { Ok(DatabasePB {
id: database_id, id: self.database_id.clone(),
fields, fields,
rows, rows,
layout_type: view_layout.into(), layout_type: view_layout.into(),
is_linked, is_linked: self.database.read().await.is_inline_view(view_id),
}) })
} }
pub async fn async_open_database_view(&self, view_id: &str) -> FlowyResult<DatabasePB> {
info!("Open database: {}, view: {}", self.database_id, view_id);
let (tx, rx) = oneshot::channel();
self.opening_ret_txs.write().await.push(tx);
// Check if the database is currently being opened
if !*self.is_opening.load_full() {
self.is_opening.store(Arc::new(true));
let fut = async {
let view_layout = self.database.read().await.get_database_view_layout(view_id);
let new_token = CancellationToken::new();
if let Some(old_token) = self
.database_cancellation
.write()
.await
.replace(new_token.clone())
{
old_token.cancel();
}
let row_orders = self.database.read().await.get_row_orders_for_view(view_id);
let cloned_database = Arc::downgrade(&self.database);
let cloned_row_orders = row_orders.clone();
let opening_database_views = self.database_views.clone();
tokio::spawn(async move {
const CHUNK_SIZE: usize = 10;
let mut loaded_rows = vec![];
for chunk_row_orders in cloned_row_orders.chunks(CHUNK_SIZE) {
match cloned_database.upgrade() {
None => break,
Some(database) => {
for row_order in chunk_row_orders {
if let Some(database_row) =
database.read().await.init_database_row(&row_order.id).await
{
if let Some(row) = database_row.read().await.get_row() {
loaded_rows.push(Arc::new(row));
}
}
}
// stop init database rows
if new_token.is_cancelled() {
return;
}
if loaded_rows.len() % 100 == 0 {
for database_view in opening_database_views.editors().await {
let mut view_rows = loaded_rows.clone();
database_view.v_filter_rows_and_notify(&mut view_rows).await;
database_view.v_sort_rows_and_notify(&mut view_rows).await;
}
}
},
}
tokio::task::yield_now().await;
}
for database_view in opening_database_views.editors().await {
let mut view_rows = loaded_rows.clone();
database_view.v_filter_rows_and_notify(&mut view_rows).await;
database_view.v_sort_rows_and_notify(&mut view_rows).await;
}
});
// Collect database details in a single block holding the `read` lock
let (database_id, fields, is_linked) = {
let database = self.database.read().await;
(
database.get_database_id(),
database
.get_all_field_orders()
.into_iter()
.map(FieldIdPB::from)
.collect::<Vec<_>>(),
database.is_inline_view(view_id),
)
};
let rows = row_orders
.into_iter()
.map(RowMetaPB::from)
.collect::<Vec<RowMetaPB>>();
trace!(
"database: {}, num fields: {}, num rows: {}",
database_id,
fields.len(),
rows.len()
);
Ok::<_, FlowyError>(DatabasePB {
id: database_id,
fields,
rows,
layout_type: view_layout.into(),
is_linked,
})
};
let result = fut.await;
// Mark that the opening process is complete
self.is_opening.store(Arc::new(false));
// Clear cancellation token
self.database_cancellation.write().await.take();
// Collect all waiting tasks and send the result
let txs = std::mem::take(&mut *self.opening_ret_txs.write().await);
for tx in txs {
let _ = tx.send(result.clone());
}
}
// Wait for the result or timeout after 60 seconds
match tokio::time::timeout(Duration::from_secs(60), rx).await {
Ok(result) => result.map_err(internal_error)?,
Err(_) => Err(FlowyError::internal().with_context("Timeout while opening database view")),
}
}
pub async fn export_csv(&self, style: CSVFormat) -> FlowyResult<String> { pub async fn export_csv(&self, style: CSVFormat) -> FlowyResult<String> {
let database = self.database.clone(); let database = self.database.clone();
let database_guard = database.read().await; let database_guard = database.read().await;
@ -1562,11 +1669,11 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl {
} }
} }
async fn get_all_row_details(&self, view_id: &str) -> Vec<Arc<RowDetail>> { async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<Row>> {
let view_id = view_id.to_string(); let view_id = view_id.to_string();
let row_orders = self.database.read().await.get_row_orders_for_view(&view_id); let row_orders = self.database.read().await.get_row_orders_for_view(&view_id);
trace!("{} has total row orders: {}", view_id, row_orders.len()); trace!("{} has total row orders: {}", view_id, row_orders.len());
let mut row_details_list = vec![]; let mut all_rows = vec![];
// Loading the rows in chunks of 10 rows in order to prevent blocking the main asynchronous runtime // Loading the rows in chunks of 10 rows in order to prevent blocking the main asynchronous runtime
const CHUNK_SIZE: usize = 10; const CHUNK_SIZE: usize = 10;
let cancellation = self let cancellation = self
@ -1579,25 +1686,18 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl {
let database_read_guard = self.database.read().await; let database_read_guard = self.database.read().await;
let chunk = chunk.to_vec(); let chunk = chunk.to_vec();
let rows = database_read_guard.get_rows_from_row_orders(&chunk).await; let rows = database_read_guard.get_rows_from_row_orders(&chunk).await;
for row in rows { if let Some(cancellation) = &cancellation {
if let Some(cancellation) = &cancellation { if cancellation.is_cancelled() {
if cancellation.is_cancelled() { info!("Get all database row is cancelled:{}", view_id);
info!("Get all database row is cancelled:{}", view_id); return vec![];
return vec![];
}
}
match database_read_guard.get_row_detail(&row.id).await {
None => warn!("Failed to get row detail for row: {}", row.id.as_str()),
Some(row_details) => {
row_details_list.push(row_details);
},
} }
} }
all_rows.extend(rows);
drop(database_read_guard); drop(database_read_guard);
tokio::task::yield_now().await; tokio::task::yield_now().await;
} }
trace!("total row details: {}", row_details_list.len()); trace!("total row details: {}", all_rows.len());
row_details_list.into_iter().map(Arc::new).collect() all_rows.into_iter().map(Arc::new).collect()
} }
async fn remove_row(&self, row_id: &RowId) -> Option<Row> { async fn remove_row(&self, row_id: &RowId) -> Option<Row> {

View File

@ -159,25 +159,12 @@ pub(crate) async fn observe_block_event(database_id: &str, database_editor: &Arc
BlockEvent::DidFetchRow(row_details) => { BlockEvent::DidFetchRow(row_details) => {
for row_detail in row_details { for row_detail in row_details {
trace!("Did fetch row: {:?}", row_detail.row.id); trace!("Did fetch row: {:?}", row_detail.row.id);
let row_id = row_detail.row.id.clone(); let row_id = row_detail.row.id.clone();
let pb = DidFetchRowPB::from(row_detail); let pb = DidFetchRowPB::from(row_detail);
send_notification(&row_id, DatabaseNotification::DidFetchRow) send_notification(&row_id, DatabaseNotification::DidFetchRow)
.payload(pb) .payload(pb)
.send(); .send();
} }
// let cloned_token = token.clone();
// tokio::spawn(async move {
// tokio::time::sleep(Duration::from_secs(2)).await;
// if cloned_token.is_cancelled() {
// }
// // if let Some(database_editor) = cloned_database_editor.upgrade() {
// // TODO(nathan): calculate inserted row with RowsVisibilityChangePB
// // for view_editor in database_editor.database_views.editors().await {
// // }
// // }
// });
}, },
} }
} }

View File

@ -1,6 +1,3 @@
use collab_database::entity::DatabaseView;
use collab_database::views::DatabaseLayout;
use crate::entities::{ use crate::entities::{
DatabaseLayoutPB, DatabaseLayoutSettingPB, DatabaseViewSettingPB, FieldSettingsPB, FilterPB, DatabaseLayoutPB, DatabaseLayoutSettingPB, DatabaseViewSettingPB, FieldSettingsPB, FilterPB,
GroupSettingPB, SortPB, GroupSettingPB, SortPB,
@ -9,6 +6,9 @@ use crate::services::field_settings::FieldSettings;
use crate::services::filter::Filter; use crate::services::filter::Filter;
use crate::services::group::GroupSetting; use crate::services::group::GroupSetting;
use crate::services::sort::Sort; use crate::services::sort::Sort;
use collab_database::entity::DatabaseView;
use collab_database::views::DatabaseLayout;
use tracing::error;
pub(crate) fn database_view_setting_pb_from_view(view: DatabaseView) -> DatabaseViewSettingPB { pub(crate) fn database_view_setting_pb_from_view(view: DatabaseView) -> DatabaseViewSettingPB {
let layout_type: DatabaseLayoutPB = view.layout.into(); let layout_type: DatabaseLayoutPB = view.layout.into();
@ -33,7 +33,10 @@ pub(crate) fn database_view_setting_pb_from_view(view: DatabaseView) -> Database
.into_iter() .into_iter()
.flat_map(|value| match Filter::try_from(value) { .flat_map(|value| match Filter::try_from(value) {
Ok(filter) => Some(FilterPB::from(&filter)), Ok(filter) => Some(FilterPB::from(&filter)),
Err(_) => None, Err(err) => {
error!("Error converting filter: {:?}", err);
None
},
}) })
.collect::<Vec<FilterPB>>(); .collect::<Vec<FilterPB>>();
@ -42,7 +45,10 @@ pub(crate) fn database_view_setting_pb_from_view(view: DatabaseView) -> Database
.into_iter() .into_iter()
.flat_map(|value| match GroupSetting::try_from(value) { .flat_map(|value| match GroupSetting::try_from(value) {
Ok(setting) => Some(GroupSettingPB::from(&setting)), Ok(setting) => Some(GroupSettingPB::from(&setting)),
Err(_) => None, Err(err) => {
error!("Error converting group setting: {:?}", err);
None
},
}) })
.collect::<Vec<GroupSettingPB>>(); .collect::<Vec<GroupSettingPB>>();
@ -51,7 +57,10 @@ pub(crate) fn database_view_setting_pb_from_view(view: DatabaseView) -> Database
.into_iter() .into_iter()
.flat_map(|value| match Sort::try_from(value) { .flat_map(|value| match Sort::try_from(value) {
Ok(sort) => Some(SortPB::from(&sort)), Ok(sort) => Some(SortPB::from(&sort)),
Err(_) => None, Err(err) => {
error!("Error converting sort: {:?}", err);
None
},
}) })
.collect::<Vec<SortPB>>(); .collect::<Vec<SortPB>>();

View File

@ -180,14 +180,14 @@ impl DatabaseViewEditor {
.send(); .send();
} }
pub async fn v_did_create_row(&self, row_detail: &RowDetail, index: usize) { pub async fn v_did_create_row(&self, row: &Row, index: usize) {
// Send the group notification if the current view has groups // Send the group notification if the current view has groups
if let Some(controller) = self.group_controller.write().await.as_mut() { if let Some(controller) = self.group_controller.write().await.as_mut() {
let mut row_details = vec![Arc::new(row_detail.clone())]; let mut rows = vec![Arc::new(row.clone())];
self.v_filter_rows(&mut row_details).await; self.v_filter_rows(&mut rows).await;
if let Some(row_detail) = row_details.pop() { if let Some(row) = rows.pop() {
let changesets = controller.did_create_row(&row_detail, index); let changesets = controller.did_create_row(&row, index);
for changeset in changesets { for changeset in changesets {
notify_did_update_group_rows(changeset).await; notify_did_update_group_rows(changeset).await;
@ -195,9 +195,7 @@ impl DatabaseViewEditor {
} }
} }
self self.gen_did_create_row_view_tasks(index, row.clone()).await;
.gen_did_create_row_view_tasks(index, row_detail.clone())
.await;
} }
#[tracing::instrument(level = "trace", skip_all)] #[tracing::instrument(level = "trace", skip_all)]
@ -244,12 +242,7 @@ impl DatabaseViewEditor {
/// Notify the view that the row has been updated. If the view has groups, /// Notify the view that the row has been updated. If the view has groups,
/// send the group notification with [GroupRowsNotificationPB]. Otherwise, /// send the group notification with [GroupRowsNotificationPB]. Otherwise,
/// send the view notification with [RowsChangePB] /// send the view notification with [RowsChangePB]
pub async fn v_did_update_row( pub async fn v_did_update_row(&self, old_row: &Option<Row>, row: &Row, field_id: Option<String>) {
&self,
old_row: &Option<RowDetail>,
row_detail: &RowDetail,
field_id: Option<String>,
) {
if let Some(controller) = self.group_controller.write().await.as_mut() { if let Some(controller) = self.group_controller.write().await.as_mut() {
let field = self let field = self
.delegate .delegate
@ -257,11 +250,11 @@ impl DatabaseViewEditor {
.await; .await;
if let Some(field) = field { if let Some(field) = field {
let mut row_details = vec![Arc::new(row_detail.clone())]; let mut rows = vec![Arc::new(row.clone())];
self.v_filter_rows(&mut row_details).await; self.v_filter_rows(&mut rows).await;
if let Some(row_detail) = row_details.pop() { if let Some(row) = rows.pop() {
let result = controller.did_update_group_row(old_row, &row_detail, &field); let result = controller.did_update_group_row(old_row, &row, &field);
if let Ok(result) = result { if let Ok(result) = result {
let mut group_changes = GroupChangesPB { let mut group_changes = GroupChangesPB {
@ -295,26 +288,34 @@ impl DatabaseViewEditor {
// Each row update will trigger a calculations, filter and sort operation. We don't want // Each row update will trigger a calculations, filter and sort operation. We don't want
// to block the main thread, so we spawn a new task to do the work. // to block the main thread, so we spawn a new task to do the work.
self self
.gen_did_update_row_view_tasks(row_detail.row.id.clone(), field_id) .gen_did_update_row_view_tasks(row.id.clone(), field_id)
.await; .await;
} }
pub async fn v_filter_rows(&self, row_details: &mut Vec<Arc<RowDetail>>) { pub async fn v_filter_rows(&self, rows: &mut Vec<Arc<Row>>) {
self.filter_controller.filter_rows(row_details).await self.filter_controller.filter_rows(rows).await
} }
pub async fn v_sort_rows(&self, row_details: &mut Vec<Arc<RowDetail>>) { pub async fn v_filter_rows_and_notify(&self, rows: &mut Vec<Arc<Row>>) {
let _ = self.filter_controller.filter_rows_and_notify(rows).await;
}
pub async fn v_sort_rows(&self, rows: &mut Vec<Arc<Row>>) {
self.sort_controller.write().await.sort_rows(rows).await
}
pub async fn v_sort_rows_and_notify(&self, rows: &mut Vec<Arc<Row>>) {
self self
.sort_controller .sort_controller
.write() .write()
.await .await
.sort_rows(row_details) .sort_rows_and_notify(rows)
.await .await;
} }
#[instrument(level = "info", skip(self))] #[instrument(level = "info", skip(self))]
pub async fn v_get_all_row_details(&self) -> Vec<Arc<RowDetail>> { pub async fn v_get_all_rows(&self) -> Vec<Arc<Row>> {
let mut rows = self.delegate.get_all_row_details(&self.view_id).await; let mut rows = self.delegate.get_all_rows(&self.view_id).await;
self.v_filter_rows(&mut rows).await; self.v_filter_rows(&mut rows).await;
self.v_sort_rows(&mut rows).await; self.v_sort_rows(&mut rows).await;
rows rows
@ -322,7 +323,7 @@ impl DatabaseViewEditor {
pub async fn v_move_group_row( pub async fn v_move_group_row(
&self, &self,
row_detail: &RowDetail, row: &Row,
row_changeset: &mut RowChangeset, row_changeset: &mut RowChangeset,
to_group_id: &str, to_group_id: &str,
to_row_id: Option<RowId>, to_row_id: Option<RowId>,
@ -330,7 +331,7 @@ impl DatabaseViewEditor {
let result = self let result = self
.mut_group_controller(|group_controller, field| { .mut_group_controller(|group_controller, field| {
let move_row_context = MoveGroupRowContext { let move_row_context = MoveGroupRowContext {
row_detail, row,
row_changeset, row_changeset,
field: &field, field: &field,
to_group_id, to_group_id,
@ -1126,7 +1127,7 @@ impl DatabaseViewEditor {
}); });
} }
async fn gen_did_create_row_view_tasks(&self, preliminary_index: usize, row_detail: RowDetail) { async fn gen_did_create_row_view_tasks(&self, preliminary_index: usize, row: Row) {
let weak_sort_controller = Arc::downgrade(&self.sort_controller); let weak_sort_controller = Arc::downgrade(&self.sort_controller);
let weak_calculations_controller = Arc::downgrade(&self.calculations_controller); let weak_calculations_controller = Arc::downgrade(&self.calculations_controller);
af_spawn(async move { af_spawn(async move {
@ -1134,13 +1135,13 @@ impl DatabaseViewEditor {
sort_controller sort_controller
.read() .read()
.await .await
.did_create_row(preliminary_index, &row_detail) .did_create_row(preliminary_index, &row)
.await; .await;
} }
if let Some(calculations_controller) = weak_calculations_controller.upgrade() { if let Some(calculations_controller) = weak_calculations_controller.upgrade() {
calculations_controller calculations_controller
.did_receive_row_changed(row_detail.row.clone()) .did_receive_row_changed(row.clone())
.await; .await;
} }
}); });

View File

@ -2,7 +2,7 @@ use async_trait::async_trait;
use std::sync::Arc; use std::sync::Arc;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId}; use collab_database::rows::{Row, RowDetail, RowId};
use crate::services::cell::CellCache; use crate::services::cell::CellCache;
use crate::services::database_view::{ use crate::services::database_view::{
@ -52,8 +52,8 @@ impl FilterDelegate for DatabaseViewFilterDelegateImpl {
self.0.get_fields(view_id, field_ids).await self.0.get_fields(view_id, field_ids).await
} }
async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> { async fn get_rows(&self, view_id: &str) -> Vec<Arc<Row>> {
self.0.get_all_row_details(view_id).await self.0.get_all_rows(view_id).await
} }
async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc<RowDetail>)> { async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc<RowDetail>)> {

View File

@ -2,7 +2,7 @@ use async_trait::async_trait;
use std::sync::Arc; use std::sync::Arc;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId}; use collab_database::rows::{Row, RowId};
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
@ -96,10 +96,10 @@ impl GroupControllerDelegate for GroupControllerDelegateImpl {
self.delegate.get_field(field_id).await self.delegate.get_field(field_id).await
} }
async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> { async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<Row>> {
let mut row_details = self.delegate.get_all_row_details(view_id).await; let mut rows = self.delegate.get_all_rows(view_id).await;
self.filter_controller.filter_rows(&mut row_details).await; self.filter_controller.filter_rows(&mut rows).await;
row_details rows
} }
} }

View File

@ -56,7 +56,7 @@ pub trait DatabaseViewOperation: Send + Sync + 'static {
async fn get_row_detail(&self, view_id: &str, row_id: &RowId) -> Option<(usize, Arc<RowDetail>)>; async fn get_row_detail(&self, view_id: &str, row_id: &RowId) -> Option<(usize, Arc<RowDetail>)>;
/// Returns all the rows in the view /// Returns all the rows in the view
async fn get_all_row_details(&self, view_id: &str) -> Vec<Arc<RowDetail>>; async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<Row>>;
async fn remove_row(&self, row_id: &RowId) -> Option<Row>; async fn remove_row(&self, row_id: &RowId) -> Option<Row>;

View File

@ -2,7 +2,7 @@ use async_trait::async_trait;
use std::sync::Arc; use std::sync::Arc;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::RowDetail; use collab_database::rows::Row;
use tokio::sync::RwLock; use tokio::sync::RwLock;
use crate::services::cell::CellCache; use crate::services::cell::CellCache;
@ -59,18 +59,17 @@ impl SortDelegate for DatabaseViewSortDelegateImpl {
self.delegate.get_sort(view_id, sort_id).await.map(Arc::new) self.delegate.get_sort(view_id, sort_id).await.map(Arc::new)
} }
async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> { async fn get_rows(&self, view_id: &str) -> Vec<Arc<Row>> {
let view_id = view_id.to_string(); let view_id = view_id.to_string();
let mut row_details = self.delegate.get_all_row_details(&view_id).await; let mut rows = self.delegate.get_all_rows(&view_id).await;
self.filter_controller.filter_rows(&mut row_details).await; self.filter_controller.filter_rows(&mut rows).await;
row_details rows
} }
async fn filter_row(&self, row_detail: &RowDetail) -> bool { async fn filter_row(&self, row: &Row) -> bool {
let row_detail = row_detail.clone(); let mut rows = vec![Arc::new(row.clone())];
let mut row_details = vec![Arc::new(row_detail)]; self.filter_controller.filter_rows(&mut rows).await;
self.filter_controller.filter_rows(&mut row_details).await; !rows.is_empty()
!row_details.is_empty()
} }
async fn get_field(&self, field_id: &str) -> Option<Field> { async fn get_field(&self, field_id: &str) -> Option<Field> {

View File

@ -5,18 +5,19 @@ use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, NaiveTime, Offset, Tim
use chrono_tz::Tz; use chrono_tz::Tz;
use collab::preclude::Any; use collab::preclude::Any;
use collab::util::AnyMapExt; use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder}; use collab_database::fields::{Field, TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell; use collab_database::rows::Cell;
use collab_database::template::date_parse::cast_string_to_timestamp;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use flowy_error::{ErrorCode, FlowyError, FlowyResult}; use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use crate::entities::{DateCellDataPB, DateFilterPB}; use crate::entities::{DateCellDataPB, DateFilterPB, FieldType};
use crate::services::cell::{CellDataChangeset, CellDataDecoder}; use crate::services::cell::{CellDataChangeset, CellDataDecoder};
use crate::services::field::{ use crate::services::field::{
default_order, DateCellChangeset, DateCellData, DateFormat, TimeFormat, TypeOption, default_order, DateCellChangeset, DateCellData, DateFormat, TimeFormat, TypeOption,
TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionCellDataSerde, TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionCellDataSerde,
TypeOptionTransform, TypeOptionTransform, CELL_DATA,
}; };
use crate::services::sort::SortCondition; use crate::services::sort::SortCondition;
@ -232,6 +233,17 @@ impl CellDataDecoder for DateTypeOption {
} }
} }
fn decode_cell_with_transform(
&self,
cell: &Cell,
_from_field_type: FieldType,
_field: &Field,
) -> Option<<Self as TypeOption>::CellData> {
let s = cell.get_as::<String>(CELL_DATA)?;
let timestamp = cast_string_to_timestamp(&s)?;
Some(DateCellData::from_timestamp(timestamp))
}
fn numeric_cell(&self, _cell: &Cell) -> Option<f64> { fn numeric_cell(&self, _cell: &Cell) -> Option<f64> {
None None
} }

View File

@ -47,6 +47,16 @@ impl DateCellData {
reminder_id, reminder_id,
} }
} }
pub fn from_timestamp(timestamp: i64) -> Self {
Self {
timestamp: Some(timestamp),
end_timestamp: None,
include_time: false,
is_range: false,
reminder_id: String::new(),
}
}
} }
impl TypeOptionCellData for DateCellData { impl TypeOptionCellData for DateCellData {

View File

@ -132,7 +132,7 @@ impl NumberTypeOption {
match self.format { match self.format {
NumberFormat::Num => { NumberFormat::Num => {
if SCIENTIFIC_NOTATION_REGEX if SCIENTIFIC_NOTATION_REGEX
.is_match(&num_cell_data.as_ref()) .is_match(num_cell_data.as_ref())
.unwrap() .unwrap()
{ {
match Decimal::from_scientific(&num_cell_data.as_ref().to_lowercase()) { match Decimal::from_scientific(&num_cell_data.as_ref().to_lowercase()) {
@ -142,7 +142,7 @@ impl NumberTypeOption {
} else { } else {
// Test the input string is start with dot and only contains number. // Test the input string is start with dot and only contains number.
// If it is, add a 0 before the dot. For example, ".123" -> "0.123" // If it is, add a 0 before the dot. For example, ".123" -> "0.123"
let num_str = match START_WITH_DOT_NUM_REGEX.captures(&num_cell_data.as_ref()) { let num_str = match START_WITH_DOT_NUM_REGEX.captures(num_cell_data.as_ref()) {
Ok(Some(captures)) => match captures.get(0).map(|m| m.as_str().to_string()) { Ok(Some(captures)) => match captures.get(0).map(|m| m.as_str().to_string()) {
Some(s) => { Some(s) => {
format!("0{}", s) format!("0{}", s)
@ -152,7 +152,7 @@ impl NumberTypeOption {
// Extract the number from the string. // Extract the number from the string.
// For example, "123abc" -> "123". check out the number_type_option_input_test test for // For example, "123abc" -> "123". check out the number_type_option_input_test test for
// more examples. // more examples.
_ => match EXTRACT_NUM_REGEX.captures(&num_cell_data.as_ref()) { _ => match EXTRACT_NUM_REGEX.captures(num_cell_data.as_ref()) {
Ok(Some(captures)) => captures Ok(Some(captures)) => captures
.get(0) .get(0)
.map(|m| m.as_str().to_string()) .map(|m| m.as_str().to_string())
@ -169,7 +169,7 @@ impl NumberTypeOption {
}, },
_ => { _ => {
// If the format is not number, use the format string to format the number. // If the format is not number, use the format string to format the number.
NumberCellFormat::from_format_str(&num_cell_data.as_ref(), &self.format) NumberCellFormat::from_format_str(num_cell_data.as_ref(), &self.format)
}, },
} }
} }
@ -186,12 +186,12 @@ impl CellDataDecoder for NumberTypeOption {
fn decode_cell(&self, cell: &Cell) -> FlowyResult<<Self as TypeOption>::CellData> { fn decode_cell(&self, cell: &Cell) -> FlowyResult<<Self as TypeOption>::CellData> {
let num_cell_data = self.parse_cell(cell)?; let num_cell_data = self.parse_cell(cell)?;
Ok(NumberCellData::from( Ok(NumberCellData::from(
self.format_cell_data(&num_cell_data)?.to_string(), self.format_cell_data(num_cell_data)?.to_string(),
)) ))
} }
fn stringify_cell_data(&self, cell_data: <Self as TypeOption>::CellData) -> String { fn stringify_cell_data(&self, cell_data: <Self as TypeOption>::CellData) -> String {
match self.format_cell_data(&cell_data) { match self.format_cell_data(cell_data) {
Ok(cell_data) => cell_data.to_string(), Ok(cell_data) => cell_data.to_string(),
Err(_) => "".to_string(), Err(_) => "".to_string(),
} }
@ -205,7 +205,7 @@ impl CellDataDecoder for NumberTypeOption {
) -> Option<<Self as TypeOption>::CellData> { ) -> Option<<Self as TypeOption>::CellData> {
let num_cell = Self::CellData::from(cell); let num_cell = Self::CellData::from(cell);
Some(Self::CellData::from( Some(Self::CellData::from(
self.format_cell_data(&num_cell).ok()?.to_string(), self.format_cell_data(num_cell).ok()?.to_string(),
)) ))
} }

View File

@ -194,12 +194,12 @@ where
if let Some(cell_data_cache) = self.cell_data_cache.as_ref() { if let Some(cell_data_cache) = self.cell_data_cache.as_ref() {
let field_type = FieldType::from(field.field_type); let field_type = FieldType::from(field.field_type);
let key = CellDataCacheKey::new(field, field_type, cell); let key = CellDataCacheKey::new(field, field_type, cell);
tracing::trace!( // tracing::trace!(
"Cell cache update: field_type:{}, cell: {:?}, cell_data: {:?}", // "Cell cache update: field_type:{}, cell: {:?}, cell_data: {:?}",
field_type, // field_type,
cell, // cell,
cell_data // cell_data
); // );
cell_data_cache.insert(key.as_ref(), cell_data); cell_data_cache.insert(key.as_ref(), cell_data);
} }
} }
@ -523,6 +523,7 @@ pub fn is_type_option_cell_transformable(
| (FieldType::RichText, FieldType::MultiSelect) | (FieldType::RichText, FieldType::MultiSelect)
| (FieldType::RichText, FieldType::URL) | (FieldType::RichText, FieldType::URL)
| (FieldType::RichText, FieldType::Number) | (FieldType::RichText, FieldType::Number)
| (FieldType::RichText, FieldType::DateTime)
| (_, FieldType::RichText) | (_, FieldType::RichText)
) )
} }

View File

@ -24,7 +24,7 @@ use crate::services::filter::{Filter, FilterChangeset, FilterInner, FilterResult
pub trait FilterDelegate: Send + Sync + 'static { pub trait FilterDelegate: Send + Sync + 'static {
async fn get_field(&self, field_id: &str) -> Option<Field>; async fn get_field(&self, field_id: &str) -> Option<Field>;
async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field>; async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field>;
async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>>; async fn get_rows(&self, view_id: &str) -> Vec<Arc<Row>>;
async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc<RowDetail>)>; async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc<RowDetail>)>;
async fn get_all_filters(&self, view_id: &str) -> Vec<Filter>; async fn get_all_filters(&self, view_id: &str) -> Vec<Filter>;
async fn save_filters(&self, view_id: &str, filters: &[Filter]); async fn save_filters(&self, view_id: &str, filters: &[Filter]);
@ -129,32 +129,6 @@ impl FilterController {
self.task_scheduler.write().await.add_task(task); self.task_scheduler.write().await.add_task(task);
} }
pub async fn filter_rows(&self, rows: &mut Vec<Arc<RowDetail>>) {
let filters = self.filters.read().await;
if filters.is_empty() {
return;
}
let field_by_field_id = self.get_field_map().await;
rows.iter().for_each(|row_detail| {
let _ = filter_row(
&row_detail.row,
&self.result_by_row_id,
&field_by_field_id,
&self.cell_cache,
&filters,
);
});
rows.retain(|row_detail| {
self
.result_by_row_id
.get(&row_detail.row.id)
.map(|result| *result)
.unwrap_or(false)
});
}
pub async fn did_receive_row_changed(&self, row_id: RowId) { pub async fn did_receive_row_changed(&self, row_id: RowId) {
if !self.filters.read().await.is_empty() { if !self.filters.read().await.is_empty() {
self self
@ -338,7 +312,10 @@ impl FilterController {
pub async fn process(&self, predicate: &str) -> FlowyResult<()> { pub async fn process(&self, predicate: &str) -> FlowyResult<()> {
let event_type = FilterEvent::from_str(predicate).unwrap(); let event_type = FilterEvent::from_str(predicate).unwrap();
match event_type { match event_type {
FilterEvent::FilterDidChanged => self.filter_all_rows_handler().await?, FilterEvent::FilterDidChanged => {
let mut rows = self.delegate.get_rows(&self.view_id).await;
self.filter_rows_and_notify(&mut rows).await?
},
FilterEvent::RowDidChanged(row_id) => self.filter_single_row_handler(row_id).await?, FilterEvent::RowDidChanged(row_id) => self.filter_single_row_handler(row_id).await?,
} }
Ok(()) Ok(())
@ -376,42 +353,35 @@ impl FilterController {
Ok(()) Ok(())
} }
async fn filter_all_rows_handler(&self) -> FlowyResult<()> { pub async fn filter_rows_and_notify(&self, rows: &mut Vec<Arc<Row>>) -> FlowyResult<()> {
let filters = self.filters.read().await; let filters = self.filters.read().await;
let field_by_field_id = self.get_field_map().await; let field_by_field_id = self.get_field_map().await;
let mut visible_rows = vec![]; let mut visible_rows = vec![];
let mut invisible_rows = vec![]; let mut invisible_rows = vec![];
for (index, row) in rows.iter_mut().enumerate() {
for (index, row_detail) in self
.delegate
.get_rows(&self.view_id)
.await
.into_iter()
.enumerate()
{
if let Some(is_visible) = filter_row( if let Some(is_visible) = filter_row(
&row_detail.row, row,
&self.result_by_row_id, &self.result_by_row_id,
&field_by_field_id, &field_by_field_id,
&self.cell_cache, &self.cell_cache,
&filters, &filters,
) { ) {
if is_visible { if is_visible {
let row_meta = RowMetaPB::from(row_detail.as_ref().clone()); let row_meta = RowMetaPB::from(row.as_ref().clone());
visible_rows.push(InsertedRowPB::new(row_meta).with_index(index as i32)) visible_rows.push(InsertedRowPB::new(row_meta).with_index(index as i32))
} else { } else {
invisible_rows.push(row_detail.row.id.clone()); invisible_rows.push(row.id.clone());
} }
} }
} }
rows.retain(|row| !invisible_rows.iter().any(|id| id == &row.id));
let notification = FilterResultNotification { let notification = FilterResultNotification {
view_id: self.view_id.clone(), view_id: self.view_id.clone(),
invisible_rows, invisible_rows,
visible_rows, visible_rows,
}; };
tracing::trace!("filter result {:?}", filters); tracing::trace!("filter result {:?}", notification);
let _ = self let _ = self
.notifier .notifier
.send(DatabaseViewChanged::FilterNotification(notification)); .send(DatabaseViewChanged::FilterNotification(notification));
@ -419,6 +389,32 @@ impl FilterController {
Ok(()) Ok(())
} }
pub async fn filter_rows(&self, rows: &mut Vec<Arc<Row>>) {
let filters = self.filters.read().await;
if filters.is_empty() {
return;
}
let field_by_field_id = self.get_field_map().await;
rows.iter().for_each(|row| {
let _ = filter_row(
row,
&self.result_by_row_id,
&field_by_field_id,
&self.cell_cache,
&filters,
);
});
rows.retain(|row| {
self
.result_by_row_id
.get(&row.id)
.map(|result| *result)
.unwrap_or(false)
});
}
async fn get_field_map(&self) -> HashMap<String, Field> { async fn get_field_map(&self) -> HashMap<String, Field> {
self self
.delegate .delegate

View File

@ -10,6 +10,7 @@ use collab_database::rows::RowId;
use collab_database::views::{FilterMap, FilterMapBuilder}; use collab_database::views::{FilterMap, FilterMapBuilder};
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use lib_infra::box_any::BoxAny; use lib_infra::box_any::BoxAny;
use tracing::error;
use crate::entities::{ use crate::entities::{
CheckboxFilterPB, ChecklistFilterPB, DateFilterContent, DateFilterPB, FieldType, FilterType, CheckboxFilterPB, ChecklistFilterPB, DateFilterContent, DateFilterPB, FieldType, FilterType,
@ -454,8 +455,13 @@ fn get_children(filter_map: FilterMap) -> Vec<Filter> {
if let Some(Any::Array(children)) = filter_map.get(FILTER_CHILDREN) { if let Some(Any::Array(children)) = filter_map.get(FILTER_CHILDREN) {
for child in children.iter() { for child in children.iter() {
if let Any::Map(child_map) = child { if let Any::Map(child_map) = child {
if let Ok(filter) = Filter::try_from(child_map.deref().clone()) { match Filter::try_from(child_map.deref().clone()) {
result.push(filter); Ok(filter) => {
result.push(filter);
},
Err(err) => {
error!("Failed to deserialize filter: {:?}", err);
},
} }
} }
} }

View File

@ -1,6 +1,6 @@
use async_trait::async_trait; use async_trait::async_trait;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cell, Cells, Row, RowDetail, RowId}; use collab_database::rows::{Cell, Cells, Row, RowId};
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
@ -33,7 +33,7 @@ pub trait GroupCustomize: Send + Sync {
fn create_or_delete_group_when_cell_changed( fn create_or_delete_group_when_cell_changed(
&mut self, &mut self,
_row_detail: &RowDetail, _row: &Row,
_old_cell_data: Option<&<Self::GroupTypeOption as TypeOption>::CellProtobufType>, _old_cell_data: Option<&<Self::GroupTypeOption as TypeOption>::CellProtobufType>,
_cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType, _cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> FlowyResult<(Option<InsertedGroupPB>, Option<GroupPB>)> { ) -> FlowyResult<(Option<InsertedGroupPB>, Option<GroupPB>)> {
@ -45,7 +45,7 @@ pub trait GroupCustomize: Send + Sync {
/// ///
fn add_or_remove_row_when_cell_changed( fn add_or_remove_row_when_cell_changed(
&mut self, &mut self,
row_detail: &RowDetail, row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType, cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB>; ) -> Vec<GroupRowsNotificationPB>;
@ -113,7 +113,7 @@ pub trait GroupController: Send + Sync {
/// ///
/// * `rows`: rows to be inserted /// * `rows`: rows to be inserted
/// * `field`: reference to the field being sorted (currently unused) /// * `field`: reference to the field being sorted (currently unused)
fn fill_groups(&mut self, rows: &[&RowDetail], field: &Field) -> FlowyResult<()>; fn fill_groups(&mut self, rows: &[&Row], field: &Field) -> FlowyResult<()>;
/// Create a new group, currently only supports single and multi-select. /// Create a new group, currently only supports single and multi-select.
/// ///
@ -137,11 +137,7 @@ pub trait GroupController: Send + Sync {
/// Returns a changeset payload to be sent as a notification. /// Returns a changeset payload to be sent as a notification.
/// ///
/// * `row_detail`: the newly-created row /// * `row_detail`: the newly-created row
fn did_create_row( fn did_create_row(&mut self, row: &Row, index: usize) -> Vec<GroupRowsNotificationPB>;
&mut self,
row_detail: &RowDetail,
index: usize,
) -> Vec<GroupRowsNotificationPB>;
/// Called after a row's cell data is changed, this moves the row to the /// Called after a row's cell data is changed, this moves the row to the
/// correct group. It may also insert a new group and/or remove an old group. /// correct group. It may also insert a new group and/or remove an old group.
@ -153,8 +149,8 @@ pub trait GroupController: Send + Sync {
/// * `field`: /// * `field`:
fn did_update_group_row( fn did_update_group_row(
&mut self, &mut self,
old_row_detail: &Option<RowDetail>, old_row: &Option<Row>,
row_detail: &RowDetail, new_row: &Row,
field: &Field, field: &Field,
) -> FlowyResult<DidUpdateGroupRowResult>; ) -> FlowyResult<DidUpdateGroupRowResult>;

View File

@ -3,7 +3,7 @@ use std::marker::PhantomData;
use std::sync::Arc; use std::sync::Arc;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cells, Row, RowDetail, RowId}; use collab_database::rows::{Cells, Row, RowId};
use futures::executor::block_on; use futures::executor::block_on;
use serde::de::DeserializeOwned; use serde::de::DeserializeOwned;
use serde::Serialize; use serde::Serialize;
@ -27,7 +27,7 @@ use crate::services::group::{GroupChangeset, GroupsBuilder, MoveGroupRowContext}
pub trait GroupControllerDelegate: Send + Sync + 'static { pub trait GroupControllerDelegate: Send + Sync + 'static {
async fn get_field(&self, field_id: &str) -> Option<Field>; async fn get_field(&self, field_id: &str) -> Option<Field>;
async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>>; async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<Row>>;
} }
/// [BaseGroupController] is a generic group controller that provides customized implementations /// [BaseGroupController] is a generic group controller that provides customized implementations
@ -86,7 +86,7 @@ where
fn update_no_status_group( fn update_no_status_group(
&mut self, &mut self,
row_detail: &RowDetail, row: &Row,
other_group_changesets: &[GroupRowsNotificationPB], other_group_changesets: &[GroupRowsNotificationPB],
) -> Option<GroupRowsNotificationPB> { ) -> Option<GroupRowsNotificationPB> {
let no_status_group = self.context.get_mut_no_status_group()?; let no_status_group = self.context.get_mut_no_status_group()?;
@ -115,8 +115,8 @@ where
if !no_status_group_rows.is_empty() { if !no_status_group_rows.is_empty() {
changeset changeset
.inserted_rows .inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone()))); .push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
no_status_group.add_row(row_detail.clone()); no_status_group.add_row(row.clone());
} }
// [other_group_delete_rows] contains all the deleted rows except the default group. // [other_group_delete_rows] contains all the deleted rows except the default group.
@ -139,8 +139,8 @@ where
.collect::<Vec<&InsertedRowPB>>(); .collect::<Vec<&InsertedRowPB>>();
let mut deleted_row_ids = vec![]; let mut deleted_row_ids = vec![];
for row_detail in &no_status_group.rows { for row in &no_status_group.rows {
let row_id = row_detail.row.id.to_string(); let row_id = row.id.to_string();
if default_group_deleted_rows if default_group_deleted_rows
.iter() .iter()
.any(|deleted_row| deleted_row.row_meta.id == row_id) .any(|deleted_row| deleted_row.row_meta.id == row_id)
@ -150,7 +150,7 @@ where
} }
no_status_group no_status_group
.rows .rows
.retain(|row_detail| !deleted_row_ids.contains(&row_detail.row.id)); .retain(|row| !deleted_row_ids.contains(&row.id));
changeset.deleted_rows.extend(deleted_row_ids); changeset.deleted_rows.extend(deleted_row_ids);
Some(changeset) Some(changeset)
} }
@ -179,9 +179,9 @@ where
} }
#[tracing::instrument(level = "trace", skip_all, fields(row_count=%rows.len(), group_result))] #[tracing::instrument(level = "trace", skip_all, fields(row_count=%rows.len(), group_result))]
fn fill_groups(&mut self, rows: &[&RowDetail], _field: &Field) -> FlowyResult<()> { fn fill_groups(&mut self, rows: &[&Row], _field: &Field) -> FlowyResult<()> {
for row_detail in rows { for row in rows {
let cell = match row_detail.row.cells.get(&self.grouping_field_id) { let cell = match row.cells.get(&self.grouping_field_id) {
None => self.placeholder_cell(), None => self.placeholder_cell(),
Some(cell) => Some(cell.clone()), Some(cell) => Some(cell.clone()),
}; };
@ -192,7 +192,7 @@ where
for group in self.context.groups() { for group in self.context.groups() {
if self.can_group(&group.id, &cell_data) { if self.can_group(&group.id, &cell_data) {
grouped_rows.push(GroupedRow { grouped_rows.push(GroupedRow {
row_detail: (*row_detail).clone(), row: (*row).clone(),
group_id: group.id.clone(), group_id: group.id.clone(),
}); });
} }
@ -201,7 +201,7 @@ where
if !grouped_rows.is_empty() { if !grouped_rows.is_empty() {
for group_row in grouped_rows { for group_row in grouped_rows {
if let Some(group) = self.context.get_mut_group(&group_row.group_id) { if let Some(group) = self.context.get_mut_group(&group_row.group_id) {
group.add_row(group_row.row_detail); group.add_row(group_row.row);
} }
} }
continue; continue;
@ -210,7 +210,7 @@ where
match self.context.get_mut_no_status_group() { match self.context.get_mut_no_status_group() {
None => {}, None => {},
Some(no_status_group) => no_status_group.add_row((*row_detail).clone()), Some(no_status_group) => no_status_group.add_row((*row).clone()),
} }
} }
@ -229,14 +229,10 @@ where
self.context.move_group(from_group_id, to_group_id) self.context.move_group(from_group_id, to_group_id)
} }
fn did_create_row( fn did_create_row(&mut self, row: &Row, index: usize) -> Vec<GroupRowsNotificationPB> {
&mut self,
row_detail: &RowDetail,
index: usize,
) -> Vec<GroupRowsNotificationPB> {
let mut changesets: Vec<GroupRowsNotificationPB> = vec![]; let mut changesets: Vec<GroupRowsNotificationPB> = vec![];
let cell = match row_detail.row.cells.get(&self.grouping_field_id) { let cell = match row.cells.get(&self.grouping_field_id) {
None => self.placeholder_cell(), None => self.placeholder_cell(),
Some(cell) => Some(cell.clone()), Some(cell) => Some(cell.clone()),
}; };
@ -252,7 +248,7 @@ where
let changeset = GroupRowsNotificationPB::insert( let changeset = GroupRowsNotificationPB::insert(
group.id.clone(), group.id.clone(),
vec![InsertedRowPB { vec![InsertedRowPB {
row_meta: (*row_detail).clone().into(), row_meta: (*row).clone().into(),
index: Some(index as i32), index: Some(index as i32),
is_new: true, is_new: true,
}], }],
@ -263,15 +259,15 @@ where
if !suitable_group_ids.is_empty() { if !suitable_group_ids.is_empty() {
for group_id in suitable_group_ids.iter() { for group_id in suitable_group_ids.iter() {
if let Some(group) = self.context.get_mut_group(group_id) { if let Some(group) = self.context.get_mut_group(group_id) {
group.add_row((*row_detail).clone()); group.add_row((*row).clone());
} }
} }
} else if let Some(no_status_group) = self.context.get_mut_no_status_group() { } else if let Some(no_status_group) = self.context.get_mut_no_status_group() {
no_status_group.add_row((*row_detail).clone()); no_status_group.add_row((*row).clone());
let changeset = GroupRowsNotificationPB::insert( let changeset = GroupRowsNotificationPB::insert(
no_status_group.id.clone(), no_status_group.id.clone(),
vec![InsertedRowPB { vec![InsertedRowPB {
row_meta: (*row_detail).clone().into(), row_meta: (*row).clone().into(),
index: Some(index as i32), index: Some(index as i32),
is_new: true, is_new: true,
}], }],
@ -285,8 +281,8 @@ where
fn did_update_group_row( fn did_update_group_row(
&mut self, &mut self,
old_row_detail: &Option<RowDetail>, old_row: &Option<Row>,
row_detail: &RowDetail, new_row: &Row,
field: &Field, field: &Field,
) -> FlowyResult<DidUpdateGroupRowResult> { ) -> FlowyResult<DidUpdateGroupRowResult> {
let mut result = DidUpdateGroupRowResult { let mut result = DidUpdateGroupRowResult {
@ -294,20 +290,17 @@ where
deleted_group: None, deleted_group: None,
row_changesets: vec![], row_changesets: vec![],
}; };
if let Some(cell_data) = get_cell_data_from_row::<P>(Some(&row_detail.row), field) { if let Some(cell_data) = get_cell_data_from_row::<P>(Some(new_row), field) {
let old_cell_data = let old_cell_data = get_cell_data_from_row::<P>(old_row.as_ref(), field);
get_cell_data_from_row::<P>(old_row_detail.as_ref().map(|detail| &detail.row), field); if let Ok((insert, delete)) =
if let Ok((insert, delete)) = self.create_or_delete_group_when_cell_changed( self.create_or_delete_group_when_cell_changed(new_row, old_cell_data.as_ref(), &cell_data)
row_detail, {
old_cell_data.as_ref(),
&cell_data,
) {
result.inserted_group = insert; result.inserted_group = insert;
result.deleted_group = delete; result.deleted_group = delete;
} }
let mut changesets = self.add_or_remove_row_when_cell_changed(row_detail, &cell_data); let mut changesets = self.add_or_remove_row_when_cell_changed(new_row, &cell_data);
if let Some(changeset) = self.update_no_status_group(row_detail, &changesets) { if let Some(changeset) = self.update_no_status_group(new_row, &changesets) {
if !changeset.is_empty() { if !changeset.is_empty() {
changesets.push(changeset); changesets.push(changeset);
} }
@ -356,7 +349,7 @@ where
deleted_group: None, deleted_group: None,
row_changesets: vec![], row_changesets: vec![],
}; };
let cell = match context.row_detail.row.cells.get(&self.grouping_field_id) { let cell = match context.row.cells.get(&self.grouping_field_id) {
Some(cell) => Some(cell.clone()), Some(cell) => Some(cell.clone()),
None => self.placeholder_cell(), None => self.placeholder_cell(),
}; };
@ -364,7 +357,7 @@ where
if let Some(cell) = cell { if let Some(cell) = cell {
let cell_bytes = get_cell_protobuf(&cell, context.field, None); let cell_bytes = get_cell_protobuf(&cell, context.field, None);
let cell_data = cell_bytes.parser::<P>()?; let cell_data = cell_bytes.parser::<P>()?;
result.deleted_group = self.delete_group_when_move_row(&context.row_detail.row, &cell_data); result.deleted_group = self.delete_group_when_move_row(context.row, &cell_data);
result.row_changesets = self.move_row(context); result.row_changesets = self.move_row(context);
} else { } else {
tracing::warn!("Unexpected moving group row, changes should not be empty"); tracing::warn!("Unexpected moving group row, changes should not be empty");
@ -388,11 +381,7 @@ where
match group { match group {
Some((_index, group_data)) => { Some((_index, group_data)) => {
let row_ids = group_data let row_ids = group_data.rows.iter().map(|row| row.id.clone()).collect();
.rows
.iter()
.map(|row| row.row.id.clone())
.collect();
let type_option_data = <Self as GroupCustomize>::delete_group(self, group_id).await?; let type_option_data = <Self as GroupCustomize>::delete_group(self, group_id).await?;
Ok((row_ids, type_option_data)) Ok((row_ids, type_option_data))
}, },
@ -446,7 +435,7 @@ where
} }
struct GroupedRow { struct GroupedRow {
row_detail: RowDetail, row: Row,
group_id: String, group_id: String,
} }

View File

@ -1,6 +1,6 @@
use async_trait::async_trait; use async_trait::async_trait;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail}; use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -49,27 +49,25 @@ impl GroupCustomize for CheckboxGroupController {
fn add_or_remove_row_when_cell_changed( fn add_or_remove_row_when_cell_changed(
&mut self, &mut self,
row_detail: &RowDetail, row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType, cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> { ) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![]; let mut changesets = vec![];
self.context.iter_mut_status_groups(|group| { self.context.iter_mut_status_groups(|group| {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone()); let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
let is_not_contained = !group.contains_row(&row_detail.row.id); let is_not_contained = !group.contains_row(&row.id);
if group.id == CHECK { if group.id == CHECK {
if !cell_data.is_checked { if !cell_data.is_checked {
// Remove the row if the group.id is CHECK but the cell_data is UNCHECK // Remove the row if the group.id is CHECK but the cell_data is UNCHECK
changeset changeset.deleted_rows.push(row.id.clone().into_inner());
.deleted_rows group.remove_row(&row.id);
.push(row_detail.row.id.clone().into_inner());
group.remove_row(&row_detail.row.id);
} else { } else {
// Add the row to the group if the group didn't contain the row // Add the row to the group if the group didn't contain the row
if is_not_contained { if is_not_contained {
changeset changeset
.inserted_rows .inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone()))); .push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row_detail.clone()); group.add_row(row.clone());
} }
} }
} }
@ -77,17 +75,15 @@ impl GroupCustomize for CheckboxGroupController {
if group.id == UNCHECK { if group.id == UNCHECK {
if cell_data.is_checked { if cell_data.is_checked {
// Remove the row if the group.id is UNCHECK but the cell_data is CHECK // Remove the row if the group.id is UNCHECK but the cell_data is CHECK
changeset changeset.deleted_rows.push(row.id.clone().into_inner());
.deleted_rows group.remove_row(&row.id);
.push(row_detail.row.id.clone().into_inner());
group.remove_row(&row_detail.row.id);
} else { } else {
// Add the row to the group if the group didn't contain the row // Add the row to the group if the group didn't contain the row
if is_not_contained { if is_not_contained {
changeset changeset
.inserted_rows .inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone()))); .push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row_detail.clone()); group.add_row(row.clone());
} }
} }
} }

View File

@ -2,7 +2,7 @@ use async_trait::async_trait;
use chrono::{DateTime, Datelike, Days, Duration, Local, NaiveDateTime}; use chrono::{DateTime, Datelike, Days, Duration, Local, NaiveDateTime};
use collab_database::database::timestamp; use collab_database::database::timestamp;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail}; use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr}; use serde_repr::{Deserialize_repr, Serialize_repr};
@ -73,7 +73,7 @@ impl GroupCustomize for DateGroupController {
fn create_or_delete_group_when_cell_changed( fn create_or_delete_group_when_cell_changed(
&mut self, &mut self,
_row_detail: &RowDetail, _row: &Row,
_old_cell_data: Option<&<Self::GroupTypeOption as TypeOption>::CellProtobufType>, _old_cell_data: Option<&<Self::GroupTypeOption as TypeOption>::CellProtobufType>,
_cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType, _cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> FlowyResult<(Option<InsertedGroupPB>, Option<GroupPB>)> { ) -> FlowyResult<(Option<InsertedGroupPB>, Option<GroupPB>)> {
@ -86,10 +86,7 @@ impl GroupCustomize for DateGroupController {
{ {
let group = make_group_from_date_cell(&_cell_data.into(), &setting_content); let group = make_group_from_date_cell(&_cell_data.into(), &setting_content);
let mut new_group = self.context.add_new_group(group)?; let mut new_group = self.context.add_new_group(group)?;
new_group new_group.group.rows.push(RowMetaPB::from(_row.clone()));
.group
.rows
.push(RowMetaPB::from(_row_detail.clone()));
inserted_group = Some(new_group); inserted_group = Some(new_group);
} }
@ -122,7 +119,7 @@ impl GroupCustomize for DateGroupController {
fn add_or_remove_row_when_cell_changed( fn add_or_remove_row_when_cell_changed(
&mut self, &mut self,
row_detail: &RowDetail, row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType, cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> { ) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![]; let mut changesets = vec![];
@ -130,17 +127,15 @@ impl GroupCustomize for DateGroupController {
self.context.iter_mut_status_groups(|group| { self.context.iter_mut_status_groups(|group| {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone()); let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
if group.id == get_date_group_id(&cell_data.into(), &setting_content) { if group.id == get_date_group_id(&cell_data.into(), &setting_content) {
if !group.contains_row(&row_detail.row.id) { if !group.contains_row(&row.id) {
changeset changeset
.inserted_rows .inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone()))); .push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row_detail.clone()); group.add_row(row.clone());
} }
} else if group.contains_row(&row_detail.row.id) { } else if group.contains_row(&row.id) {
group.remove_row(&row_detail.row.id); group.remove_row(&row.id);
changeset changeset.deleted_rows.push(row.id.clone().into_inner());
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
} }
if !changeset.is_empty() { if !changeset.is_empty() {

View File

@ -2,7 +2,7 @@ use async_trait::async_trait;
use std::sync::Arc; use std::sync::Arc;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cells, Row, RowDetail, RowId}; use collab_database::rows::{Cells, Row, RowId};
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
@ -53,7 +53,7 @@ impl GroupController for DefaultGroupController {
Some((0, self.group.clone())) Some((0, self.group.clone()))
} }
fn fill_groups(&mut self, rows: &[&RowDetail], _field: &Field) -> FlowyResult<()> { fn fill_groups(&mut self, rows: &[&Row], _field: &Field) -> FlowyResult<()> {
rows.iter().for_each(|row| { rows.iter().for_each(|row| {
self.group.add_row((*row).clone()); self.group.add_row((*row).clone());
}); });
@ -71,17 +71,13 @@ impl GroupController for DefaultGroupController {
Ok(()) Ok(())
} }
fn did_create_row( fn did_create_row(&mut self, row: &Row, index: usize) -> Vec<GroupRowsNotificationPB> {
&mut self, self.group.add_row((*row).clone());
row_detail: &RowDetail,
index: usize,
) -> Vec<GroupRowsNotificationPB> {
self.group.add_row((*row_detail).clone());
vec![GroupRowsNotificationPB::insert( vec![GroupRowsNotificationPB::insert(
self.group.id.clone(), self.group.id.clone(),
vec![InsertedRowPB { vec![InsertedRowPB {
row_meta: (*row_detail).clone().into(), row_meta: (*row).clone().into(),
index: Some(index as i32), index: Some(index as i32),
is_new: true, is_new: true,
}], }],
@ -90,8 +86,8 @@ impl GroupController for DefaultGroupController {
fn did_update_group_row( fn did_update_group_row(
&mut self, &mut self,
_old_row_detail: &Option<RowDetail>, _old_row: &Option<Row>,
_row_detail: &RowDetail, _new_row: &Row,
_field: &Field, _field: &Field,
) -> FlowyResult<DidUpdateGroupRowResult> { ) -> FlowyResult<DidUpdateGroupRowResult> {
Ok(DidUpdateGroupRowResult { Ok(DidUpdateGroupRowResult {

View File

@ -1,7 +1,7 @@
use async_trait::async_trait; use async_trait::async_trait;
use collab_database::entity::SelectOption; use collab_database::entity::SelectOption;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail}; use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -51,12 +51,12 @@ impl GroupCustomize for MultiSelectGroupController {
fn add_or_remove_row_when_cell_changed( fn add_or_remove_row_when_cell_changed(
&mut self, &mut self,
row_detail: &RowDetail, row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType, cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> { ) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![]; let mut changesets = vec![];
self.context.iter_mut_status_groups(|group| { self.context.iter_mut_status_groups(|group| {
if let Some(changeset) = add_or_remove_select_option_row(group, cell_data, row_detail) { if let Some(changeset) = add_or_remove_select_option_row(group, cell_data, row) {
changesets.push(changeset); changesets.push(changeset);
} }
}); });

View File

@ -1,7 +1,7 @@
use async_trait::async_trait; use async_trait::async_trait;
use collab_database::entity::SelectOption; use collab_database::entity::SelectOption;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail}; use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use flowy_error::{FlowyError, FlowyResult}; use flowy_error::{FlowyError, FlowyResult};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -53,12 +53,12 @@ impl GroupCustomize for SingleSelectGroupController {
fn add_or_remove_row_when_cell_changed( fn add_or_remove_row_when_cell_changed(
&mut self, &mut self,
row_detail: &RowDetail, row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType, cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> { ) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![]; let mut changesets = vec![];
self.context.iter_mut_status_groups(|group| { self.context.iter_mut_status_groups(|group| {
if let Some(changeset) = add_or_remove_select_option_row(group, cell_data, row_detail) { if let Some(changeset) = add_or_remove_select_option_row(group, cell_data, row) {
changesets.push(changeset); changesets.push(changeset);
} }
}); });

View File

@ -1,7 +1,7 @@
use chrono::NaiveDateTime; use chrono::NaiveDateTime;
use collab_database::entity::SelectOption; use collab_database::entity::SelectOption;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::{Cell, Row, RowDetail}; use collab_database::rows::{Cell, Row};
use crate::entities::{ use crate::entities::{
FieldType, GroupRowsNotificationPB, InsertedRowPB, RowMetaPB, SelectOptionCellDataPB, FieldType, GroupRowsNotificationPB, InsertedRowPB, RowMetaPB, SelectOptionCellDataPB,
@ -15,30 +15,26 @@ use crate::services::group::{Group, GroupData, MoveGroupRowContext};
pub fn add_or_remove_select_option_row( pub fn add_or_remove_select_option_row(
group: &mut GroupData, group: &mut GroupData,
cell_data: &SelectOptionCellDataPB, cell_data: &SelectOptionCellDataPB,
row_detail: &RowDetail, row: &Row,
) -> Option<GroupRowsNotificationPB> { ) -> Option<GroupRowsNotificationPB> {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone()); let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
if cell_data.select_options.is_empty() { if cell_data.select_options.is_empty() {
if group.contains_row(&row_detail.row.id) { if group.contains_row(&row.id) {
group.remove_row(&row_detail.row.id); group.remove_row(&row.id);
changeset changeset.deleted_rows.push(row.id.clone().into_inner());
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
} }
} else { } else {
cell_data.select_options.iter().for_each(|option| { cell_data.select_options.iter().for_each(|option| {
if option.id == group.id { if option.id == group.id {
if !group.contains_row(&row_detail.row.id) { if !group.contains_row(&row.id) {
changeset changeset
.inserted_rows .inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone()))); .push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row_detail.clone()); group.add_row(row.clone());
} }
} else if group.contains_row(&row_detail.row.id) { } else if group.contains_row(&row.id) {
group.remove_row(&row_detail.row.id); group.remove_row(&row.id);
changeset changeset.deleted_rows.push(row.id.clone().into_inner());
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
} }
}); });
} }
@ -76,14 +72,14 @@ pub fn move_group_row(
) -> Option<GroupRowsNotificationPB> { ) -> Option<GroupRowsNotificationPB> {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone()); let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
let MoveGroupRowContext { let MoveGroupRowContext {
row_detail, row,
row_changeset, row_changeset,
field, field,
to_group_id, to_group_id,
to_row_id, to_row_id,
} = context; } = context;
let from_index = group.index_of_row(&row_detail.row.id); let from_index = group.index_of_row(&row.id);
let to_index = match to_row_id { let to_index = match to_row_id {
None => None, None => None,
Some(to_row_id) => group.index_of_row(to_row_id), Some(to_row_id) => group.index_of_row(to_row_id),
@ -91,40 +87,28 @@ pub fn move_group_row(
// Remove the row in which group contains it // Remove the row in which group contains it
if let Some(from_index) = &from_index { if let Some(from_index) = &from_index {
changeset changeset.deleted_rows.push(row.id.clone().into_inner());
.deleted_rows tracing::debug!("Group:{} remove {} at {}", group.id, row.id, from_index);
.push(row_detail.row.id.clone().into_inner()); group.remove_row(&row.id);
tracing::debug!(
"Group:{} remove {} at {}",
group.id,
row_detail.row.id,
from_index
);
group.remove_row(&row_detail.row.id);
} }
if group.id == *to_group_id { if group.id == *to_group_id {
let mut inserted_row = InsertedRowPB::new(RowMetaPB::from((*row_detail).clone())); let mut inserted_row = InsertedRowPB::new(RowMetaPB::from((*row).clone()));
match to_index { match to_index {
None => { None => {
changeset.inserted_rows.push(inserted_row); changeset.inserted_rows.push(inserted_row);
tracing::debug!("Group:{} append row:{}", group.id, row_detail.row.id); tracing::debug!("Group:{} append row:{}", group.id, row.id);
group.add_row(row_detail.clone()); group.add_row(row.clone());
}, },
Some(to_index) => { Some(to_index) => {
if to_index < group.number_of_row() { if to_index < group.number_of_row() {
tracing::debug!( tracing::debug!("Group:{} insert {} at {} ", group.id, row.id, to_index);
"Group:{} insert {} at {} ",
group.id,
row_detail.row.id,
to_index
);
inserted_row.index = Some(to_index as i32); inserted_row.index = Some(to_index as i32);
group.insert_row(to_index, (*row_detail).clone()); group.insert_row(to_index, row.clone());
} else { } else {
tracing::warn!("Move to index: {} is out of bounds", to_index); tracing::warn!("Move to index: {} is out of bounds", to_index);
tracing::debug!("Group:{} append row:{}", group.id, row_detail.row.id); tracing::debug!("Group:{} append row:{}", group.id, row.id);
group.add_row((*row_detail).clone()); group.add_row(row.clone());
} }
changeset.inserted_rows.push(inserted_row); changeset.inserted_rows.push(inserted_row);
}, },
@ -138,7 +122,7 @@ pub fn move_group_row(
if let Some(cell) = cell { if let Some(cell) = cell {
tracing::debug!( tracing::debug!(
"Update content of the cell in the row:{} to group:{}", "Update content of the cell in the row:{} to group:{}",
row_detail.row.id, row.id,
group.id group.id
); );
row_changeset row_changeset

View File

@ -1,6 +1,6 @@
use async_trait::async_trait; use async_trait::async_trait;
use collab_database::fields::{Field, TypeOptionData}; use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail}; use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
@ -47,7 +47,7 @@ impl GroupCustomize for URLGroupController {
fn create_or_delete_group_when_cell_changed( fn create_or_delete_group_when_cell_changed(
&mut self, &mut self,
_row_detail: &RowDetail, _row: &Row,
_old_cell_data: Option<&<Self::GroupTypeOption as TypeOption>::CellProtobufType>, _old_cell_data: Option<&<Self::GroupTypeOption as TypeOption>::CellProtobufType>,
_cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType, _cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> FlowyResult<(Option<InsertedGroupPB>, Option<GroupPB>)> { ) -> FlowyResult<(Option<InsertedGroupPB>, Option<GroupPB>)> {
@ -57,10 +57,7 @@ impl GroupCustomize for URLGroupController {
let cell_data: URLCellData = _cell_data.clone().into(); let cell_data: URLCellData = _cell_data.clone().into();
let group = Group::new(cell_data.data); let group = Group::new(cell_data.data);
let mut new_group = self.context.add_new_group(group)?; let mut new_group = self.context.add_new_group(group)?;
new_group new_group.group.rows.push(RowMetaPB::from(_row.clone()));
.group
.rows
.push(RowMetaPB::from(_row_detail.clone()));
inserted_group = Some(new_group); inserted_group = Some(new_group);
} }
@ -91,24 +88,22 @@ impl GroupCustomize for URLGroupController {
fn add_or_remove_row_when_cell_changed( fn add_or_remove_row_when_cell_changed(
&mut self, &mut self,
row_detail: &RowDetail, row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType, cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> { ) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![]; let mut changesets = vec![];
self.context.iter_mut_status_groups(|group| { self.context.iter_mut_status_groups(|group| {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone()); let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
if group.id == cell_data.content { if group.id == cell_data.content {
if !group.contains_row(&row_detail.row.id) { if !group.contains_row(&row.id) {
changeset changeset
.inserted_rows .inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone()))); .push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row_detail.clone()); group.add_row(row.clone());
} }
} else if group.contains_row(&row_detail.row.id) { } else if group.contains_row(&row.id) {
group.remove_row(&row_detail.row.id); group.remove_row(&row.id);
changeset changeset.deleted_rows.push(row.id.clone().into_inner());
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
} }
if !changeset.is_empty() { if !changeset.is_empty() {

View File

@ -1,7 +1,7 @@
use collab::preclude::encoding::serde::{from_any, to_any}; use collab::preclude::encoding::serde::{from_any, to_any};
use collab::preclude::Any; use collab::preclude::Any;
use collab_database::database::gen_database_group_id; use collab_database::database::gen_database_group_id;
use collab_database::rows::{RowDetail, RowId}; use collab_database::rows::{Row, RowId};
use collab_database::views::{GroupMap, GroupMapBuilder, GroupSettingBuilder, GroupSettingMap}; use collab_database::views::{GroupMap, GroupMapBuilder, GroupSettingBuilder, GroupSettingMap};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
@ -103,7 +103,7 @@ pub struct GroupData {
pub field_id: String, pub field_id: String,
pub is_default: bool, pub is_default: bool,
pub is_visible: bool, pub is_visible: bool,
pub(crate) rows: Vec<RowDetail>, pub(crate) rows: Vec<Row>,
} }
impl GroupData { impl GroupData {
@ -119,18 +119,11 @@ impl GroupData {
} }
pub fn contains_row(&self, row_id: &RowId) -> bool { pub fn contains_row(&self, row_id: &RowId) -> bool {
self self.rows.iter().any(|row| &row.id == row_id)
.rows
.iter()
.any(|row_detail| &row_detail.row.id == row_id)
} }
pub fn remove_row(&mut self, row_id: &RowId) { pub fn remove_row(&mut self, row_id: &RowId) {
match self match self.rows.iter().position(|row| &row.id == row_id) {
.rows
.iter()
.position(|row_detail| &row_detail.row.id == row_id)
{
None => {}, None => {},
Some(pos) => { Some(pos) => {
self.rows.remove(pos); self.rows.remove(pos);
@ -138,18 +131,18 @@ impl GroupData {
} }
} }
pub fn add_row(&mut self, row_detail: RowDetail) { pub fn add_row(&mut self, row: Row) {
match self.rows.iter().find(|r| r.row.id == row_detail.row.id) { match self.rows.iter().find(|r| r.id == row.id) {
None => { None => {
self.rows.push(row_detail); self.rows.push(row);
}, },
Some(_) => {}, Some(_) => {},
} }
} }
pub fn insert_row(&mut self, index: usize, row_detail: RowDetail) { pub fn insert_row(&mut self, index: usize, row: Row) {
if index < self.rows.len() { if index < self.rows.len() {
self.rows.insert(index, row_detail); self.rows.insert(index, row);
} else { } else {
tracing::error!( tracing::error!(
"Insert row index:{} beyond the bounds:{},", "Insert row index:{} beyond the bounds:{},",
@ -160,10 +153,7 @@ impl GroupData {
} }
pub fn index_of_row(&self, row_id: &RowId) -> Option<usize> { pub fn index_of_row(&self, row_id: &RowId) -> Option<usize> {
self self.rows.iter().position(|row| &row.id == row_id)
.rows
.iter()
.position(|row_detail| &row_detail.row.id == row_id)
} }
pub fn number_of_row(&self) -> usize { pub fn number_of_row(&self) -> usize {

View File

@ -3,7 +3,7 @@ use std::sync::Arc;
use async_trait::async_trait; use async_trait::async_trait;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::{Cell, RowDetail, RowId}; use collab_database::rows::{Cell, Row, RowId};
use flowy_error::FlowyResult; use flowy_error::FlowyResult;
@ -36,7 +36,7 @@ pub struct GeneratedGroups {
} }
pub struct MoveGroupRowContext<'a> { pub struct MoveGroupRowContext<'a> {
pub row_detail: &'a RowDetail, pub row: &'a Row,
pub row_changeset: &'a mut RowChangeset, pub row_changeset: &'a mut RowChangeset,
pub field: &'a Field, pub field: &'a Field,
pub to_group_id: &'a str, pub to_group_id: &'a str,

View File

@ -5,7 +5,7 @@ use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::{Cell, Row, RowDetail, RowId}; use collab_database::rows::{Cell, Row, RowId};
use rayon::prelude::ParallelSliceMut; use rayon::prelude::ParallelSliceMut;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::sync::RwLock; use tokio::sync::RwLock;
@ -28,8 +28,8 @@ use crate::services::sort::{
pub trait SortDelegate: Send + Sync { pub trait SortDelegate: Send + Sync {
async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option<Arc<Sort>>; async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option<Arc<Sort>>;
/// Returns all the rows after applying grid's filter /// Returns all the rows after applying grid's filter
async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>>; async fn get_rows(&self, view_id: &str) -> Vec<Arc<Row>>;
async fn filter_row(&self, row_detail: &RowDetail) -> bool; async fn filter_row(&self, row_detail: &Row) -> bool;
async fn get_field(&self, field_id: &str) -> Option<Field>; async fn get_field(&self, field_id: &str) -> Option<Field>;
async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field>; async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field>;
} }
@ -95,22 +95,22 @@ impl SortController {
} }
} }
pub async fn did_create_row(&self, preliminary_index: usize, row_detail: &RowDetail) { pub async fn did_create_row(&self, preliminary_index: usize, row: &Row) {
if !self.delegate.filter_row(row_detail).await { if !self.delegate.filter_row(row).await {
return; return;
} }
if !self.sorts.is_empty() { if !self.sorts.is_empty() {
self self
.gen_task( .gen_task(
SortEvent::NewRowInserted(row_detail.clone()), SortEvent::NewRowInserted(row.clone()),
QualityOfService::Background, QualityOfService::Background,
) )
.await; .await;
} else { } else {
let result = InsertRowResult { let result = InsertRowResult {
view_id: self.view_id.clone(), view_id: self.view_id.clone(),
row: row_detail.clone(), row: row.clone(),
index: preliminary_index, index: preliminary_index,
}; };
let _ = self let _ = self
@ -130,31 +130,15 @@ impl SortController {
// #[tracing::instrument(name = "process_sort_task", level = "trace", skip_all, err)] // #[tracing::instrument(name = "process_sort_task", level = "trace", skip_all, err)]
pub async fn process(&mut self, predicate: &str) -> FlowyResult<()> { pub async fn process(&mut self, predicate: &str) -> FlowyResult<()> {
let event_type = SortEvent::from_str(predicate).unwrap(); let event_type = SortEvent::from_str(predicate).unwrap();
let mut row_details = self.delegate.get_rows(&self.view_id).await; let mut rows = self.delegate.get_rows(&self.view_id).await;
match event_type { match event_type {
SortEvent::SortDidChanged | SortEvent::DeleteAllSorts => { SortEvent::SortDidChanged | SortEvent::DeleteAllSorts => {
self.sort_rows(&mut row_details).await; self.sort_rows_and_notify(&mut rows).await;
let row_orders = row_details
.iter()
.map(|row_detail| row_detail.row.id.to_string())
.collect::<Vec<String>>();
let notification = ReorderAllRowsResult {
view_id: self.view_id.clone(),
row_orders,
};
let _ = self
.notifier
.send(DatabaseViewChanged::ReorderAllRowsNotification(
notification,
));
}, },
SortEvent::RowDidChanged(row_id) => { SortEvent::RowDidChanged(row_id) => {
let old_row_index = self.row_index_cache.get(&row_id).cloned(); let old_row_index = self.row_index_cache.get(&row_id).cloned();
self.sort_rows(&mut rows).await;
self.sort_rows(&mut row_details).await;
let new_row_index = self.row_index_cache.get(&row_id).cloned(); let new_row_index = self.row_index_cache.get(&row_id).cloned();
match (old_row_index, new_row_index) { match (old_row_index, new_row_index) {
(Some(old_row_index), Some(new_row_index)) => { (Some(old_row_index), Some(new_row_index)) => {
@ -176,17 +160,17 @@ impl SortController {
_ => tracing::trace!("The row index cache is outdated"), _ => tracing::trace!("The row index cache is outdated"),
} }
}, },
SortEvent::NewRowInserted(row_detail) => { SortEvent::NewRowInserted(row) => {
self.sort_rows(&mut row_details).await; self.sort_rows(&mut rows).await;
let row_index = self.row_index_cache.get(&row_detail.row.id).cloned(); let row_index = self.row_index_cache.get(&row.id).cloned();
match row_index { match row_index {
Some(row_index) => { Some(row_index) => {
let notification = InsertRowResult { let notification = InsertRowResult {
view_id: self.view_id.clone(), view_id: self.view_id.clone(),
row: row_detail.clone(), row: row.clone(),
index: row_index, index: row_index,
}; };
self.row_index_cache.insert(row_detail.row.id, row_index); self.row_index_cache.insert(row.id, row_index);
let _ = self let _ = self
.notifier .notifier
.send(DatabaseViewChanged::InsertRowNotification(notification)); .send(DatabaseViewChanged::InsertRowNotification(notification));
@ -210,20 +194,36 @@ impl SortController {
self.task_scheduler.write().await.add_task(task); self.task_scheduler.write().await.add_task(task);
} }
pub async fn sort_rows(&mut self, rows: &mut Vec<Arc<RowDetail>>) { pub async fn sort_rows_and_notify(&mut self, rows: &mut Vec<Arc<Row>>) {
self.sort_rows(rows).await;
let row_orders = rows
.iter()
.map(|row| row.id.to_string())
.collect::<Vec<String>>();
let notification = ReorderAllRowsResult {
view_id: self.view_id.clone(),
row_orders,
};
let _ = self
.notifier
.send(DatabaseViewChanged::ReorderAllRowsNotification(
notification,
));
}
pub async fn sort_rows(&mut self, rows: &mut Vec<Arc<Row>>) {
if self.sorts.is_empty() { if self.sorts.is_empty() {
return; return;
} }
let fields = self.delegate.get_fields(&self.view_id, None).await; let fields = self.delegate.get_fields(&self.view_id, None).await;
for sort in self.sorts.iter().rev() { for sort in self.sorts.iter().rev() {
rows rows.par_sort_by(|left, right| cmp_row(left, right, sort, &fields, &self.cell_cache));
.par_sort_by(|left, right| cmp_row(&left.row, &right.row, sort, &fields, &self.cell_cache));
} }
rows.iter().enumerate().for_each(|(index, row_detail)| { rows.iter().enumerate().for_each(|(index, row)| {
self self.row_index_cache.insert(row.id.clone(), index);
.row_index_cache
.insert(row_detail.row.id.clone(), index);
}); });
} }
@ -363,7 +363,7 @@ fn cmp_cell(
enum SortEvent { enum SortEvent {
SortDidChanged, SortDidChanged,
RowDidChanged(RowId), RowDidChanged(RowId),
NewRowInserted(RowDetail), NewRowInserted(Row),
DeleteAllSorts, DeleteAllSorts,
} }

View File

@ -3,7 +3,7 @@ use std::cmp::Ordering;
use anyhow::bail; use anyhow::bail;
use collab::preclude::Any; use collab::preclude::Any;
use collab::util::AnyMapExt; use collab::util::AnyMapExt;
use collab_database::rows::{RowDetail, RowId}; use collab_database::rows::{Row, RowId};
use collab_database::views::{SortMap, SortMapBuilder}; use collab_database::views::{SortMap, SortMapBuilder};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -113,7 +113,7 @@ pub struct ReorderSingleRowResult {
#[derive(Clone)] #[derive(Clone)]
pub struct InsertRowResult { pub struct InsertRowResult {
pub view_id: String, pub view_id: String,
pub row: RowDetail, pub row: Row,
pub index: usize, pub index: usize,
} }

View File

@ -11,17 +11,17 @@ use crate::database::block_test::script::RowScript::*;
#[tokio::test] #[tokio::test]
async fn created_at_field_test() { async fn created_at_field_test() {
let mut test = DatabaseRowTest::new().await; let mut test = DatabaseRowTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
test test
.run_scripts(vec![CreateEmptyRow, AssertRowCount(row_count + 1)]) .run_scripts(vec![CreateEmptyRow, AssertRowCount(row_count + 1)])
.await; .await;
// Get created time of the new row. // Get created time of the new row.
let row_detail = test.get_rows().await.last().cloned().unwrap(); let row = test.get_rows().await.last().cloned().unwrap();
let updated_at_field = test.get_first_field(FieldType::CreatedTime).await; let updated_at_field = test.get_first_field(FieldType::CreatedTime).await;
let cell = test let cell = test
.editor .editor
.get_cell(&updated_at_field.id, &row_detail.row.id) .get_cell(&updated_at_field.id, &row.id)
.await .await
.unwrap(); .unwrap();
let created_at_timestamp = DateCellData::from(&cell).timestamp.unwrap(); let created_at_timestamp = DateCellData::from(&cell).timestamp.unwrap();
@ -34,11 +34,11 @@ async fn created_at_field_test() {
#[tokio::test] #[tokio::test]
async fn update_at_field_test() { async fn update_at_field_test() {
let mut test = DatabaseRowTest::new().await; let mut test = DatabaseRowTest::new().await;
let row_detail = test.get_rows().await.remove(0); let row = test.get_rows().await.remove(0);
let last_edit_field = test.get_first_field(FieldType::LastEditedTime).await; let last_edit_field = test.get_first_field(FieldType::LastEditedTime).await;
let cell = test let cell = test
.editor .editor
.get_cell(&last_edit_field.id, &row_detail.row.id) .get_cell(&last_edit_field.id, &row.id)
.await .await
.unwrap(); .unwrap();
let old_updated_at = DateCellData::from(&cell).timestamp.unwrap(); let old_updated_at = DateCellData::from(&cell).timestamp.unwrap();
@ -46,17 +46,17 @@ async fn update_at_field_test() {
tokio::time::sleep(Duration::from_millis(1000)).await; tokio::time::sleep(Duration::from_millis(1000)).await;
test test
.run_script(UpdateTextCell { .run_script(UpdateTextCell {
row_id: row_detail.row.id.clone(), row_id: row.id.clone(),
content: "test".to_string(), content: "test".to_string(),
}) })
.await; .await;
// Get the updated time of the row. // Get the updated time of the row.
let row_detail = test.get_rows().await.remove(0); let row = test.get_rows().await.remove(0);
let last_edit_field = test.get_first_field(FieldType::LastEditedTime).await; let last_edit_field = test.get_first_field(FieldType::LastEditedTime).await;
let cell = test let cell = test
.editor .editor
.get_cell(&last_edit_field.id, &row_detail.row.id) .get_cell(&last_edit_field.id, &row.id)
.await .await
.unwrap(); .unwrap();
let new_updated_at = DateCellData::from(&cell).timestamp.unwrap(); let new_updated_at = DateCellData::from(&cell).timestamp.unwrap();

View File

@ -37,13 +37,13 @@ impl DatabaseRowTest {
self self
.row_by_row_id .row_by_row_id
.insert(row_detail.row.id.to_string(), row_detail.into()); .insert(row_detail.row.id.to_string(), row_detail.into());
self.row_details = self.get_rows().await; self.rows = self.get_rows().await;
}, },
RowScript::UpdateTextCell { row_id, content } => { RowScript::UpdateTextCell { row_id, content } => {
self.update_text_cell(row_id, &content).await.unwrap(); self.update_text_cell(row_id, &content).await.unwrap();
}, },
RowScript::AssertRowCount(expected_row_count) => { RowScript::AssertRowCount(expected_row_count) => {
assert_eq!(expected_row_count, self.row_details.len()); assert_eq!(expected_row_count, self.rows.len());
}, },
} }
} }

View File

@ -15,10 +15,10 @@ use crate::database::cell_test::script::DatabaseCellTest;
async fn grid_cell_update() { async fn grid_cell_update() {
let mut test = DatabaseCellTest::new().await; let mut test = DatabaseCellTest::new().await;
let fields = test.get_fields().await; let fields = test.get_fields().await;
let rows = &test.row_details; let rows = &test.rows;
let mut scripts = vec![]; let mut scripts = vec![];
for row_detail in rows.iter() { for row in rows.iter() {
for field in &fields { for field in &fields {
let field_type = FieldType::from(field.field_type); let field_type = FieldType::from(field.field_type);
if field_type == FieldType::LastEditedTime || field_type == FieldType::CreatedTime { if field_type == FieldType::LastEditedTime || field_type == FieldType::CreatedTime {
@ -63,7 +63,7 @@ async fn grid_cell_update() {
scripts.push(UpdateCell { scripts.push(UpdateCell {
view_id: test.view_id.clone(), view_id: test.view_id.clone(),
field_id: field.id.clone(), field_id: field.id.clone(),
row_id: row_detail.row.id.clone(), row_id: row.id.clone(),
changeset: cell_changeset, changeset: cell_changeset,
is_err: false, is_err: false,
}); });
@ -134,7 +134,7 @@ async fn update_updated_at_field_on_other_cell_update() {
test test
.run_script(UpdateCell { .run_script(UpdateCell {
view_id: test.view_id.clone(), view_id: test.view_id.clone(),
row_id: test.row_details[0].row.id.clone(), row_id: test.rows[0].id.clone(),
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
changeset: BoxAny::new("change".to_string()), changeset: BoxAny::new("change".to_string()),
is_err: false, is_err: false,

View File

@ -4,7 +4,7 @@ use std::sync::Arc;
use collab_database::database::gen_database_view_id; use collab_database::database::gen_database_view_id;
use collab_database::entity::SelectOption; use collab_database::entity::SelectOption;
use collab_database::fields::Field; use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId}; use collab_database::rows::{Row, RowId};
use lib_infra::box_any::BoxAny; use lib_infra::box_any::BoxAny;
use strum::EnumCount; use strum::EnumCount;
@ -31,7 +31,7 @@ pub struct DatabaseEditorTest {
pub view_id: String, pub view_id: String,
pub editor: Arc<DatabaseEditor>, pub editor: Arc<DatabaseEditor>,
pub fields: Vec<Arc<Field>>, pub fields: Vec<Arc<Field>>,
pub row_details: Vec<Arc<RowDetail>>, pub rows: Vec<Arc<Row>>,
pub field_count: usize, pub field_count: usize,
pub row_by_row_id: HashMap<String, RowMetaPB>, pub row_by_row_id: HashMap<String, RowMetaPB>,
} }
@ -86,7 +86,7 @@ impl DatabaseEditorTest {
.map(Arc::new) .map(Arc::new)
.collect(); .collect();
let rows = editor let rows = editor
.get_all_row_details(&test.child_view.id) .get_all_rows(&test.child_view.id)
.await .await
.unwrap() .unwrap()
.into_iter() .into_iter()
@ -98,7 +98,7 @@ impl DatabaseEditorTest {
view_id, view_id,
editor, editor,
fields, fields,
row_details: rows, rows,
field_count: FieldType::COUNT, field_count: FieldType::COUNT,
row_by_row_id: HashMap::default(), row_by_row_id: HashMap::default(),
} }
@ -108,12 +108,8 @@ impl DatabaseEditorTest {
self.editor.get_all_filters(&self.view_id).await.items self.editor.get_all_filters(&self.view_id).await.items
} }
pub async fn get_rows(&self) -> Vec<Arc<RowDetail>> { pub async fn get_rows(&self) -> Vec<Arc<Row>> {
self self.editor.get_all_rows(&self.view_id).await.unwrap()
.editor
.get_all_row_details(&self.view_id)
.await
.unwrap()
} }
pub async fn get_field(&self, field_id: &str, field_type: FieldType) -> Field { pub async fn get_field(&self, field_id: &str, field_type: FieldType) -> Field {

View File

@ -123,14 +123,10 @@ impl DatabaseFieldTest {
} => { } => {
let field = self.editor.get_field(&field_id).await.unwrap(); let field = self.editor.get_field(&field_id).await.unwrap();
let rows = self let rows = self.editor.get_all_rows(&self.view_id()).await.unwrap();
.editor let row = rows.get(row_index).unwrap();
.get_all_row_details(&self.view_id())
.await
.unwrap();
let row_detail = rows.get(row_index).unwrap();
let cell = row_detail.row.cells.get(&field_id).unwrap().clone(); let cell = row.cells.get(&field_id).unwrap().clone();
let content = stringify_cell(&cell, &field); let content = stringify_cell(&cell, &field);
assert_eq!(content, expected_content); assert_eq!(content, expected_content);
}, },

View File

@ -8,7 +8,7 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
async fn grid_filter_checkbox_is_check_test() { async fn grid_filter_checkbox_is_check_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let expected = 3; let expected = 3;
let row_count = test.row_details.len(); let row_count = test.rows.len();
// The initial number of checked is 3 // The initial number of checked is 3
// The initial number of unchecked is 4 // The initial number of unchecked is 4
let scripts = vec![ let scripts = vec![
@ -32,7 +32,7 @@ async fn grid_filter_checkbox_is_check_test() {
async fn grid_filter_checkbox_is_uncheck_test() { async fn grid_filter_checkbox_is_uncheck_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let expected = 4; let expected = 4;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
parent_filter_id: None, parent_filter_id: None,

View File

@ -9,12 +9,12 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
async fn grid_filter_checklist_is_incomplete_test() { async fn grid_filter_checklist_is_incomplete_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let expected = 5; let expected = 5;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let option_ids = get_checklist_cell_options(&test).await; let option_ids = get_checklist_cell_options(&test).await;
let scripts = vec![ let scripts = vec![
UpdateChecklistCell { UpdateChecklistCell {
row_id: test.row_details[0].row.id.clone(), row_id: test.rows[0].id.clone(),
selected_option_ids: option_ids, selected_option_ids: option_ids,
}, },
CreateDataFilter { CreateDataFilter {
@ -37,11 +37,11 @@ async fn grid_filter_checklist_is_incomplete_test() {
async fn grid_filter_checklist_is_complete_test() { async fn grid_filter_checklist_is_complete_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let expected = 2; let expected = 2;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let option_ids = get_checklist_cell_options(&test).await; let option_ids = get_checklist_cell_options(&test).await;
let scripts = vec![ let scripts = vec![
UpdateChecklistCell { UpdateChecklistCell {
row_id: test.row_details[0].row.id.clone(), row_id: test.rows[0].id.clone(),
selected_option_ids: option_ids, selected_option_ids: option_ids,
}, },
CreateDataFilter { CreateDataFilter {
@ -62,10 +62,7 @@ async fn grid_filter_checklist_is_complete_test() {
async fn get_checklist_cell_options(test: &DatabaseFilterTest) -> Vec<String> { async fn get_checklist_cell_options(test: &DatabaseFilterTest) -> Vec<String> {
let field = test.get_first_field(FieldType::Checklist).await; let field = test.get_first_field(FieldType::Checklist).await;
let row_cell = test let row_cell = test.editor.get_cell(&field.id, &test.rows[0].id).await;
.editor
.get_cell(&field.id, &test.row_details[0].row.id)
.await;
row_cell row_cell
.map_or(ChecklistCellData::default(), |cell| { .map_or(ChecklistCellData::default(), |cell| {
ChecklistCellData::from(&cell) ChecklistCellData::from(&cell)

View File

@ -7,7 +7,7 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
#[tokio::test] #[tokio::test]
async fn grid_filter_date_is_test() { async fn grid_filter_date_is_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 3; let expected = 3;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -32,7 +32,7 @@ async fn grid_filter_date_is_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_date_after_test() { async fn grid_filter_date_after_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 3; let expected = 3;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -57,7 +57,7 @@ async fn grid_filter_date_after_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_date_on_or_after_test() { async fn grid_filter_date_on_or_after_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 3; let expected = 3;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -82,7 +82,7 @@ async fn grid_filter_date_on_or_after_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_date_on_or_before_test() { async fn grid_filter_date_on_or_before_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 4; let expected = 4;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -107,7 +107,7 @@ async fn grid_filter_date_on_or_before_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_date_within_test() { async fn grid_filter_date_within_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 5; let expected = 5;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {

View File

@ -7,7 +7,7 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
#[tokio::test] #[tokio::test]
async fn grid_filter_number_is_equal_test() { async fn grid_filter_number_is_equal_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 1; let expected = 1;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -30,7 +30,7 @@ async fn grid_filter_number_is_equal_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_number_is_less_than_test() { async fn grid_filter_number_is_less_than_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 2; let expected = 2;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -54,7 +54,7 @@ async fn grid_filter_number_is_less_than_test() {
#[should_panic] #[should_panic]
async fn grid_filter_number_is_less_than_test2() { async fn grid_filter_number_is_less_than_test2() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 2; let expected = 2;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -77,7 +77,7 @@ async fn grid_filter_number_is_less_than_test2() {
#[tokio::test] #[tokio::test]
async fn grid_filter_number_is_less_than_or_equal_test() { async fn grid_filter_number_is_less_than_or_equal_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 3; let expected = 3;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -100,7 +100,7 @@ async fn grid_filter_number_is_less_than_or_equal_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_number_is_empty_test() { async fn grid_filter_number_is_empty_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 2; let expected = 2;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -123,7 +123,7 @@ async fn grid_filter_number_is_empty_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_number_is_not_empty_test() { async fn grid_filter_number_is_not_empty_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 5; let expected = 5;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {

View File

@ -301,7 +301,7 @@ impl DatabaseFilterTest {
} }
}, },
FilterScript::AssertNumberOfVisibleRows { expected } => { FilterScript::AssertNumberOfVisibleRows { expected } => {
let grid = self.editor.open_database(&self.view_id).await.unwrap(); let grid = self.editor.open_database_view(&self.view_id).await.unwrap();
assert_eq!(grid.rows.len(), expected); assert_eq!(grid.rows.len(), expected);
}, },
FilterScript::Wait { millisecond } => { FilterScript::Wait { millisecond } => {

View File

@ -84,7 +84,7 @@ async fn grid_filter_multi_select_is_test2() {
async fn grid_filter_single_select_is_empty_test() { async fn grid_filter_single_select_is_empty_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let expected = 3; let expected = 3;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
parent_filter_id: None, parent_filter_id: None,
@ -109,7 +109,7 @@ async fn grid_filter_single_select_is_test() {
let field = test.get_first_field(FieldType::SingleSelect).await; let field = test.get_first_field(FieldType::SingleSelect).await;
let mut options = test.get_single_select_type_option(&field.id).await; let mut options = test.get_single_select_type_option(&field.id).await;
let expected = 2; let expected = 2;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
parent_filter_id: None, parent_filter_id: None,
@ -135,7 +135,7 @@ async fn grid_filter_single_select_is_test2() {
let row_details = test.get_rows().await; let row_details = test.get_rows().await;
let mut options = test.get_single_select_type_option(&field.id).await; let mut options = test.get_single_select_type_option(&field.id).await;
let option = options.remove(0); let option = options.remove(0);
let row_count = test.row_details.len(); let row_count = test.rows.len();
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -152,13 +152,13 @@ async fn grid_filter_single_select_is_test2() {
}, },
AssertNumberOfVisibleRows { expected: 2 }, AssertNumberOfVisibleRows { expected: 2 },
UpdateSingleSelectCell { UpdateSingleSelectCell {
row_id: row_details[1].row.id.clone(), row_id: row_details[1].id.clone(),
option_id: option.id.clone(), option_id: option.id.clone(),
changed: None, changed: None,
}, },
AssertNumberOfVisibleRows { expected: 3 }, AssertNumberOfVisibleRows { expected: 3 },
UpdateSingleSelectCell { UpdateSingleSelectCell {
row_id: row_details[1].row.id.clone(), row_id: row_details[1].id.clone(),
option_id: "".to_string(), option_id: "".to_string(),
changed: Some(FilterRowChanged { changed: Some(FilterRowChanged {
showing_num_of_rows: 0, showing_num_of_rows: 0,

View File

@ -102,7 +102,7 @@ async fn grid_filter_contain_text_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_contain_text_test2() { async fn grid_filter_contain_text_test2() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_detail = test.row_details.clone(); let row_detail = test.rows.clone();
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -118,7 +118,7 @@ async fn grid_filter_contain_text_test2() {
}), }),
}, },
UpdateTextCell { UpdateTextCell {
row_id: row_detail[1].row.id.clone(), row_id: row_detail[1].id.clone(),
text: "ABC".to_string(), text: "ABC".to_string(),
changed: Some(FilterRowChanged { changed: Some(FilterRowChanged {
showing_num_of_rows: 1, showing_num_of_rows: 1,
@ -257,7 +257,7 @@ async fn grid_filter_delete_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_update_empty_text_cell_test() { async fn grid_filter_update_empty_text_cell_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_details = test.row_details.clone(); let row = test.rows.clone();
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
parent_filter_id: None, parent_filter_id: None,
@ -273,7 +273,7 @@ async fn grid_filter_update_empty_text_cell_test() {
}, },
AssertFilterCount { count: 1 }, AssertFilterCount { count: 1 },
UpdateTextCell { UpdateTextCell {
row_id: row_details[0].row.id.clone(), row_id: row[0].id.clone(),
text: "".to_string(), text: "".to_string(),
changed: Some(FilterRowChanged { changed: Some(FilterRowChanged {
showing_num_of_rows: 1, showing_num_of_rows: 1,

View File

@ -7,7 +7,7 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
#[tokio::test] #[tokio::test]
async fn grid_filter_time_is_equal_test() { async fn grid_filter_time_is_equal_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 1; let expected = 1;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -30,7 +30,7 @@ async fn grid_filter_time_is_equal_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_time_is_less_than_test() { async fn grid_filter_time_is_less_than_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 1; let expected = 1;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -54,7 +54,7 @@ async fn grid_filter_time_is_less_than_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_time_is_less_than_or_equal_test() { async fn grid_filter_time_is_less_than_or_equal_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 1; let expected = 1;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -77,7 +77,7 @@ async fn grid_filter_time_is_less_than_or_equal_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_time_is_empty_test() { async fn grid_filter_time_is_empty_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 6; let expected = 6;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {
@ -100,7 +100,7 @@ async fn grid_filter_time_is_empty_test() {
#[tokio::test] #[tokio::test]
async fn grid_filter_time_is_not_empty_test() { async fn grid_filter_time_is_not_empty_test() {
let mut test = DatabaseFilterTest::new().await; let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len(); let row_count = test.rows.len();
let expected = 1; let expected = 1;
let scripts = vec![ let scripts = vec![
CreateDataFilter { CreateDataFilter {

View File

@ -184,13 +184,13 @@ async fn change_date_on_moving_row_to_another_group() {
let group = test.group_at_index(2).await; let group = test.group_at_index(2).await;
let rows = group.clone().rows; let rows = group.clone().rows;
let row_id = &rows.first().unwrap().id; let row_id = &rows.first().unwrap().id;
let row_detail = test let row = test
.get_rows() .get_rows()
.await .await
.into_iter() .into_iter()
.find(|r| r.row.id.to_string() == *row_id) .find(|r| r.id.to_string() == *row_id)
.unwrap(); .unwrap();
let cell = row_detail.row.cells.get(&date_field.id.clone()).unwrap(); let cell = row.cells.get(&date_field.id.clone()).unwrap();
let date_cell = DateCellData::from(cell); let date_cell = DateCellData::from(cell);
let date_time = let date_time =

View File

@ -42,12 +42,12 @@ async fn according_to_text_contains_filter_test() {
let scripts = vec![ let scripts = vec![
AssertCellExistence { AssertCellExistence {
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
row_index: test.row_details.len() - 1, row_index: test.rows.len() - 1,
exists: true, exists: true,
}, },
AssertCellContent { AssertCellContent {
field_id: text_field.id, field_id: text_field.id,
row_index: test.row_details.len() - 1, row_index: test.rows.len() - 1,
expected_content: "sample".to_string(), expected_content: "sample".to_string(),
}, },
@ -84,7 +84,7 @@ async fn according_to_empty_text_contains_filter_test() {
let scripts = vec![AssertCellExistence { let scripts = vec![AssertCellExistence {
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
row_index: test.row_details.len() - 1, row_index: test.rows.len() - 1,
exists: false, exists: false,
}]; }];
@ -278,7 +278,7 @@ async fn according_to_invalid_date_time_is_filter_test() {
AssertRowCount(8), AssertRowCount(8),
AssertCellExistence { AssertCellExistence {
field_id: datetime_field.id.clone(), field_id: datetime_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: false, exists: false,
}, },
]; ];

View File

@ -29,12 +29,12 @@ async fn row_data_payload_with_empty_hashmap_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: false, exists: false,
}, },
AssertCellContent { AssertCellContent {
field_id: text_field.id, field_id: text_field.id,
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: "".to_string(), expected_content: "".to_string(),
}, },
@ -64,18 +64,18 @@ async fn row_data_payload_with_unknown_field_id_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: false, exists: false,
}, },
AssertCellContent { AssertCellContent {
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: "".to_string(), expected_content: "".to_string(),
}, },
AssertCellExistence { AssertCellExistence {
field_id: malformed_field_id.to_string(), field_id: malformed_field_id.to_string(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: false, exists: false,
}, },
]; ];
@ -101,12 +101,12 @@ async fn row_data_payload_with_empty_string_text_data_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertCellContent { AssertCellContent {
field_id: text_field.id, field_id: text_field.id,
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: cell_data.to_string(), expected_content: cell_data.to_string(),
}, },
@ -133,12 +133,12 @@ async fn row_data_payload_with_text_data_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertCellContent { AssertCellContent {
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: cell_data.to_string(), expected_content: cell_data.to_string(),
}, },
@ -174,34 +174,34 @@ async fn row_data_payload_with_multi_text_data_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: text_field.id.clone(), field_id: text_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertCellContent { AssertCellContent {
field_id: text_field.id, field_id: text_field.id,
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: text_cell_data.to_string(), expected_content: text_cell_data.to_string(),
}, },
AssertCellExistence { AssertCellExistence {
field_id: number_field.id.clone(), field_id: number_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertCellContent { AssertCellContent {
field_id: number_field.id, field_id: number_field.id,
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: "$1,234".to_string(), expected_content: "$1,234".to_string(),
}, },
AssertCellExistence { AssertCellExistence {
field_id: url_field.id.clone(), field_id: url_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertCellContent { AssertCellContent {
field_id: url_field.id, field_id: url_field.id,
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: url_cell_data.to_string(), expected_content: url_cell_data.to_string(),
}, },
@ -228,12 +228,12 @@ async fn row_data_payload_with_date_time_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: date_field.id.clone(), field_id: date_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertCellContent { AssertCellContent {
field_id: date_field.id.clone(), field_id: date_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: "2024/03/15".to_string(), expected_content: "2024/03/15".to_string(),
}, },
@ -264,7 +264,7 @@ async fn row_data_payload_with_invalid_date_time_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: date_field.id.clone(), field_id: date_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: false, exists: false,
}, },
]; ];
@ -290,12 +290,12 @@ async fn row_data_payload_with_checkbox_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: checkbox_field.id.clone(), field_id: checkbox_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertCellContent { AssertCellContent {
field_id: checkbox_field.id.clone(), field_id: checkbox_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: cell_data.to_string(), expected_content: cell_data.to_string(),
}, },
@ -336,12 +336,12 @@ async fn row_data_payload_with_select_option_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: multi_select_field.id.clone(), field_id: multi_select_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertCellContent { AssertCellContent {
field_id: multi_select_field.id.clone(), field_id: multi_select_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: stringified_cell_data, expected_content: stringified_cell_data,
}, },
@ -373,12 +373,12 @@ async fn row_data_payload_with_invalid_select_option_id_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: multi_select_field.id.clone(), field_id: multi_select_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertSelectOptionCellStrict { AssertSelectOptionCellStrict {
field_id: multi_select_field.id.clone(), field_id: multi_select_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: first_id, expected_content: first_id,
}, },
]; ];
@ -414,12 +414,12 @@ async fn row_data_payload_with_too_many_select_option_test() {
Wait { milliseconds: 100 }, Wait { milliseconds: 100 },
AssertCellExistence { AssertCellExistence {
field_id: single_select_field.id.clone(), field_id: single_select_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
exists: true, exists: true,
}, },
AssertSelectOptionCellStrict { AssertSelectOptionCellStrict {
field_id: single_select_field.id.clone(), field_id: single_select_field.id.clone(),
row_index: test.row_details.len(), row_index: test.rows.len(),
expected_content: stringified_cell_data, expected_content: stringified_cell_data,
}, },
]; ];

View File

@ -63,14 +63,14 @@ impl DatabasePreFillRowCellTest {
self self
.row_by_row_id .row_by_row_id
.insert(row_detail.row.id.to_string(), row_detail.into()); .insert(row_detail.row.id.to_string(), row_detail.into());
self.row_details = self.get_rows().await; self.rows = self.get_rows().await;
}, },
PreFillRowCellTestScript::CreateRowWithPayload { payload } => { PreFillRowCellTestScript::CreateRowWithPayload { payload } => {
let row_detail = self.editor.create_row(payload).await.unwrap().unwrap(); let row_detail = self.editor.create_row(payload).await.unwrap().unwrap();
self self
.row_by_row_id .row_by_row_id
.insert(row_detail.row.id.to_string(), row_detail.into()); .insert(row_detail.row.id.to_string(), row_detail.into());
self.row_details = self.get_rows().await; self.rows = self.get_rows().await;
}, },
PreFillRowCellTestScript::InsertFilter { filter } => self PreFillRowCellTestScript::InsertFilter { filter } => self
.editor .editor
@ -86,11 +86,7 @@ impl DatabasePreFillRowCellTest {
.await .await
.unwrap(), .unwrap(),
PreFillRowCellTestScript::AssertRowCount(expected_row_count) => { PreFillRowCellTestScript::AssertRowCount(expected_row_count) => {
let rows = self let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
.editor
.get_all_row_details(&self.view_id)
.await
.unwrap();
assert_eq!(expected_row_count, rows.len()); assert_eq!(expected_row_count, rows.len());
}, },
PreFillRowCellTestScript::AssertCellExistence { PreFillRowCellTestScript::AssertCellExistence {
@ -98,15 +94,9 @@ impl DatabasePreFillRowCellTest {
row_index, row_index,
exists, exists,
} => { } => {
let rows = self let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
.editor let row = rows.get(row_index).unwrap();
.get_all_row_details(&self.view_id) let cell = row.cells.get(&field_id).cloned();
.await
.unwrap();
let row_detail = rows.get(row_index).unwrap();
let cell = row_detail.row.cells.get(&field_id).cloned();
assert_eq!(exists, cell.is_some()); assert_eq!(exists, cell.is_some());
}, },
PreFillRowCellTestScript::AssertCellContent { PreFillRowCellTestScript::AssertCellContent {
@ -116,19 +106,9 @@ impl DatabasePreFillRowCellTest {
} => { } => {
let field = self.editor.get_field(&field_id).await.unwrap(); let field = self.editor.get_field(&field_id).await.unwrap();
let rows = self let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
.editor let row = rows.get(row_index).unwrap();
.get_all_row_details(&self.view_id) let cell = row.cells.get(&field_id).cloned().unwrap_or_default();
.await
.unwrap();
let row_detail = rows.get(row_index).unwrap();
let cell = row_detail
.row
.cells
.get(&field_id)
.cloned()
.unwrap_or_default();
let content = stringify_cell(&cell, &field); let content = stringify_cell(&cell, &field);
assert_eq!(content, expected_content); assert_eq!(content, expected_content);
}, },
@ -137,22 +117,10 @@ impl DatabasePreFillRowCellTest {
row_index, row_index,
expected_content, expected_content,
} => { } => {
let rows = self let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
.editor let row = rows.get(row_index).unwrap();
.get_all_row_details(&self.view_id) let cell = row.cells.get(&field_id).cloned().unwrap_or_default();
.await
.unwrap();
let row_detail = rows.get(row_index).unwrap();
let cell = row_detail
.row
.cells
.get(&field_id)
.cloned()
.unwrap_or_default();
let content = SelectOptionIds::from(&cell).join(SELECTION_IDS_SEPARATOR); let content = SelectOptionIds::from(&cell).join(SELECTION_IDS_SEPARATOR);
assert_eq!(content, expected_content); assert_eq!(content, expected_content);
}, },
PreFillRowCellTestScript::Wait { milliseconds } => { PreFillRowCellTestScript::Wait { milliseconds } => {

View File

@ -33,7 +33,7 @@ async fn export_and_then_import_meta_csv_test() {
let database = test.get_database(&result.database_id).await.unwrap(); let database = test.get_database(&result.database_id).await.unwrap();
let fields = database.get_fields(&result.view_id, None).await; let fields = database.get_fields(&result.view_id, None).await;
let rows = database.get_all_row_details(&result.view_id).await.unwrap(); let rows = database.get_all_rows(&result.view_id).await.unwrap();
assert_eq!(fields[0].field_type, 0); assert_eq!(fields[0].field_type, 0);
assert_eq!(fields[1].field_type, 1); assert_eq!(fields[1].field_type, 1);
assert_eq!(fields[2].field_type, 2); assert_eq!(fields[2].field_type, 2);
@ -46,8 +46,8 @@ async fn export_and_then_import_meta_csv_test() {
assert_eq!(fields[9].field_type, 9); assert_eq!(fields[9].field_type, 9);
for field in fields { for field in fields {
for (index, row_detail) in rows.iter().enumerate() { for (index, row) in rows.iter().enumerate() {
if let Some(cell) = row_detail.row.cells.get(&field.id) { if let Some(cell) = row.cells.get(&field.id) {
let field_type = FieldType::from(field.field_type); let field_type = FieldType::from(field.field_type);
let s = stringify_cell(cell, &field); let s = stringify_cell(cell, &field);
match &field_type { match &field_type {
@ -89,7 +89,7 @@ async fn export_and_then_import_meta_csv_test() {
} else { } else {
panic!( panic!(
"Can not found the cell with id: {} in {:?}", "Can not found the cell with id: {} in {:?}",
field.id, row_detail.row.cells field.id, row.cells
); );
} }
} }
@ -112,7 +112,7 @@ async fn history_database_import_test() {
let database = test.get_database(&result.database_id).await.unwrap(); let database = test.get_database(&result.database_id).await.unwrap();
let fields = database.get_fields(&result.view_id, None).await; let fields = database.get_fields(&result.view_id, None).await;
let rows = database.get_all_row_details(&result.view_id).await.unwrap(); let rows = database.get_all_rows(&result.view_id).await.unwrap();
assert_eq!(fields[0].field_type, 0); assert_eq!(fields[0].field_type, 0);
assert_eq!(fields[1].field_type, 1); assert_eq!(fields[1].field_type, 1);
assert_eq!(fields[2].field_type, 2); assert_eq!(fields[2].field_type, 2);
@ -123,8 +123,8 @@ async fn history_database_import_test() {
assert_eq!(fields[7].field_type, 7); assert_eq!(fields[7].field_type, 7);
for field in fields { for field in fields {
for (index, row_detail) in rows.iter().enumerate() { for (index, row) in rows.iter().enumerate() {
if let Some(cell) = row_detail.row.cells.get(&field.id) { if let Some(cell) = row.cells.get(&field.id) {
let field_type = FieldType::from(field.field_type); let field_type = FieldType::from(field.field_type);
let s = stringify_cell(cell, &field); let s = stringify_cell(cell, &field);
match &field_type { match &field_type {
@ -174,7 +174,7 @@ async fn history_database_import_test() {
} else { } else {
panic!( panic!(
"Can not found the cell with id: {} in {:?}", "Can not found the cell with id: {} in {:?}",
field.id, row_detail.row.cells field.id, row.cells
); );
} }
} }

View File

@ -117,14 +117,10 @@ impl DatabaseSortTest {
}, },
SortScript::AssertCellContentOrder { field_id, orders } => { SortScript::AssertCellContentOrder { field_id, orders } => {
let mut cells = vec![]; let mut cells = vec![];
let rows = self let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
.editor
.get_all_row_details(&self.view_id)
.await
.unwrap();
let field = self.editor.get_field(&field_id).await.unwrap(); let field = self.editor.get_field(&field_id).await.unwrap();
for row_detail in rows { for row in rows {
if let Some(cell) = row_detail.row.cells.get(&field_id) { if let Some(cell) = row.cells.get(&field_id) {
let content = stringify_cell(cell, &field); let content = stringify_cell(cell, &field);
cells.push(content); cells.push(content);
} else { } else {

View File

@ -67,10 +67,10 @@ async fn sort_change_notification_by_update_text_test() {
]; ];
test.run_scripts(scripts).await; test.run_scripts(scripts).await;
let row_details = test.get_rows().await; let row = test.get_rows().await;
let scripts = vec![ let scripts = vec![
UpdateTextCell { UpdateTextCell {
row_id: row_details[1].row.id.clone(), row_id: row[1].id.clone(),
text: "E".to_string(), text: "E".to_string(),
}, },
AssertSortChanged { AssertSortChanged {