Mirror of https://github.com/AppFlowy-IO/AppFlowy.git (synced 2024-08-30 18:12:39 +00:00)
feat: async load database row, async filter, async sort (#6068)

* chore: display date when convert text to date
* chore: filter & sort
* chore: fix filter and sort
* chore: fix test
* chore: clippy
* chore: fix test

This commit is contained in:
parent 242faee2f5
commit 62f0307289
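The heart of this change shows up in the `DatabaseEditor` hunks further down: opening a database now returns the row metadata right away and initializes the row data in the background, in chunks of 10 rows, under a `CancellationToken`, yielding between chunks and re-running the filter/sort notifications as rows arrive. The standalone sketch below is my own simplification of that pattern, not AppFlowy's actual API: `Row` and `load_row` are hypothetical stand-ins for the collab-database types, and only the tokio/tokio-util primitives are the real dependencies the commit relies on.

```rust
use std::sync::Arc;
use tokio::task::yield_now;
use tokio_util::sync::CancellationToken;

#[derive(Clone, Debug)]
struct Row {
    id: u64,
}

// Hypothetical stand-in for `database.init_database_row(..)` + `database_row.get_row()`.
async fn load_row(id: u64) -> Option<Row> {
    Some(Row { id })
}

async fn load_rows_in_chunks(row_ids: Vec<u64>, cancel: CancellationToken) -> Vec<Arc<Row>> {
    const CHUNK_SIZE: usize = 10;
    let mut loaded = Vec::with_capacity(row_ids.len());
    for chunk in row_ids.chunks(CHUNK_SIZE) {
        for id in chunk {
            if let Some(row) = load_row(*id).await {
                loaded.push(Arc::new(row));
            }
        }
        // Stop early if the view was closed or reopened while loading.
        if cancel.is_cancelled() {
            return loaded;
        }
        // Yield between chunks so row loading never monopolizes the async runtime.
        yield_now().await;
    }
    loaded
}

#[tokio::main]
async fn main() {
    let cancel = CancellationToken::new();
    let handle = tokio::spawn(load_rows_in_chunks((0u64..95).collect(), cancel.clone()));
    println!("loaded {} rows", handle.await.unwrap().len());
}
```

Cancelling the token makes the loader stop at the next chunk boundary, which mirrors how `close_database` cancels the in-flight load in the diff below.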
@@ -1,6 +1,6 @@
import 'package:appflowy/plugins/database/application/row/row_service.dart';
import 'package:appflowy/plugins/database/domain/row_listener.dart';
import 'package:appflowy_backend/protobuf/flowy-database2/row_entities.pb.dart';
import 'package:appflowy_backend/protobuf/flowy-database2/protobuf.dart';
import 'package:flutter/material.dart';

import '../cell/cell_cache.dart';
@@ -39,6 +39,9 @@ class RowController {
Future<void> initialize() async {
await _rowBackendSvc.initRow(rowMeta.id);
_rowListener.start(
onRowFetched: (DidFetchRowPB row) {
_rowCache.setRowMeta(row.meta);
},
onMetaChanged: (newRowMeta) {
if (_isDisposed) {
return;
@@ -1,3 +1,5 @@
import 'dart:math';

import 'package:appflowy/generated/locale_keys.g.dart';
import 'package:appflowy/plugins/database/application/row/row_service.dart';
import 'package:appflowy/plugins/database/grid/presentation/widgets/calculations/calculations_row.dart';
@@ -305,22 +307,26 @@ class _GridRowsState extends State<_GridRows> {
buildWhen: (previous, current) => previous.fields != current.fields,
builder: (context, state) {
return Flexible(
child: _WrapScrollView(
scrollController: widget.scrollController,
contentWidth: GridLayout.headerWidth(state.fields),
child: BlocConsumer<GridBloc, GridState>(
listenWhen: (previous, current) =>
previous.rowCount != current.rowCount,
listener: (context, state) => _evaluateFloatingCalculations(),
builder: (context, state) {
return ScrollConfiguration(
behavior: ScrollConfiguration.of(context).copyWith(
scrollbars: false,
),
child: _renderList(context, state),
);
},
),
child: LayoutBuilder(
builder: (BuildContext context, BoxConstraints layoutConstraits) {
return _WrapScrollView(
scrollController: widget.scrollController,
contentWidth: GridLayout.headerWidth(state.fields),
child: BlocConsumer<GridBloc, GridState>(
listenWhen: (previous, current) =>
previous.rowCount != current.rowCount,
listener: (context, state) => _evaluateFloatingCalculations(),
builder: (context, state) {
return ScrollConfiguration(
behavior: ScrollConfiguration.of(context).copyWith(
scrollbars: false,
),
child: _renderList(context, state, layoutConstraits),
);
},
),
);
},
),
);
},
@@ -330,19 +336,19 @@ class _GridRowsState extends State<_GridRows> {
Widget _renderList(
BuildContext context,
GridState state,
BoxConstraints layoutConstraints,
) {
// 1. GridRowBottomBar
// 2. GridCalculationsRow
// 3. Footer Padding
final itemCount = state.rowInfos.length + 3;

return Stack(
children: [
Positioned.fill(
child: ReorderableListView.builder(
/// This is a workaround related to
/// https://github.com/flutter/flutter/issues/25652
cacheExtent: 600,
cacheExtent: max(layoutConstraints.maxHeight * 2, 500),
scrollController: widget.scrollController.verticalController,
physics: const ClampingScrollPhysics(),
buildDefaultDragHandles: false,
@@ -57,7 +57,7 @@ class _DatabaseViewSettingContent extends StatelessWidget {
builder: (context, state) {
return Padding(
padding: EdgeInsets.symmetric(
horizontal: GridSize.horizontalHeaderPadding + 40,
horizontal: GridSize.horizontalHeaderPadding,
),
child: DecoratedBox(
decoration: BoxDecoration(
frontend/appflowy_tauri/src-tauri/Cargo.lock (generated, 14 changes)
@@ -964,7 +964,7 @@ dependencies = [
[[package]]
name = "collab"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"arc-swap",
@@ -989,7 +989,7 @@ dependencies = [
[[package]]
name = "collab-database"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"async-trait",
@@ -1018,7 +1018,7 @@ dependencies = [
[[package]]
name = "collab-document"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"arc-swap",
@@ -1038,7 +1038,7 @@ dependencies = [
[[package]]
name = "collab-entity"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"bytes",
@@ -1057,7 +1057,7 @@ dependencies = [
[[package]]
name = "collab-folder"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"arc-swap",
@@ -1100,7 +1100,7 @@ dependencies = [
[[package]]
name = "collab-plugins"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"async-stream",
@@ -1180,7 +1180,7 @@ dependencies = [
[[package]]
name = "collab-user"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"collab",
@@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"]
# To switch to the local path, run:
# scripts/tool/update_collab_source.sh
# ⚠️⚠️⚠️️
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }

# Working directory: frontend
# To update the commit ID, run:
frontend/appflowy_web_app/src-tauri/Cargo.lock (generated, 14 changes)
@@ -947,7 +947,7 @@ dependencies = [
[[package]]
name = "collab"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"arc-swap",
@@ -972,7 +972,7 @@ dependencies = [
[[package]]
name = "collab-database"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"async-trait",
@@ -1001,7 +1001,7 @@ dependencies = [
[[package]]
name = "collab-document"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"arc-swap",
@@ -1021,7 +1021,7 @@ dependencies = [
[[package]]
name = "collab-entity"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"bytes",
@@ -1040,7 +1040,7 @@ dependencies = [
[[package]]
name = "collab-folder"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"arc-swap",
@@ -1083,7 +1083,7 @@ dependencies = [
[[package]]
name = "collab-plugins"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"async-stream",
@@ -1163,7 +1163,7 @@ dependencies = [
[[package]]
name = "collab-user"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"collab",
@@ -116,13 +116,13 @@ custom-protocol = ["tauri/custom-protocol"]
# To switch to the local path, run:
# scripts/tool/update_collab_source.sh
# ⚠️⚠️⚠️️
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }

# Working directory: frontend
# To update the commit ID, run:
frontend/rust-lib/Cargo.lock (generated, 14 changes)
@@ -825,7 +825,7 @@ dependencies = [
[[package]]
name = "collab"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"arc-swap",
@@ -850,7 +850,7 @@ dependencies = [
[[package]]
name = "collab-database"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"async-trait",
@@ -879,7 +879,7 @@ dependencies = [
[[package]]
name = "collab-document"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"arc-swap",
@@ -899,7 +899,7 @@ dependencies = [
[[package]]
name = "collab-entity"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"bytes",
@@ -918,7 +918,7 @@ dependencies = [
[[package]]
name = "collab-folder"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"arc-swap",
@@ -961,7 +961,7 @@ dependencies = [
[[package]]
name = "collab-plugins"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"async-stream",
@@ -1041,7 +1041,7 @@ dependencies = [
[[package]]
name = "collab-user"
version = "0.2.0"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d#f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d"
source = "git+https://github.com/AppFlowy-IO/AppFlowy-Collab?rev=0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d#0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d"
dependencies = [
"anyhow",
"collab",
@@ -136,13 +136,13 @@ rocksdb = { git = "https://github.com/rust-rocksdb/rust-rocksdb", rev = "1710120
# To switch to the local path, run:
# scripts/tool/update_collab_source.sh
# ⚠️⚠️⚠️️
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "f148b6ba98a12270c2faffaeaf2ee41b2cd84e7d" }
collab = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-entity = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-folder = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-document = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-database = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-plugins = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }
collab-user = { version = "0.2", git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "0b9abf42a4888b3b1789cfa68d2d1dbe9cb7e10d" }

# Working directory: frontend
# To update the commit ID, run:
@@ -107,7 +107,7 @@ impl AppFlowyCore {
let store_preference = Arc::new(KVStorePreferences::new(&config.storage_path).unwrap());
info!("🔥{:?}", &config);

let task_scheduler = TaskDispatcher::new(Duration::from_secs(2));
let task_scheduler = TaskDispatcher::new(Duration::from_secs(10));
let task_dispatcher = Arc::new(RwLock::new(task_scheduler));
runtime.spawn(TaskRunner::run(task_dispatcher.clone()));
@@ -86,6 +86,18 @@ impl From<RowOrder> for RowMetaPB {
}
}

impl From<Row> for RowMetaPB {
fn from(data: Row) -> Self {
Self {
id: data.id.into_inner(),
document_id: None,
icon: None,
cover: None,
is_document_empty: None,
}
}
}

impl std::convert::From<RowDetail> for RowMetaPB {
fn from(row_detail: RowDetail) -> Self {
Self {
@@ -36,7 +36,9 @@ pub(crate) async fn get_database_data_handler(
.get_database_id_with_view_id(view_id.as_ref())
.await?;
let database_editor = manager.get_database_editor(&database_id).await?;
let data = database_editor.open_database(view_id.as_ref()).await?;
let data = database_editor
.async_open_database_view(view_id.as_ref())
.await?;
trace!(
"layout: {:?}, rows: {}, fields: {}",
data.layout_type,
@@ -57,9 +59,7 @@ pub(crate) async fn get_all_rows_handler(
.get_database_id_with_view_id(view_id.as_ref())
.await?;
let database_editor = manager.get_database_editor(&database_id).await?;
let row_details = database_editor
.get_all_row_details(view_id.as_ref())
.await?;
let row_details = database_editor.get_all_rows(view_id.as_ref()).await?;
let rows = row_details
.into_iter()
.map(|detail| RowMetaPB::from(detail.as_ref().clone()))
@@ -20,12 +20,15 @@ use crate::services::share::csv::{CSVExport, CSVFormat};
use crate::services::sort::Sort;
use crate::utils::cache::AnyTypeCache;
use crate::DatabaseUser;
use arc_swap::ArcSwap;
use async_trait::async_trait;
use collab_database::database::Database;
use collab_database::entity::DatabaseView;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cell, Cells, Row, RowCell, RowDetail, RowId};
use collab_database::views::{DatabaseLayout, FilterMap, LayoutSetting, OrderObjectPosition};
use collab_database::views::{
DatabaseLayout, FilterMap, LayoutSetting, OrderObjectPosition, RowOrder,
};
use collab_entity::CollabType;
use collab_integrate::collab_builder::{AppFlowyCollabBuilder, CollabBuilderConfig};
use flowy_error::{internal_error, ErrorCode, FlowyError, FlowyResult};
@@ -35,11 +38,15 @@ use lib_infra::priority_task::TaskDispatcher;
use lib_infra::util::timestamp;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::{broadcast, RwLock};
use std::time::Duration;
use tokio::sync::{broadcast, oneshot, RwLock};
use tokio_util::sync::CancellationToken;
use tracing::{debug, error, event, info, instrument, trace, warn};

type OpenDatabaseResult = oneshot::Sender<FlowyResult<DatabasePB>>;

pub struct DatabaseEditor {
database_id: String,
pub(crate) database: Arc<RwLock<Database>>,
pub cell_cache: CellCache,
pub(crate) database_views: Arc<DatabaseViews>,
@@ -48,6 +55,8 @@ pub struct DatabaseEditor {
notification_sender: Arc<DebounceNotificationSender>,
user: Arc<dyn DatabaseUser>,
collab_builder: Arc<AppFlowyCollabBuilder>,
is_opening: ArcSwap<bool>,
opening_ret_txs: Arc<RwLock<Vec<OpenDatabaseResult>>>,
database_cancellation: Arc<RwLock<Option<CancellationToken>>>,
}
@@ -101,12 +110,15 @@ impl DatabaseEditor {
database.clone(),
)?;
let this = Arc::new(Self {
database_id: database_id.clone(),
user,
database,
cell_cache,
database_views,
notification_sender,
collab_builder,
is_opening: Default::default(),
opening_ret_txs: Arc::new(Default::default()),
database_cancellation,
});
observe_block_event(&database_id, &this).await;
@@ -509,7 +521,7 @@ impl DatabaseEditor {
}

pub async fn duplicate_row(&self, view_id: &str, row_id: &RowId) -> FlowyResult<()> {
let (row_detail, index) = {
let (row, index) = {
let mut database = self.database.write().await;

let params = database
@@ -524,22 +536,12 @@ impl DatabaseEditor {
index,
row_order
);
let row_detail = database.get_row_detail(&row_order.id).await;
(row_detail, index)
let row = database.get_row(&row_order.id).await;
(row, index)
};

match row_detail {
None => {
error!(
"Failed to duplicate row: {:?}. Row is not exist before duplicating",
row_id
);
},
Some(row_detail) => {
for view in self.database_views.editors().await {
view.v_did_create_row(&row_detail, index).await;
}
},
for view in self.database_views.editors().await {
view.v_did_create_row(&row, index).await;
}

Ok(())
@@ -596,7 +598,7 @@ impl DatabaseEditor {
if let Some(row_detail) = row_detail {
trace!("created row: {:?} at {}", row_detail, index);
for view in self.database_views.editors().await {
view.v_did_create_row(&row_detail, index).await;
view.v_did_create_row(&row_detail.row, index).await;
}
return Ok(Some(row_detail));
}
@@ -677,9 +679,14 @@ impl DatabaseEditor {
Ok(())
}

pub async fn get_all_row_details(&self, view_id: &str) -> FlowyResult<Vec<Arc<RowDetail>>> {
pub async fn get_all_rows(&self, view_id: &str) -> FlowyResult<Vec<Arc<Row>>> {
let view_editor = self.database_views.get_view_editor(view_id).await?;
Ok(view_editor.v_get_all_row_details().await)
Ok(view_editor.v_get_all_rows().await)
}

pub async fn get_all_row_orders(&self, view_id: &str) -> FlowyResult<Vec<RowOrder>> {
let orders = self.database.read().await.get_row_orders_for_view(view_id);
Ok(orders)
}

pub async fn get_row(&self, view_id: &str, row_id: &RowId) -> Option<Row> {
@@ -900,7 +907,7 @@ impl DatabaseEditor {
new_cell: Cell,
) -> FlowyResult<()> {
// Get the old row before updating the cell. It would be better to get the old cell
let old_row = self.get_row_detail(view_id, row_id).await;
let old_row = self.get_row(view_id, row_id).await;
self
.database
.write()
@@ -923,7 +930,7 @@ impl DatabaseEditor {

pub async fn clear_cell(&self, view_id: &str, row_id: RowId, field_id: &str) -> FlowyResult<()> {
// Get the old row before updating the cell. It would be better to get the old cell
let old_row = self.get_row_detail(view_id, &row_id).await;
let old_row = self.get_row(view_id, &row_id).await;

self
.database
@@ -948,13 +955,13 @@ impl DatabaseEditor {
view_id: &str,
row_id: &RowId,
field_id: &str,
old_row: Option<RowDetail>,
old_row: Option<Row>,
) {
let option_row = self.get_row_detail(view_id, row_id).await;
if let Some(new_row_detail) = option_row {
let option_row = self.get_row(view_id, row_id).await;
if let Some(row) = option_row {
for view in self.database_views.editors().await {
view
.v_did_update_row(&old_row, &new_row_detail, Some(field_id.to_owned()))
.v_did_update_row(&old_row, &row, Some(field_id.to_owned()))
.await;
}
}
@@ -1153,16 +1160,19 @@ impl DatabaseEditor {
let view = self.database_views.get_view_editor(view_id).await?;
let mut row_changeset = RowChangeset::new(row_detail.row.id.clone());
view
.v_move_group_row(&row_detail, &mut row_changeset, to_group, to_row.clone())
.v_move_group_row(
&row_detail.row,
&mut row_changeset,
to_group,
to_row.clone(),
)
.await;

let to_row = if to_row.is_some() {
to_row
} else {
let row_details = self.get_all_row_details(view_id).await?;
row_details
.last()
.map(|row_detail| row_detail.row.id.clone())
let row_details = self.get_all_rows(view_id).await?;
row_details.last().map(|row| row.id.clone())
};
if let Some(row_id) = to_row.clone() {
self.move_row(view_id, from_row.clone(), row_id).await?;
@@ -1283,11 +1293,15 @@ impl DatabaseEditor {
.read()
.await
.get_view(view_id)
.ok_or_else(|| FlowyError::record_not_found().with_context("Can't find the database view"))?;
.ok_or_else(|| {
FlowyError::record_not_found()
.with_context(format!("Can't find the database view:{}", view_id))
})?;
Ok(database_view_setting_pb_from_view(view))
}

pub async fn close_database(&self) {
info!("Close database: {}", self.database_id);
let cancellation = self.database_cancellation.read().await;
if let Some(cancellation) = &*cancellation {
info!("Cancel database operation");
@@ -1295,59 +1309,152 @@ impl DatabaseEditor {
}
}

pub async fn open_database(&self, view_id: &str) -> FlowyResult<DatabasePB> {
let view_layout = self.database.read().await.get_database_view_layout(view_id);
let new_token = CancellationToken::new();

if let Some(old_token) = self
.database_cancellation
.write()
.await
.replace(new_token.clone())
{
old_token.cancel();
}

let row_details = self
.database_views
.get_view_editor(view_id)
// Only used in test
#[cfg(debug_assertions)]
pub async fn open_database_view(&self, view_id: &str) -> FlowyResult<DatabasePB> {
let rows = self
.get_all_rows(view_id)
.await?
.v_get_all_row_details()
.await;

let (database_id, fields, is_linked) = {
let database = self.database.read().await;
let database_id = database.get_database_id();
let fields = database
.get_all_field_orders()
.into_iter()
.map(FieldIdPB::from)
.collect::<Vec<_>>();
let is_linked = database.is_inline_view(view_id);
(database_id, fields, is_linked)
};

let rows = row_details
.into_iter()
.map(|order| RowMetaPB::from(order.as_ref().clone()))
.collect::<Vec<RowMetaPB>>();

trace!(
"database: {}, num fields: {}, num row: {}",
database_id,
fields.len(),
rows.len()
);
self.database_cancellation.write().await.take();
let view_layout = self.database.read().await.get_database_view_layout(view_id);
let fields = self
.database
.read()
.await
.get_all_field_orders()
.into_iter()
.map(FieldIdPB::from)
.collect::<Vec<_>>();
Ok(DatabasePB {
id: database_id,
id: self.database_id.clone(),
fields,
rows,
layout_type: view_layout.into(),
is_linked,
is_linked: self.database.read().await.is_inline_view(view_id),
})
}

pub async fn async_open_database_view(&self, view_id: &str) -> FlowyResult<DatabasePB> {
info!("Open database: {}, view: {}", self.database_id, view_id);
let (tx, rx) = oneshot::channel();
self.opening_ret_txs.write().await.push(tx);
// Check if the database is currently being opened
if !*self.is_opening.load_full() {
self.is_opening.store(Arc::new(true));

let fut = async {
let view_layout = self.database.read().await.get_database_view_layout(view_id);
let new_token = CancellationToken::new();
if let Some(old_token) = self
.database_cancellation
.write()
.await
.replace(new_token.clone())
{
old_token.cancel();
}

let row_orders = self.database.read().await.get_row_orders_for_view(view_id);
let cloned_database = Arc::downgrade(&self.database);
let cloned_row_orders = row_orders.clone();
let opening_database_views = self.database_views.clone();
tokio::spawn(async move {
const CHUNK_SIZE: usize = 10;
let mut loaded_rows = vec![];
for chunk_row_orders in cloned_row_orders.chunks(CHUNK_SIZE) {
match cloned_database.upgrade() {
None => break,
Some(database) => {
for row_order in chunk_row_orders {
if let Some(database_row) =
database.read().await.init_database_row(&row_order.id).await
{
if let Some(row) = database_row.read().await.get_row() {
loaded_rows.push(Arc::new(row));
}
}
}

// stop init database rows
if new_token.is_cancelled() {
return;
}

if loaded_rows.len() % 100 == 0 {
for database_view in opening_database_views.editors().await {
let mut view_rows = loaded_rows.clone();
database_view.v_filter_rows_and_notify(&mut view_rows).await;
database_view.v_sort_rows_and_notify(&mut view_rows).await;
}
}
},
}
tokio::task::yield_now().await;
}

for database_view in opening_database_views.editors().await {
let mut view_rows = loaded_rows.clone();
database_view.v_filter_rows_and_notify(&mut view_rows).await;
database_view.v_sort_rows_and_notify(&mut view_rows).await;
}
});

// Collect database details in a single block holding the `read` lock
let (database_id, fields, is_linked) = {
let database = self.database.read().await;
(
database.get_database_id(),
database
.get_all_field_orders()
.into_iter()
.map(FieldIdPB::from)
.collect::<Vec<_>>(),
database.is_inline_view(view_id),
)
};

let rows = row_orders
.into_iter()
.map(RowMetaPB::from)
.collect::<Vec<RowMetaPB>>();

trace!(
"database: {}, num fields: {}, num rows: {}",
database_id,
fields.len(),
rows.len()
);
Ok::<_, FlowyError>(DatabasePB {
id: database_id,
fields,
rows,
layout_type: view_layout.into(),
is_linked,
})
};

let result = fut.await;
// Mark that the opening process is complete
self.is_opening.store(Arc::new(false));
// Clear cancellation token
self.database_cancellation.write().await.take();

// Collect all waiting tasks and send the result
let txs = std::mem::take(&mut *self.opening_ret_txs.write().await);
for tx in txs {
let _ = tx.send(result.clone());
}
}

// Wait for the result or timeout after 60 seconds
match tokio::time::timeout(Duration::from_secs(60), rx).await {
Ok(result) => result.map_err(internal_error)?,
Err(_) => Err(FlowyError::internal().with_context("Timeout while opening database view")),
}
}

pub async fn export_csv(&self, style: CSVFormat) -> FlowyResult<String> {
let database = self.database.clone();
let database_guard = database.read().await;
@@ -1562,11 +1669,11 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl {
}
}

async fn get_all_row_details(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<Row>> {
let view_id = view_id.to_string();
let row_orders = self.database.read().await.get_row_orders_for_view(&view_id);
trace!("{} has total row orders: {}", view_id, row_orders.len());
let mut row_details_list = vec![];
let mut all_rows = vec![];
// Loading the rows in chunks of 10 rows in order to prevent blocking the main asynchronous runtime
const CHUNK_SIZE: usize = 10;
let cancellation = self
@@ -1579,25 +1686,18 @@ impl DatabaseViewOperation for DatabaseViewOperationImpl {
let database_read_guard = self.database.read().await;
let chunk = chunk.to_vec();
let rows = database_read_guard.get_rows_from_row_orders(&chunk).await;
for row in rows {
if let Some(cancellation) = &cancellation {
if cancellation.is_cancelled() {
info!("Get all database row is cancelled:{}", view_id);
return vec![];
}
}
match database_read_guard.get_row_detail(&row.id).await {
None => warn!("Failed to get row detail for row: {}", row.id.as_str()),
Some(row_details) => {
row_details_list.push(row_details);
},
if let Some(cancellation) = &cancellation {
if cancellation.is_cancelled() {
info!("Get all database row is cancelled:{}", view_id);
return vec![];
}
}
all_rows.extend(rows);
drop(database_read_guard);
tokio::task::yield_now().await;
}
trace!("total row details: {}", row_details_list.len());
row_details_list.into_iter().map(Arc::new).collect()
trace!("total row details: {}", all_rows.len());
all_rows.into_iter().map(Arc::new).collect()
}

async fn remove_row(&self, row_id: &RowId) -> Option<Row> {
@@ -159,25 +159,12 @@ pub(crate) async fn observe_block_event(database_id: &str, database_editor: &Arc
BlockEvent::DidFetchRow(row_details) => {
for row_detail in row_details {
trace!("Did fetch row: {:?}", row_detail.row.id);

let row_id = row_detail.row.id.clone();
let pb = DidFetchRowPB::from(row_detail);
send_notification(&row_id, DatabaseNotification::DidFetchRow)
.payload(pb)
.send();
}

// let cloned_token = token.clone();
// tokio::spawn(async move {
// tokio::time::sleep(Duration::from_secs(2)).await;
// if cloned_token.is_cancelled() {
// }
// // if let Some(database_editor) = cloned_database_editor.upgrade() {
// // TODO(nathan): calculate inserted row with RowsVisibilityChangePB
// // for view_editor in database_editor.database_views.editors().await {
// // }
// // }
// });
},
}
}
@@ -1,6 +1,3 @@
use collab_database::entity::DatabaseView;
use collab_database::views::DatabaseLayout;

use crate::entities::{
DatabaseLayoutPB, DatabaseLayoutSettingPB, DatabaseViewSettingPB, FieldSettingsPB, FilterPB,
GroupSettingPB, SortPB,
@@ -9,6 +6,9 @@ use crate::services::field_settings::FieldSettings;
use crate::services::filter::Filter;
use crate::services::group::GroupSetting;
use crate::services::sort::Sort;
use collab_database::entity::DatabaseView;
use collab_database::views::DatabaseLayout;
use tracing::error;

pub(crate) fn database_view_setting_pb_from_view(view: DatabaseView) -> DatabaseViewSettingPB {
let layout_type: DatabaseLayoutPB = view.layout.into();
@@ -33,7 +33,10 @@ pub(crate) fn database_view_setting_pb_from_view(view: DatabaseView) -> Database
.into_iter()
.flat_map(|value| match Filter::try_from(value) {
Ok(filter) => Some(FilterPB::from(&filter)),
Err(_) => None,
Err(err) => {
error!("Error converting filter: {:?}", err);
None
},
})
.collect::<Vec<FilterPB>>();

@@ -42,7 +45,10 @@ pub(crate) fn database_view_setting_pb_from_view(view: DatabaseView) -> Database
.into_iter()
.flat_map(|value| match GroupSetting::try_from(value) {
Ok(setting) => Some(GroupSettingPB::from(&setting)),
Err(_) => None,
Err(err) => {
error!("Error converting group setting: {:?}", err);
None
},
})
.collect::<Vec<GroupSettingPB>>();

@@ -51,7 +57,10 @@ pub(crate) fn database_view_setting_pb_from_view(view: DatabaseView) -> Database
.into_iter()
.flat_map(|value| match Sort::try_from(value) {
Ok(sort) => Some(SortPB::from(&sort)),
Err(_) => None,
Err(err) => {
error!("Error converting sort: {:?}", err);
None
},
})
.collect::<Vec<SortPB>>();
@@ -180,14 +180,14 @@ impl DatabaseViewEditor {
.send();
}

pub async fn v_did_create_row(&self, row_detail: &RowDetail, index: usize) {
pub async fn v_did_create_row(&self, row: &Row, index: usize) {
// Send the group notification if the current view has groups
if let Some(controller) = self.group_controller.write().await.as_mut() {
let mut row_details = vec![Arc::new(row_detail.clone())];
self.v_filter_rows(&mut row_details).await;
let mut rows = vec![Arc::new(row.clone())];
self.v_filter_rows(&mut rows).await;

if let Some(row_detail) = row_details.pop() {
let changesets = controller.did_create_row(&row_detail, index);
if let Some(row) = rows.pop() {
let changesets = controller.did_create_row(&row, index);

for changeset in changesets {
notify_did_update_group_rows(changeset).await;
@@ -195,9 +195,7 @@ impl DatabaseViewEditor {
}
}

self
.gen_did_create_row_view_tasks(index, row_detail.clone())
.await;
self.gen_did_create_row_view_tasks(index, row.clone()).await;
}

#[tracing::instrument(level = "trace", skip_all)]
@@ -244,12 +242,7 @@ impl DatabaseViewEditor {
/// Notify the view that the row has been updated. If the view has groups,
/// send the group notification with [GroupRowsNotificationPB]. Otherwise,
/// send the view notification with [RowsChangePB]
pub async fn v_did_update_row(
&self,
old_row: &Option<RowDetail>,
row_detail: &RowDetail,
field_id: Option<String>,
) {
pub async fn v_did_update_row(&self, old_row: &Option<Row>, row: &Row, field_id: Option<String>) {
if let Some(controller) = self.group_controller.write().await.as_mut() {
let field = self
.delegate
@@ -257,11 +250,11 @@ impl DatabaseViewEditor {
.await;

if let Some(field) = field {
let mut row_details = vec![Arc::new(row_detail.clone())];
self.v_filter_rows(&mut row_details).await;
let mut rows = vec![Arc::new(row.clone())];
self.v_filter_rows(&mut rows).await;

if let Some(row_detail) = row_details.pop() {
let result = controller.did_update_group_row(old_row, &row_detail, &field);
if let Some(row) = rows.pop() {
let result = controller.did_update_group_row(old_row, &row, &field);

if let Ok(result) = result {
let mut group_changes = GroupChangesPB {
@@ -295,26 +288,34 @@ impl DatabaseViewEditor {
// Each row update will trigger a calculations, filter and sort operation. We don't want
// to block the main thread, so we spawn a new task to do the work.
self
.gen_did_update_row_view_tasks(row_detail.row.id.clone(), field_id)
.gen_did_update_row_view_tasks(row.id.clone(), field_id)
.await;
}

pub async fn v_filter_rows(&self, row_details: &mut Vec<Arc<RowDetail>>) {
self.filter_controller.filter_rows(row_details).await
pub async fn v_filter_rows(&self, rows: &mut Vec<Arc<Row>>) {
self.filter_controller.filter_rows(rows).await
}

pub async fn v_sort_rows(&self, row_details: &mut Vec<Arc<RowDetail>>) {
pub async fn v_filter_rows_and_notify(&self, rows: &mut Vec<Arc<Row>>) {
let _ = self.filter_controller.filter_rows_and_notify(rows).await;
}

pub async fn v_sort_rows(&self, rows: &mut Vec<Arc<Row>>) {
self.sort_controller.write().await.sort_rows(rows).await
}

pub async fn v_sort_rows_and_notify(&self, rows: &mut Vec<Arc<Row>>) {
self
.sort_controller
.write()
.await
.sort_rows(row_details)
.await
.sort_rows_and_notify(rows)
.await;
}

#[instrument(level = "info", skip(self))]
pub async fn v_get_all_row_details(&self) -> Vec<Arc<RowDetail>> {
let mut rows = self.delegate.get_all_row_details(&self.view_id).await;
pub async fn v_get_all_rows(&self) -> Vec<Arc<Row>> {
let mut rows = self.delegate.get_all_rows(&self.view_id).await;
self.v_filter_rows(&mut rows).await;
self.v_sort_rows(&mut rows).await;
rows
@@ -322,7 +323,7 @@ impl DatabaseViewEditor {

pub async fn v_move_group_row(
&self,
row_detail: &RowDetail,
row: &Row,
row_changeset: &mut RowChangeset,
to_group_id: &str,
to_row_id: Option<RowId>,
@@ -330,7 +331,7 @@ impl DatabaseViewEditor {
let result = self
.mut_group_controller(|group_controller, field| {
let move_row_context = MoveGroupRowContext {
row_detail,
row,
row_changeset,
field: &field,
to_group_id,
@@ -1126,7 +1127,7 @@ impl DatabaseViewEditor {
});
}

async fn gen_did_create_row_view_tasks(&self, preliminary_index: usize, row_detail: RowDetail) {
async fn gen_did_create_row_view_tasks(&self, preliminary_index: usize, row: Row) {
let weak_sort_controller = Arc::downgrade(&self.sort_controller);
let weak_calculations_controller = Arc::downgrade(&self.calculations_controller);
af_spawn(async move {
@@ -1134,13 +1135,13 @@ impl DatabaseViewEditor {
sort_controller
.read()
.await
.did_create_row(preliminary_index, &row_detail)
.did_create_row(preliminary_index, &row)
.await;
}

if let Some(calculations_controller) = weak_calculations_controller.upgrade() {
calculations_controller
.did_receive_row_changed(row_detail.row.clone())
.did_receive_row_changed(row.clone())
.await;
}
});
@@ -2,7 +2,7 @@ use async_trait::async_trait;
use std::sync::Arc;

use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId};
use collab_database::rows::{Row, RowDetail, RowId};

use crate::services::cell::CellCache;
use crate::services::database_view::{
@@ -52,8 +52,8 @@ impl FilterDelegate for DatabaseViewFilterDelegateImpl {
self.0.get_fields(view_id, field_ids).await
}

async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
self.0.get_all_row_details(view_id).await
async fn get_rows(&self, view_id: &str) -> Vec<Arc<Row>> {
self.0.get_all_rows(view_id).await
}

async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc<RowDetail>)> {
@@ -2,7 +2,7 @@ use async_trait::async_trait;
use std::sync::Arc;

use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId};
use collab_database::rows::{Row, RowId};

use flowy_error::FlowyResult;

@@ -96,10 +96,10 @@ impl GroupControllerDelegate for GroupControllerDelegateImpl {
self.delegate.get_field(field_id).await
}

async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
let mut row_details = self.delegate.get_all_row_details(view_id).await;
self.filter_controller.filter_rows(&mut row_details).await;
row_details
async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<Row>> {
let mut rows = self.delegate.get_all_rows(view_id).await;
self.filter_controller.filter_rows(&mut rows).await;
rows
}
}
@@ -56,7 +56,7 @@ pub trait DatabaseViewOperation: Send + Sync + 'static {
async fn get_row_detail(&self, view_id: &str, row_id: &RowId) -> Option<(usize, Arc<RowDetail>)>;

/// Returns all the rows in the view
async fn get_all_row_details(&self, view_id: &str) -> Vec<Arc<RowDetail>>;
async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<Row>>;

async fn remove_row(&self, row_id: &RowId) -> Option<Row>;
@@ -2,7 +2,7 @@ use async_trait::async_trait;
use std::sync::Arc;

use collab_database::fields::Field;
use collab_database::rows::RowDetail;
use collab_database::rows::Row;
use tokio::sync::RwLock;

use crate::services::cell::CellCache;
@@ -59,18 +59,17 @@ impl SortDelegate for DatabaseViewSortDelegateImpl {
self.delegate.get_sort(view_id, sort_id).await.map(Arc::new)
}

async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>> {
async fn get_rows(&self, view_id: &str) -> Vec<Arc<Row>> {
let view_id = view_id.to_string();
let mut row_details = self.delegate.get_all_row_details(&view_id).await;
self.filter_controller.filter_rows(&mut row_details).await;
row_details
let mut rows = self.delegate.get_all_rows(&view_id).await;
self.filter_controller.filter_rows(&mut rows).await;
rows
}

async fn filter_row(&self, row_detail: &RowDetail) -> bool {
let row_detail = row_detail.clone();
let mut row_details = vec![Arc::new(row_detail)];
self.filter_controller.filter_rows(&mut row_details).await;
!row_details.is_empty()
async fn filter_row(&self, row: &Row) -> bool {
let mut rows = vec![Arc::new(row.clone())];
self.filter_controller.filter_rows(&mut rows).await;
!rows.is_empty()
}

async fn get_field(&self, field_id: &str) -> Option<Field> {
@@ -5,18 +5,19 @@ use chrono::{DateTime, FixedOffset, Local, NaiveDateTime, NaiveTime, Offset, Tim
use chrono_tz::Tz;
use collab::preclude::Any;
use collab::util::AnyMapExt;
use collab_database::fields::{TypeOptionData, TypeOptionDataBuilder};
use collab_database::fields::{Field, TypeOptionData, TypeOptionDataBuilder};
use collab_database::rows::Cell;
use collab_database::template::date_parse::cast_string_to_timestamp;
use serde::{Deserialize, Serialize};

use flowy_error::{ErrorCode, FlowyError, FlowyResult};

use crate::entities::{DateCellDataPB, DateFilterPB};
use crate::entities::{DateCellDataPB, DateFilterPB, FieldType};
use crate::services::cell::{CellDataChangeset, CellDataDecoder};
use crate::services::field::{
default_order, DateCellChangeset, DateCellData, DateFormat, TimeFormat, TypeOption,
TypeOptionCellDataCompare, TypeOptionCellDataFilter, TypeOptionCellDataSerde,
TypeOptionTransform,
TypeOptionTransform, CELL_DATA,
};
use crate::services::sort::SortCondition;

@@ -232,6 +233,17 @@ impl CellDataDecoder for DateTypeOption {
}
}

fn decode_cell_with_transform(
&self,
cell: &Cell,
_from_field_type: FieldType,
_field: &Field,
) -> Option<<Self as TypeOption>::CellData> {
let s = cell.get_as::<String>(CELL_DATA)?;
let timestamp = cast_string_to_timestamp(&s)?;
Some(DateCellData::from_timestamp(timestamp))
}

fn numeric_cell(&self, _cell: &Cell) -> Option<f64> {
None
}
@@ -47,6 +47,16 @@ impl DateCellData {
reminder_id,
}
}

pub fn from_timestamp(timestamp: i64) -> Self {
Self {
timestamp: Some(timestamp),
end_timestamp: None,
include_time: false,
is_range: false,
reminder_id: String::new(),
}
}
}

impl TypeOptionCellData for DateCellData {
@@ -132,7 +132,7 @@ impl NumberTypeOption {
match self.format {
NumberFormat::Num => {
if SCIENTIFIC_NOTATION_REGEX
.is_match(&num_cell_data.as_ref())
.is_match(num_cell_data.as_ref())
.unwrap()
{
match Decimal::from_scientific(&num_cell_data.as_ref().to_lowercase()) {
@@ -142,7 +142,7 @@ impl NumberTypeOption {
} else {
// Test the input string is start with dot and only contains number.
// If it is, add a 0 before the dot. For example, ".123" -> "0.123"
let num_str = match START_WITH_DOT_NUM_REGEX.captures(&num_cell_data.as_ref()) {
let num_str = match START_WITH_DOT_NUM_REGEX.captures(num_cell_data.as_ref()) {
Ok(Some(captures)) => match captures.get(0).map(|m| m.as_str().to_string()) {
Some(s) => {
format!("0{}", s)
@@ -152,7 +152,7 @@ impl NumberTypeOption {
// Extract the number from the string.
// For example, "123abc" -> "123". check out the number_type_option_input_test test for
// more examples.
_ => match EXTRACT_NUM_REGEX.captures(&num_cell_data.as_ref()) {
_ => match EXTRACT_NUM_REGEX.captures(num_cell_data.as_ref()) {
Ok(Some(captures)) => captures
.get(0)
.map(|m| m.as_str().to_string())
@@ -169,7 +169,7 @@ impl NumberTypeOption {
},
_ => {
// If the format is not number, use the format string to format the number.
NumberCellFormat::from_format_str(&num_cell_data.as_ref(), &self.format)
NumberCellFormat::from_format_str(num_cell_data.as_ref(), &self.format)
},
}
}
@@ -186,12 +186,12 @@ impl CellDataDecoder for NumberTypeOption {
fn decode_cell(&self, cell: &Cell) -> FlowyResult<<Self as TypeOption>::CellData> {
let num_cell_data = self.parse_cell(cell)?;
Ok(NumberCellData::from(
self.format_cell_data(&num_cell_data)?.to_string(),
self.format_cell_data(num_cell_data)?.to_string(),
))
}

fn stringify_cell_data(&self, cell_data: <Self as TypeOption>::CellData) -> String {
match self.format_cell_data(&cell_data) {
match self.format_cell_data(cell_data) {
Ok(cell_data) => cell_data.to_string(),
Err(_) => "".to_string(),
}
@@ -205,7 +205,7 @@ impl CellDataDecoder for NumberTypeOption {
) -> Option<<Self as TypeOption>::CellData> {
let num_cell = Self::CellData::from(cell);
Some(Self::CellData::from(
self.format_cell_data(&num_cell).ok()?.to_string(),
self.format_cell_data(num_cell).ok()?.to_string(),
))
}
@ -194,12 +194,12 @@ where
|
||||
if let Some(cell_data_cache) = self.cell_data_cache.as_ref() {
|
||||
let field_type = FieldType::from(field.field_type);
|
||||
let key = CellDataCacheKey::new(field, field_type, cell);
|
||||
tracing::trace!(
|
||||
"Cell cache update: field_type:{}, cell: {:?}, cell_data: {:?}",
|
||||
field_type,
|
||||
cell,
|
||||
cell_data
|
||||
);
|
||||
// tracing::trace!(
|
||||
// "Cell cache update: field_type:{}, cell: {:?}, cell_data: {:?}",
|
||||
// field_type,
|
||||
// cell,
|
||||
// cell_data
|
||||
// );
|
||||
cell_data_cache.insert(key.as_ref(), cell_data);
|
||||
}
|
||||
}
|
||||
@ -523,6 +523,7 @@ pub fn is_type_option_cell_transformable(
|
||||
| (FieldType::RichText, FieldType::MultiSelect)
|
||||
| (FieldType::RichText, FieldType::URL)
|
||||
| (FieldType::RichText, FieldType::Number)
|
||||
| (FieldType::RichText, FieldType::DateTime)
|
||||
| (_, FieldType::RichText)
|
||||
)
|
||||
}
|
||||
|

@ -24,7 +24,7 @@ use crate::services::filter::{Filter, FilterChangeset, FilterInner, FilterResult
pub trait FilterDelegate: Send + Sync + 'static {
async fn get_field(&self, field_id: &str) -> Option<Field>;
async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field>;
async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>>;
async fn get_rows(&self, view_id: &str) -> Vec<Arc<Row>>;
async fn get_row(&self, view_id: &str, rows_id: &RowId) -> Option<(usize, Arc<RowDetail>)>;
async fn get_all_filters(&self, view_id: &str) -> Vec<Filter>;
async fn save_filters(&self, view_id: &str, filters: &[Filter]);
@ -129,32 +129,6 @@ impl FilterController {
self.task_scheduler.write().await.add_task(task);
}

pub async fn filter_rows(&self, rows: &mut Vec<Arc<RowDetail>>) {
let filters = self.filters.read().await;

if filters.is_empty() {
return;
}
let field_by_field_id = self.get_field_map().await;
rows.iter().for_each(|row_detail| {
let _ = filter_row(
&row_detail.row,
&self.result_by_row_id,
&field_by_field_id,
&self.cell_cache,
&filters,
);
});

rows.retain(|row_detail| {
self
.result_by_row_id
.get(&row_detail.row.id)
.map(|result| *result)
.unwrap_or(false)
});
}

pub async fn did_receive_row_changed(&self, row_id: RowId) {
if !self.filters.read().await.is_empty() {
self
@ -338,7 +312,10 @@ impl FilterController {
pub async fn process(&self, predicate: &str) -> FlowyResult<()> {
let event_type = FilterEvent::from_str(predicate).unwrap();
match event_type {
FilterEvent::FilterDidChanged => self.filter_all_rows_handler().await?,
FilterEvent::FilterDidChanged => {
let mut rows = self.delegate.get_rows(&self.view_id).await;
self.filter_rows_and_notify(&mut rows).await?
},
FilterEvent::RowDidChanged(row_id) => self.filter_single_row_handler(row_id).await?,
}
Ok(())
@ -376,42 +353,35 @@ impl FilterController {
Ok(())
}

async fn filter_all_rows_handler(&self) -> FlowyResult<()> {
pub async fn filter_rows_and_notify(&self, rows: &mut Vec<Arc<Row>>) -> FlowyResult<()> {
let filters = self.filters.read().await;

let field_by_field_id = self.get_field_map().await;
let mut visible_rows = vec![];
let mut invisible_rows = vec![];

for (index, row_detail) in self
.delegate
.get_rows(&self.view_id)
.await
.into_iter()
.enumerate()
{
for (index, row) in rows.iter_mut().enumerate() {
if let Some(is_visible) = filter_row(
&row_detail.row,
row,
&self.result_by_row_id,
&field_by_field_id,
&self.cell_cache,
&filters,
) {
if is_visible {
let row_meta = RowMetaPB::from(row_detail.as_ref().clone());
let row_meta = RowMetaPB::from(row.as_ref().clone());
visible_rows.push(InsertedRowPB::new(row_meta).with_index(index as i32))
} else {
invisible_rows.push(row_detail.row.id.clone());
invisible_rows.push(row.id.clone());
}
}
}

rows.retain(|row| !invisible_rows.iter().any(|id| id == &row.id));
let notification = FilterResultNotification {
view_id: self.view_id.clone(),
invisible_rows,
visible_rows,
};
tracing::trace!("filter result {:?}", filters);
tracing::trace!("filter result {:?}", notification);
let _ = self
.notifier
.send(DatabaseViewChanged::FilterNotification(notification));
@ -419,6 +389,32 @@ impl FilterController {
Ok(())
}

pub async fn filter_rows(&self, rows: &mut Vec<Arc<Row>>) {
let filters = self.filters.read().await;

if filters.is_empty() {
return;
}
let field_by_field_id = self.get_field_map().await;
rows.iter().for_each(|row| {
let _ = filter_row(
row,
&self.result_by_row_id,
&field_by_field_id,
&self.cell_cache,
&filters,
);
});

rows.retain(|row| {
self
.result_by_row_id
.get(&row.id)
.map(|result| *result)
.unwrap_or(false)
});
}

async fn get_field_map(&self) -> HashMap<String, Field> {
self
.delegate

@ -10,6 +10,7 @@ use collab_database::rows::RowId;
use collab_database::views::{FilterMap, FilterMapBuilder};
use flowy_error::{FlowyError, FlowyResult};
use lib_infra::box_any::BoxAny;
use tracing::error;

use crate::entities::{
CheckboxFilterPB, ChecklistFilterPB, DateFilterContent, DateFilterPB, FieldType, FilterType,
@ -454,8 +455,13 @@ fn get_children(filter_map: FilterMap) -> Vec<Filter> {
if let Some(Any::Array(children)) = filter_map.get(FILTER_CHILDREN) {
for child in children.iter() {
if let Any::Map(child_map) = child {
if let Ok(filter) = Filter::try_from(child_map.deref().clone()) {
result.push(filter);
match Filter::try_from(child_map.deref().clone()) {
Ok(filter) => {
result.push(filter);
},
Err(err) => {
error!("Failed to deserialize filter: {:?}", err);
},
}
}
}

@ -1,6 +1,6 @@
use async_trait::async_trait;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cell, Cells, Row, RowDetail, RowId};
use collab_database::rows::{Cell, Cells, Row, RowId};

use flowy_error::FlowyResult;

@ -33,7 +33,7 @@ pub trait GroupCustomize: Send + Sync {

fn create_or_delete_group_when_cell_changed(
&mut self,
_row_detail: &RowDetail,
_row: &Row,
_old_cell_data: Option<&<Self::GroupTypeOption as TypeOption>::CellProtobufType>,
_cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> FlowyResult<(Option<InsertedGroupPB>, Option<GroupPB>)> {
@ -45,7 +45,7 @@ pub trait GroupCustomize: Send + Sync {
///
fn add_or_remove_row_when_cell_changed(
&mut self,
row_detail: &RowDetail,
row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB>;

@ -113,7 +113,7 @@ pub trait GroupController: Send + Sync {
///
/// * `rows`: rows to be inserted
/// * `field`: reference to the field being sorted (currently unused)
fn fill_groups(&mut self, rows: &[&RowDetail], field: &Field) -> FlowyResult<()>;
fn fill_groups(&mut self, rows: &[&Row], field: &Field) -> FlowyResult<()>;

/// Create a new group, currently only supports single and multi-select.
///
@ -137,11 +137,7 @@ pub trait GroupController: Send + Sync {
/// Returns a changeset payload to be sent as a notification.
///
/// * `row_detail`: the newly-created row
fn did_create_row(
&mut self,
row_detail: &RowDetail,
index: usize,
) -> Vec<GroupRowsNotificationPB>;
fn did_create_row(&mut self, row: &Row, index: usize) -> Vec<GroupRowsNotificationPB>;

/// Called after a row's cell data is changed, this moves the row to the
/// correct group. It may also insert a new group and/or remove an old group.
@ -153,8 +149,8 @@ pub trait GroupController: Send + Sync {
/// * `field`:
fn did_update_group_row(
&mut self,
old_row_detail: &Option<RowDetail>,
row_detail: &RowDetail,
old_row: &Option<Row>,
new_row: &Row,
field: &Field,
) -> FlowyResult<DidUpdateGroupRowResult>;

@ -3,7 +3,7 @@ use std::marker::PhantomData;
use std::sync::Arc;

use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cells, Row, RowDetail, RowId};
use collab_database::rows::{Cells, Row, RowId};
use futures::executor::block_on;
use serde::de::DeserializeOwned;
use serde::Serialize;
@ -27,7 +27,7 @@ use crate::services::group::{GroupChangeset, GroupsBuilder, MoveGroupRowContext}
pub trait GroupControllerDelegate: Send + Sync + 'static {
async fn get_field(&self, field_id: &str) -> Option<Field>;

async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>>;
async fn get_all_rows(&self, view_id: &str) -> Vec<Arc<Row>>;
}

/// [BaseGroupController] is a generic group controller that provides customized implementations
@ -86,7 +86,7 @@ where

fn update_no_status_group(
&mut self,
row_detail: &RowDetail,
row: &Row,
other_group_changesets: &[GroupRowsNotificationPB],
) -> Option<GroupRowsNotificationPB> {
let no_status_group = self.context.get_mut_no_status_group()?;
@ -115,8 +115,8 @@ where
if !no_status_group_rows.is_empty() {
changeset
.inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone())));
no_status_group.add_row(row_detail.clone());
.push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
no_status_group.add_row(row.clone());
}

// [other_group_delete_rows] contains all the deleted rows except the default group.
@ -139,8 +139,8 @@ where
.collect::<Vec<&InsertedRowPB>>();

let mut deleted_row_ids = vec![];
for row_detail in &no_status_group.rows {
let row_id = row_detail.row.id.to_string();
for row in &no_status_group.rows {
let row_id = row.id.to_string();
if default_group_deleted_rows
.iter()
.any(|deleted_row| deleted_row.row_meta.id == row_id)
@ -150,7 +150,7 @@ where
}
no_status_group
.rows
.retain(|row_detail| !deleted_row_ids.contains(&row_detail.row.id));
.retain(|row| !deleted_row_ids.contains(&row.id));
changeset.deleted_rows.extend(deleted_row_ids);
Some(changeset)
}
@ -179,9 +179,9 @@ where
}

#[tracing::instrument(level = "trace", skip_all, fields(row_count=%rows.len(), group_result))]
fn fill_groups(&mut self, rows: &[&RowDetail], _field: &Field) -> FlowyResult<()> {
for row_detail in rows {
let cell = match row_detail.row.cells.get(&self.grouping_field_id) {
fn fill_groups(&mut self, rows: &[&Row], _field: &Field) -> FlowyResult<()> {
for row in rows {
let cell = match row.cells.get(&self.grouping_field_id) {
None => self.placeholder_cell(),
Some(cell) => Some(cell.clone()),
};
@ -192,7 +192,7 @@ where
for group in self.context.groups() {
if self.can_group(&group.id, &cell_data) {
grouped_rows.push(GroupedRow {
row_detail: (*row_detail).clone(),
row: (*row).clone(),
group_id: group.id.clone(),
});
}
@ -201,7 +201,7 @@ where
if !grouped_rows.is_empty() {
for group_row in grouped_rows {
if let Some(group) = self.context.get_mut_group(&group_row.group_id) {
group.add_row(group_row.row_detail);
group.add_row(group_row.row);
}
}
continue;
@ -210,7 +210,7 @@ where

match self.context.get_mut_no_status_group() {
None => {},
Some(no_status_group) => no_status_group.add_row((*row_detail).clone()),
Some(no_status_group) => no_status_group.add_row((*row).clone()),
}
}

@ -229,14 +229,10 @@ where
self.context.move_group(from_group_id, to_group_id)
}

fn did_create_row(
&mut self,
row_detail: &RowDetail,
index: usize,
) -> Vec<GroupRowsNotificationPB> {
fn did_create_row(&mut self, row: &Row, index: usize) -> Vec<GroupRowsNotificationPB> {
let mut changesets: Vec<GroupRowsNotificationPB> = vec![];

let cell = match row_detail.row.cells.get(&self.grouping_field_id) {
let cell = match row.cells.get(&self.grouping_field_id) {
None => self.placeholder_cell(),
Some(cell) => Some(cell.clone()),
};
@ -252,7 +248,7 @@ where
let changeset = GroupRowsNotificationPB::insert(
group.id.clone(),
vec![InsertedRowPB {
row_meta: (*row_detail).clone().into(),
row_meta: (*row).clone().into(),
index: Some(index as i32),
is_new: true,
}],
@ -263,15 +259,15 @@ where
if !suitable_group_ids.is_empty() {
for group_id in suitable_group_ids.iter() {
if let Some(group) = self.context.get_mut_group(group_id) {
group.add_row((*row_detail).clone());
group.add_row((*row).clone());
}
}
} else if let Some(no_status_group) = self.context.get_mut_no_status_group() {
no_status_group.add_row((*row_detail).clone());
no_status_group.add_row((*row).clone());
let changeset = GroupRowsNotificationPB::insert(
no_status_group.id.clone(),
vec![InsertedRowPB {
row_meta: (*row_detail).clone().into(),
row_meta: (*row).clone().into(),
index: Some(index as i32),
is_new: true,
}],
@ -285,8 +281,8 @@ where

fn did_update_group_row(
&mut self,
old_row_detail: &Option<RowDetail>,
row_detail: &RowDetail,
old_row: &Option<Row>,
new_row: &Row,
field: &Field,
) -> FlowyResult<DidUpdateGroupRowResult> {
let mut result = DidUpdateGroupRowResult {
@ -294,20 +290,17 @@ where
deleted_group: None,
row_changesets: vec![],
};
if let Some(cell_data) = get_cell_data_from_row::<P>(Some(&row_detail.row), field) {
let old_cell_data =
get_cell_data_from_row::<P>(old_row_detail.as_ref().map(|detail| &detail.row), field);
if let Ok((insert, delete)) = self.create_or_delete_group_when_cell_changed(
row_detail,
old_cell_data.as_ref(),
&cell_data,
) {
if let Some(cell_data) = get_cell_data_from_row::<P>(Some(new_row), field) {
let old_cell_data = get_cell_data_from_row::<P>(old_row.as_ref(), field);
if let Ok((insert, delete)) =
self.create_or_delete_group_when_cell_changed(new_row, old_cell_data.as_ref(), &cell_data)
{
result.inserted_group = insert;
result.deleted_group = delete;
}

let mut changesets = self.add_or_remove_row_when_cell_changed(row_detail, &cell_data);
if let Some(changeset) = self.update_no_status_group(row_detail, &changesets) {
let mut changesets = self.add_or_remove_row_when_cell_changed(new_row, &cell_data);
if let Some(changeset) = self.update_no_status_group(new_row, &changesets) {
if !changeset.is_empty() {
changesets.push(changeset);
}
@ -356,7 +349,7 @@ where
deleted_group: None,
row_changesets: vec![],
};
let cell = match context.row_detail.row.cells.get(&self.grouping_field_id) {
let cell = match context.row.cells.get(&self.grouping_field_id) {
Some(cell) => Some(cell.clone()),
None => self.placeholder_cell(),
};
@ -364,7 +357,7 @@ where
if let Some(cell) = cell {
let cell_bytes = get_cell_protobuf(&cell, context.field, None);
let cell_data = cell_bytes.parser::<P>()?;
result.deleted_group = self.delete_group_when_move_row(&context.row_detail.row, &cell_data);
result.deleted_group = self.delete_group_when_move_row(context.row, &cell_data);
result.row_changesets = self.move_row(context);
} else {
tracing::warn!("Unexpected moving group row, changes should not be empty");
@ -388,11 +381,7 @@ where

match group {
Some((_index, group_data)) => {
let row_ids = group_data
.rows
.iter()
.map(|row| row.row.id.clone())
.collect();
let row_ids = group_data.rows.iter().map(|row| row.id.clone()).collect();
let type_option_data = <Self as GroupCustomize>::delete_group(self, group_id).await?;
Ok((row_ids, type_option_data))
},
@ -446,7 +435,7 @@ where
}

struct GroupedRow {
row_detail: RowDetail,
row: Row,
group_id: String,
}

@ -1,6 +1,6 @@
use async_trait::async_trait;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use flowy_error::FlowyResult;
use serde::{Deserialize, Serialize};

@ -49,27 +49,25 @@ impl GroupCustomize for CheckboxGroupController {

fn add_or_remove_row_when_cell_changed(
&mut self,
row_detail: &RowDetail,
row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![];
self.context.iter_mut_status_groups(|group| {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
let is_not_contained = !group.contains_row(&row_detail.row.id);
let is_not_contained = !group.contains_row(&row.id);
if group.id == CHECK {
if !cell_data.is_checked {
// Remove the row if the group.id is CHECK but the cell_data is UNCHECK
changeset
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
group.remove_row(&row_detail.row.id);
changeset.deleted_rows.push(row.id.clone().into_inner());
group.remove_row(&row.id);
} else {
// Add the row to the group if the group didn't contain the row
if is_not_contained {
changeset
.inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone())));
group.add_row(row_detail.clone());
.push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row.clone());
}
}
}
@ -77,17 +75,15 @@ impl GroupCustomize for CheckboxGroupController {
if group.id == UNCHECK {
if cell_data.is_checked {
// Remove the row if the group.id is UNCHECK but the cell_data is CHECK
changeset
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
group.remove_row(&row_detail.row.id);
changeset.deleted_rows.push(row.id.clone().into_inner());
group.remove_row(&row.id);
} else {
// Add the row to the group if the group didn't contain the row
if is_not_contained {
changeset
.inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone())));
group.add_row(row_detail.clone());
.push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row.clone());
}
}
}

@ -2,7 +2,7 @@ use async_trait::async_trait;
use chrono::{DateTime, Datelike, Days, Duration, Local, NaiveDateTime};
use collab_database::database::timestamp;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};

@ -73,7 +73,7 @@ impl GroupCustomize for DateGroupController {

fn create_or_delete_group_when_cell_changed(
&mut self,
_row_detail: &RowDetail,
_row: &Row,
_old_cell_data: Option<&<Self::GroupTypeOption as TypeOption>::CellProtobufType>,
_cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> FlowyResult<(Option<InsertedGroupPB>, Option<GroupPB>)> {
@ -86,10 +86,7 @@ impl GroupCustomize for DateGroupController {
{
let group = make_group_from_date_cell(&_cell_data.into(), &setting_content);
let mut new_group = self.context.add_new_group(group)?;
new_group
.group
.rows
.push(RowMetaPB::from(_row_detail.clone()));
new_group.group.rows.push(RowMetaPB::from(_row.clone()));
inserted_group = Some(new_group);
}

@ -122,7 +119,7 @@ impl GroupCustomize for DateGroupController {

fn add_or_remove_row_when_cell_changed(
&mut self,
row_detail: &RowDetail,
row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![];
@ -130,17 +127,15 @@ impl GroupCustomize for DateGroupController {
self.context.iter_mut_status_groups(|group| {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
if group.id == get_date_group_id(&cell_data.into(), &setting_content) {
if !group.contains_row(&row_detail.row.id) {
if !group.contains_row(&row.id) {
changeset
.inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone())));
group.add_row(row_detail.clone());
.push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row.clone());
}
} else if group.contains_row(&row_detail.row.id) {
group.remove_row(&row_detail.row.id);
changeset
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
} else if group.contains_row(&row.id) {
group.remove_row(&row.id);
changeset.deleted_rows.push(row.id.clone().into_inner());
}

if !changeset.is_empty() {

@ -2,7 +2,7 @@ use async_trait::async_trait;
use std::sync::Arc;

use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{Cells, Row, RowDetail, RowId};
use collab_database::rows::{Cells, Row, RowId};

use flowy_error::FlowyResult;

@ -53,7 +53,7 @@ impl GroupController for DefaultGroupController {
Some((0, self.group.clone()))
}

fn fill_groups(&mut self, rows: &[&RowDetail], _field: &Field) -> FlowyResult<()> {
fn fill_groups(&mut self, rows: &[&Row], _field: &Field) -> FlowyResult<()> {
rows.iter().for_each(|row| {
self.group.add_row((*row).clone());
});
@ -71,17 +71,13 @@ impl GroupController for DefaultGroupController {
Ok(())
}

fn did_create_row(
&mut self,
row_detail: &RowDetail,
index: usize,
) -> Vec<GroupRowsNotificationPB> {
self.group.add_row((*row_detail).clone());
fn did_create_row(&mut self, row: &Row, index: usize) -> Vec<GroupRowsNotificationPB> {
self.group.add_row((*row).clone());

vec![GroupRowsNotificationPB::insert(
self.group.id.clone(),
vec![InsertedRowPB {
row_meta: (*row_detail).clone().into(),
row_meta: (*row).clone().into(),
index: Some(index as i32),
is_new: true,
}],
@ -90,8 +86,8 @@ impl GroupController for DefaultGroupController {

fn did_update_group_row(
&mut self,
_old_row_detail: &Option<RowDetail>,
_row_detail: &RowDetail,
_old_row: &Option<Row>,
_new_row: &Row,
_field: &Field,
) -> FlowyResult<DidUpdateGroupRowResult> {
Ok(DidUpdateGroupRowResult {

@ -1,7 +1,7 @@
use async_trait::async_trait;
use collab_database::entity::SelectOption;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use flowy_error::{FlowyError, FlowyResult};
use serde::{Deserialize, Serialize};

@ -51,12 +51,12 @@ impl GroupCustomize for MultiSelectGroupController {

fn add_or_remove_row_when_cell_changed(
&mut self,
row_detail: &RowDetail,
row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![];
self.context.iter_mut_status_groups(|group| {
if let Some(changeset) = add_or_remove_select_option_row(group, cell_data, row_detail) {
if let Some(changeset) = add_or_remove_select_option_row(group, cell_data, row) {
changesets.push(changeset);
}
});

@ -1,7 +1,7 @@
use async_trait::async_trait;
use collab_database::entity::SelectOption;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use flowy_error::{FlowyError, FlowyResult};
use serde::{Deserialize, Serialize};

@ -53,12 +53,12 @@ impl GroupCustomize for SingleSelectGroupController {

fn add_or_remove_row_when_cell_changed(
&mut self,
row_detail: &RowDetail,
row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![];
self.context.iter_mut_status_groups(|group| {
if let Some(changeset) = add_or_remove_select_option_row(group, cell_data, row_detail) {
if let Some(changeset) = add_or_remove_select_option_row(group, cell_data, row) {
changesets.push(changeset);
}
});

@ -1,7 +1,7 @@
use chrono::NaiveDateTime;
use collab_database::entity::SelectOption;
use collab_database::fields::Field;
use collab_database::rows::{Cell, Row, RowDetail};
use collab_database::rows::{Cell, Row};

use crate::entities::{
FieldType, GroupRowsNotificationPB, InsertedRowPB, RowMetaPB, SelectOptionCellDataPB,
@ -15,30 +15,26 @@ use crate::services::group::{Group, GroupData, MoveGroupRowContext};
pub fn add_or_remove_select_option_row(
group: &mut GroupData,
cell_data: &SelectOptionCellDataPB,
row_detail: &RowDetail,
row: &Row,
) -> Option<GroupRowsNotificationPB> {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
if cell_data.select_options.is_empty() {
if group.contains_row(&row_detail.row.id) {
group.remove_row(&row_detail.row.id);
changeset
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
if group.contains_row(&row.id) {
group.remove_row(&row.id);
changeset.deleted_rows.push(row.id.clone().into_inner());
}
} else {
cell_data.select_options.iter().for_each(|option| {
if option.id == group.id {
if !group.contains_row(&row_detail.row.id) {
if !group.contains_row(&row.id) {
changeset
.inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone())));
group.add_row(row_detail.clone());
.push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row.clone());
}
} else if group.contains_row(&row_detail.row.id) {
group.remove_row(&row_detail.row.id);
changeset
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
} else if group.contains_row(&row.id) {
group.remove_row(&row.id);
changeset.deleted_rows.push(row.id.clone().into_inner());
}
});
}
@ -76,14 +72,14 @@ pub fn move_group_row(
) -> Option<GroupRowsNotificationPB> {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
let MoveGroupRowContext {
row_detail,
row,
row_changeset,
field,
to_group_id,
to_row_id,
} = context;

let from_index = group.index_of_row(&row_detail.row.id);
let from_index = group.index_of_row(&row.id);
let to_index = match to_row_id {
None => None,
Some(to_row_id) => group.index_of_row(to_row_id),
@ -91,40 +87,28 @@ pub fn move_group_row(

// Remove the row in which group contains it
if let Some(from_index) = &from_index {
changeset
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
tracing::debug!(
"Group:{} remove {} at {}",
group.id,
row_detail.row.id,
from_index
);
group.remove_row(&row_detail.row.id);
changeset.deleted_rows.push(row.id.clone().into_inner());
tracing::debug!("Group:{} remove {} at {}", group.id, row.id, from_index);
group.remove_row(&row.id);
}

if group.id == *to_group_id {
let mut inserted_row = InsertedRowPB::new(RowMetaPB::from((*row_detail).clone()));
let mut inserted_row = InsertedRowPB::new(RowMetaPB::from((*row).clone()));
match to_index {
None => {
changeset.inserted_rows.push(inserted_row);
tracing::debug!("Group:{} append row:{}", group.id, row_detail.row.id);
group.add_row(row_detail.clone());
tracing::debug!("Group:{} append row:{}", group.id, row.id);
group.add_row(row.clone());
},
Some(to_index) => {
if to_index < group.number_of_row() {
tracing::debug!(
"Group:{} insert {} at {} ",
group.id,
row_detail.row.id,
to_index
);
tracing::debug!("Group:{} insert {} at {} ", group.id, row.id, to_index);
inserted_row.index = Some(to_index as i32);
group.insert_row(to_index, (*row_detail).clone());
group.insert_row(to_index, row.clone());
} else {
tracing::warn!("Move to index: {} is out of bounds", to_index);
tracing::debug!("Group:{} append row:{}", group.id, row_detail.row.id);
group.add_row((*row_detail).clone());
tracing::debug!("Group:{} append row:{}", group.id, row.id);
group.add_row(row.clone());
}
changeset.inserted_rows.push(inserted_row);
},
@ -138,7 +122,7 @@ pub fn move_group_row(
if let Some(cell) = cell {
tracing::debug!(
"Update content of the cell in the row:{} to group:{}",
row_detail.row.id,
row.id,
group.id
);
row_changeset

@ -1,6 +1,6 @@
use async_trait::async_trait;
use collab_database::fields::{Field, TypeOptionData};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row, RowDetail};
use collab_database::rows::{new_cell_builder, Cell, Cells, Row};
use serde::{Deserialize, Serialize};

use flowy_error::FlowyResult;
@ -47,7 +47,7 @@ impl GroupCustomize for URLGroupController {

fn create_or_delete_group_when_cell_changed(
&mut self,
_row_detail: &RowDetail,
_row: &Row,
_old_cell_data: Option<&<Self::GroupTypeOption as TypeOption>::CellProtobufType>,
_cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> FlowyResult<(Option<InsertedGroupPB>, Option<GroupPB>)> {
@ -57,10 +57,7 @@ impl GroupCustomize for URLGroupController {
let cell_data: URLCellData = _cell_data.clone().into();
let group = Group::new(cell_data.data);
let mut new_group = self.context.add_new_group(group)?;
new_group
.group
.rows
.push(RowMetaPB::from(_row_detail.clone()));
new_group.group.rows.push(RowMetaPB::from(_row.clone()));
inserted_group = Some(new_group);
}

@ -91,24 +88,22 @@ impl GroupCustomize for URLGroupController {

fn add_or_remove_row_when_cell_changed(
&mut self,
row_detail: &RowDetail,
row: &Row,
cell_data: &<Self::GroupTypeOption as TypeOption>::CellProtobufType,
) -> Vec<GroupRowsNotificationPB> {
let mut changesets = vec![];
self.context.iter_mut_status_groups(|group| {
let mut changeset = GroupRowsNotificationPB::new(group.id.clone());
if group.id == cell_data.content {
if !group.contains_row(&row_detail.row.id) {
if !group.contains_row(&row.id) {
changeset
.inserted_rows
.push(InsertedRowPB::new(RowMetaPB::from(row_detail.clone())));
group.add_row(row_detail.clone());
.push(InsertedRowPB::new(RowMetaPB::from(row.clone())));
group.add_row(row.clone());
}
} else if group.contains_row(&row_detail.row.id) {
group.remove_row(&row_detail.row.id);
changeset
.deleted_rows
.push(row_detail.row.id.clone().into_inner());
} else if group.contains_row(&row.id) {
group.remove_row(&row.id);
changeset.deleted_rows.push(row.id.clone().into_inner());
}

if !changeset.is_empty() {

@ -1,7 +1,7 @@
use collab::preclude::encoding::serde::{from_any, to_any};
use collab::preclude::Any;
use collab_database::database::gen_database_group_id;
use collab_database::rows::{RowDetail, RowId};
use collab_database::rows::{Row, RowId};
use collab_database::views::{GroupMap, GroupMapBuilder, GroupSettingBuilder, GroupSettingMap};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
@ -103,7 +103,7 @@ pub struct GroupData {
pub field_id: String,
pub is_default: bool,
pub is_visible: bool,
pub(crate) rows: Vec<RowDetail>,
pub(crate) rows: Vec<Row>,
}

impl GroupData {
@ -119,18 +119,11 @@ impl GroupData {
}

pub fn contains_row(&self, row_id: &RowId) -> bool {
self
.rows
.iter()
.any(|row_detail| &row_detail.row.id == row_id)
self.rows.iter().any(|row| &row.id == row_id)
}

pub fn remove_row(&mut self, row_id: &RowId) {
match self
.rows
.iter()
.position(|row_detail| &row_detail.row.id == row_id)
{
match self.rows.iter().position(|row| &row.id == row_id) {
None => {},
Some(pos) => {
self.rows.remove(pos);
@ -138,18 +131,18 @@ impl GroupData {
}
}

pub fn add_row(&mut self, row_detail: RowDetail) {
match self.rows.iter().find(|r| r.row.id == row_detail.row.id) {
pub fn add_row(&mut self, row: Row) {
match self.rows.iter().find(|r| r.id == row.id) {
None => {
self.rows.push(row_detail);
self.rows.push(row);
},
Some(_) => {},
}
}

pub fn insert_row(&mut self, index: usize, row_detail: RowDetail) {
pub fn insert_row(&mut self, index: usize, row: Row) {
if index < self.rows.len() {
self.rows.insert(index, row_detail);
self.rows.insert(index, row);
} else {
tracing::error!(
"Insert row index:{} beyond the bounds:{},",
@ -160,10 +153,7 @@ impl GroupData {
}

pub fn index_of_row(&self, row_id: &RowId) -> Option<usize> {
self
.rows
.iter()
.position(|row_detail| &row_detail.row.id == row_id)
self.rows.iter().position(|row| &row.id == row_id)
}

pub fn number_of_row(&self) -> usize {

@ -3,7 +3,7 @@ use std::sync::Arc;

use async_trait::async_trait;
use collab_database::fields::Field;
use collab_database::rows::{Cell, RowDetail, RowId};
use collab_database::rows::{Cell, Row, RowId};

use flowy_error::FlowyResult;

@ -36,7 +36,7 @@ pub struct GeneratedGroups {
}

pub struct MoveGroupRowContext<'a> {
pub row_detail: &'a RowDetail,
pub row: &'a Row,
pub row_changeset: &'a mut RowChangeset,
pub field: &'a Field,
pub to_group_id: &'a str,

@ -5,7 +5,7 @@ use std::str::FromStr;
use std::sync::Arc;

use collab_database::fields::Field;
use collab_database::rows::{Cell, Row, RowDetail, RowId};
use collab_database::rows::{Cell, Row, RowId};
use rayon::prelude::ParallelSliceMut;
use serde::{Deserialize, Serialize};
use tokio::sync::RwLock;
@ -28,8 +28,8 @@ use crate::services::sort::{
pub trait SortDelegate: Send + Sync {
async fn get_sort(&self, view_id: &str, sort_id: &str) -> Option<Arc<Sort>>;
/// Returns all the rows after applying grid's filter
async fn get_rows(&self, view_id: &str) -> Vec<Arc<RowDetail>>;
async fn filter_row(&self, row_detail: &RowDetail) -> bool;
async fn get_rows(&self, view_id: &str) -> Vec<Arc<Row>>;
async fn filter_row(&self, row_detail: &Row) -> bool;
async fn get_field(&self, field_id: &str) -> Option<Field>;
async fn get_fields(&self, view_id: &str, field_ids: Option<Vec<String>>) -> Vec<Field>;
}
@ -95,22 +95,22 @@ impl SortController {
}
}

pub async fn did_create_row(&self, preliminary_index: usize, row_detail: &RowDetail) {
if !self.delegate.filter_row(row_detail).await {
pub async fn did_create_row(&self, preliminary_index: usize, row: &Row) {
if !self.delegate.filter_row(row).await {
return;
}

if !self.sorts.is_empty() {
self
.gen_task(
SortEvent::NewRowInserted(row_detail.clone()),
SortEvent::NewRowInserted(row.clone()),
QualityOfService::Background,
)
.await;
} else {
let result = InsertRowResult {
view_id: self.view_id.clone(),
row: row_detail.clone(),
row: row.clone(),
index: preliminary_index,
};
let _ = self
@ -130,31 +130,15 @@ impl SortController {
// #[tracing::instrument(name = "process_sort_task", level = "trace", skip_all, err)]
pub async fn process(&mut self, predicate: &str) -> FlowyResult<()> {
let event_type = SortEvent::from_str(predicate).unwrap();
let mut row_details = self.delegate.get_rows(&self.view_id).await;
let mut rows = self.delegate.get_rows(&self.view_id).await;

match event_type {
SortEvent::SortDidChanged | SortEvent::DeleteAllSorts => {
self.sort_rows(&mut row_details).await;
let row_orders = row_details
.iter()
.map(|row_detail| row_detail.row.id.to_string())
.collect::<Vec<String>>();

let notification = ReorderAllRowsResult {
view_id: self.view_id.clone(),
row_orders,
};

let _ = self
.notifier
.send(DatabaseViewChanged::ReorderAllRowsNotification(
notification,
));
self.sort_rows_and_notify(&mut rows).await;
},
SortEvent::RowDidChanged(row_id) => {
let old_row_index = self.row_index_cache.get(&row_id).cloned();

self.sort_rows(&mut row_details).await;
self.sort_rows(&mut rows).await;
let new_row_index = self.row_index_cache.get(&row_id).cloned();
match (old_row_index, new_row_index) {
(Some(old_row_index), Some(new_row_index)) => {
@ -176,17 +160,17 @@ impl SortController {
_ => tracing::trace!("The row index cache is outdated"),
}
},
SortEvent::NewRowInserted(row_detail) => {
self.sort_rows(&mut row_details).await;
let row_index = self.row_index_cache.get(&row_detail.row.id).cloned();
SortEvent::NewRowInserted(row) => {
self.sort_rows(&mut rows).await;
let row_index = self.row_index_cache.get(&row.id).cloned();
match row_index {
Some(row_index) => {
let notification = InsertRowResult {
view_id: self.view_id.clone(),
row: row_detail.clone(),
row: row.clone(),
index: row_index,
};
self.row_index_cache.insert(row_detail.row.id, row_index);
self.row_index_cache.insert(row.id, row_index);
let _ = self
.notifier
.send(DatabaseViewChanged::InsertRowNotification(notification));
@ -210,20 +194,36 @@ impl SortController {
self.task_scheduler.write().await.add_task(task);
}

pub async fn sort_rows(&mut self, rows: &mut Vec<Arc<RowDetail>>) {
pub async fn sort_rows_and_notify(&mut self, rows: &mut Vec<Arc<Row>>) {
self.sort_rows(rows).await;
let row_orders = rows
.iter()
.map(|row| row.id.to_string())
.collect::<Vec<String>>();

let notification = ReorderAllRowsResult {
view_id: self.view_id.clone(),
row_orders,
};

let _ = self
.notifier
.send(DatabaseViewChanged::ReorderAllRowsNotification(
notification,
));
}

pub async fn sort_rows(&mut self, rows: &mut Vec<Arc<Row>>) {
if self.sorts.is_empty() {
return;
}

let fields = self.delegate.get_fields(&self.view_id, None).await;
for sort in self.sorts.iter().rev() {
rows
.par_sort_by(|left, right| cmp_row(&left.row, &right.row, sort, &fields, &self.cell_cache));
rows.par_sort_by(|left, right| cmp_row(left, right, sort, &fields, &self.cell_cache));
}
rows.iter().enumerate().for_each(|(index, row_detail)| {
self
.row_index_cache
.insert(row_detail.row.id.clone(), index);
rows.iter().enumerate().for_each(|(index, row)| {
self.row_index_cache.insert(row.id.clone(), index);
});
}
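With the hunks above, filtering and sorting both operate on the plain `Vec<Arc<Row>>` that the view now loads asynchronously, instead of `Arc<RowDetail>`. A hedged sketch of how a caller might chain the two controllers using the new signatures (`load_visible_rows` is an assumed helper for illustration only, not code from this commit):

use std::sync::Arc;
use collab_database::rows::Row;

// Hypothetical helper: drop rows hidden by the view's filters, then order the rest in place.
// FilterController and SortController are the types changed in the hunks above.
async fn load_visible_rows(
    filters: &FilterController,
    sorts: &mut SortController,
    mut rows: Vec<Arc<Row>>,
) -> Vec<Arc<Row>> {
    filters.filter_rows(&mut rows).await; // new filter_rows(&mut Vec<Arc<Row>>)
    sorts.sort_rows(&mut rows).await;     // new sort_rows(&mut Vec<Arc<Row>>)
    rows
}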

@ -363,7 +363,7 @@ fn cmp_cell(
enum SortEvent {
SortDidChanged,
RowDidChanged(RowId),
NewRowInserted(RowDetail),
NewRowInserted(Row),
DeleteAllSorts,
}

@ -3,7 +3,7 @@ use std::cmp::Ordering;
use anyhow::bail;
use collab::preclude::Any;
use collab::util::AnyMapExt;
use collab_database::rows::{RowDetail, RowId};
use collab_database::rows::{Row, RowId};
use collab_database::views::{SortMap, SortMapBuilder};

#[derive(Debug, Clone)]
@ -113,7 +113,7 @@ pub struct ReorderSingleRowResult {
#[derive(Clone)]
pub struct InsertRowResult {
pub view_id: String,
pub row: RowDetail,
pub row: Row,
pub index: usize,
}

@ -11,17 +11,17 @@ use crate::database::block_test::script::RowScript::*;
#[tokio::test]
async fn created_at_field_test() {
let mut test = DatabaseRowTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
test
.run_scripts(vec![CreateEmptyRow, AssertRowCount(row_count + 1)])
.await;

// Get created time of the new row.
let row_detail = test.get_rows().await.last().cloned().unwrap();
let row = test.get_rows().await.last().cloned().unwrap();
let updated_at_field = test.get_first_field(FieldType::CreatedTime).await;
let cell = test
.editor
.get_cell(&updated_at_field.id, &row_detail.row.id)
.get_cell(&updated_at_field.id, &row.id)
.await
.unwrap();
let created_at_timestamp = DateCellData::from(&cell).timestamp.unwrap();
@ -34,11 +34,11 @@ async fn created_at_field_test() {
#[tokio::test]
async fn update_at_field_test() {
let mut test = DatabaseRowTest::new().await;
let row_detail = test.get_rows().await.remove(0);
let row = test.get_rows().await.remove(0);
let last_edit_field = test.get_first_field(FieldType::LastEditedTime).await;
let cell = test
.editor
.get_cell(&last_edit_field.id, &row_detail.row.id)
.get_cell(&last_edit_field.id, &row.id)
.await
.unwrap();
let old_updated_at = DateCellData::from(&cell).timestamp.unwrap();
@ -46,17 +46,17 @@ async fn update_at_field_test() {
tokio::time::sleep(Duration::from_millis(1000)).await;
test
.run_script(UpdateTextCell {
row_id: row_detail.row.id.clone(),
row_id: row.id.clone(),
content: "test".to_string(),
})
.await;

// Get the updated time of the row.
let row_detail = test.get_rows().await.remove(0);
let row = test.get_rows().await.remove(0);
let last_edit_field = test.get_first_field(FieldType::LastEditedTime).await;
let cell = test
.editor
.get_cell(&last_edit_field.id, &row_detail.row.id)
.get_cell(&last_edit_field.id, &row.id)
.await
.unwrap();
let new_updated_at = DateCellData::from(&cell).timestamp.unwrap();

@ -37,13 +37,13 @@ impl DatabaseRowTest {
self
.row_by_row_id
.insert(row_detail.row.id.to_string(), row_detail.into());
self.row_details = self.get_rows().await;
self.rows = self.get_rows().await;
},
RowScript::UpdateTextCell { row_id, content } => {
self.update_text_cell(row_id, &content).await.unwrap();
},
RowScript::AssertRowCount(expected_row_count) => {
assert_eq!(expected_row_count, self.row_details.len());
assert_eq!(expected_row_count, self.rows.len());
},
}
}

@ -15,10 +15,10 @@ use crate::database::cell_test::script::DatabaseCellTest;
async fn grid_cell_update() {
let mut test = DatabaseCellTest::new().await;
let fields = test.get_fields().await;
let rows = &test.row_details;
let rows = &test.rows;

let mut scripts = vec![];
for row_detail in rows.iter() {
for row in rows.iter() {
for field in &fields {
let field_type = FieldType::from(field.field_type);
if field_type == FieldType::LastEditedTime || field_type == FieldType::CreatedTime {
@ -63,7 +63,7 @@ async fn grid_cell_update() {
scripts.push(UpdateCell {
view_id: test.view_id.clone(),
field_id: field.id.clone(),
row_id: row_detail.row.id.clone(),
row_id: row.id.clone(),
changeset: cell_changeset,
is_err: false,
});
@ -134,7 +134,7 @@ async fn update_updated_at_field_on_other_cell_update() {
test
.run_script(UpdateCell {
view_id: test.view_id.clone(),
row_id: test.row_details[0].row.id.clone(),
row_id: test.rows[0].id.clone(),
field_id: text_field.id.clone(),
changeset: BoxAny::new("change".to_string()),
is_err: false,

@ -4,7 +4,7 @@ use std::sync::Arc;
use collab_database::database::gen_database_view_id;
use collab_database::entity::SelectOption;
use collab_database::fields::Field;
use collab_database::rows::{RowDetail, RowId};
use collab_database::rows::{Row, RowId};
use lib_infra::box_any::BoxAny;
use strum::EnumCount;

@ -31,7 +31,7 @@ pub struct DatabaseEditorTest {
pub view_id: String,
pub editor: Arc<DatabaseEditor>,
pub fields: Vec<Arc<Field>>,
pub row_details: Vec<Arc<RowDetail>>,
pub rows: Vec<Arc<Row>>,
pub field_count: usize,
pub row_by_row_id: HashMap<String, RowMetaPB>,
}
@ -86,7 +86,7 @@ impl DatabaseEditorTest {
.map(Arc::new)
.collect();
let rows = editor
.get_all_row_details(&test.child_view.id)
.get_all_rows(&test.child_view.id)
.await
.unwrap()
.into_iter()
@ -98,7 +98,7 @@ impl DatabaseEditorTest {
view_id,
editor,
fields,
row_details: rows,
rows,
field_count: FieldType::COUNT,
row_by_row_id: HashMap::default(),
}
@ -108,12 +108,8 @@ impl DatabaseEditorTest {
self.editor.get_all_filters(&self.view_id).await.items
}

pub async fn get_rows(&self) -> Vec<Arc<RowDetail>> {
self
.editor
.get_all_row_details(&self.view_id)
.await
.unwrap()
pub async fn get_rows(&self) -> Vec<Arc<Row>> {
self.editor.get_all_rows(&self.view_id).await.unwrap()
}

pub async fn get_field(&self, field_id: &str, field_type: FieldType) -> Field {

@ -123,14 +123,10 @@ impl DatabaseFieldTest {
} => {
let field = self.editor.get_field(&field_id).await.unwrap();

let rows = self
.editor
.get_all_row_details(&self.view_id())
.await
.unwrap();
let row_detail = rows.get(row_index).unwrap();
let rows = self.editor.get_all_rows(&self.view_id()).await.unwrap();
let row = rows.get(row_index).unwrap();

let cell = row_detail.row.cells.get(&field_id).unwrap().clone();
let cell = row.cells.get(&field_id).unwrap().clone();
let content = stringify_cell(&cell, &field);
assert_eq!(content, expected_content);
},

@ -8,7 +8,7 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
async fn grid_filter_checkbox_is_check_test() {
let mut test = DatabaseFilterTest::new().await;
let expected = 3;
let row_count = test.row_details.len();
let row_count = test.rows.len();
// The initial number of checked is 3
// The initial number of unchecked is 4
let scripts = vec![
@ -32,7 +32,7 @@ async fn grid_filter_checkbox_is_check_test() {
async fn grid_filter_checkbox_is_uncheck_test() {
let mut test = DatabaseFilterTest::new().await;
let expected = 4;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let scripts = vec![
CreateDataFilter {
parent_filter_id: None,

@ -9,12 +9,12 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
async fn grid_filter_checklist_is_incomplete_test() {
let mut test = DatabaseFilterTest::new().await;
let expected = 5;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let option_ids = get_checklist_cell_options(&test).await;

let scripts = vec![
UpdateChecklistCell {
row_id: test.row_details[0].row.id.clone(),
row_id: test.rows[0].id.clone(),
selected_option_ids: option_ids,
},
CreateDataFilter {
@ -37,11 +37,11 @@ async fn grid_filter_checklist_is_incomplete_test() {
async fn grid_filter_checklist_is_complete_test() {
let mut test = DatabaseFilterTest::new().await;
let expected = 2;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let option_ids = get_checklist_cell_options(&test).await;
let scripts = vec![
UpdateChecklistCell {
row_id: test.row_details[0].row.id.clone(),
row_id: test.rows[0].id.clone(),
selected_option_ids: option_ids,
},
CreateDataFilter {
@ -62,10 +62,7 @@ async fn grid_filter_checklist_is_complete_test() {

async fn get_checklist_cell_options(test: &DatabaseFilterTest) -> Vec<String> {
let field = test.get_first_field(FieldType::Checklist).await;
let row_cell = test
.editor
.get_cell(&field.id, &test.row_details[0].row.id)
.await;
let row_cell = test.editor.get_cell(&field.id, &test.rows[0].id).await;
row_cell
.map_or(ChecklistCellData::default(), |cell| {
ChecklistCellData::from(&cell)

@ -7,7 +7,7 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
#[tokio::test]
async fn grid_filter_date_is_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 3;
let scripts = vec![
CreateDataFilter {
@ -32,7 +32,7 @@ async fn grid_filter_date_is_test() {
#[tokio::test]
async fn grid_filter_date_after_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 3;
let scripts = vec![
CreateDataFilter {
@ -57,7 +57,7 @@ async fn grid_filter_date_after_test() {
#[tokio::test]
async fn grid_filter_date_on_or_after_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 3;
let scripts = vec![
CreateDataFilter {
@ -82,7 +82,7 @@ async fn grid_filter_date_on_or_after_test() {
#[tokio::test]
async fn grid_filter_date_on_or_before_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 4;
let scripts = vec![
CreateDataFilter {
@ -107,7 +107,7 @@ async fn grid_filter_date_on_or_before_test() {
#[tokio::test]
async fn grid_filter_date_within_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 5;
let scripts = vec![
CreateDataFilter {

@ -7,7 +7,7 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
#[tokio::test]
async fn grid_filter_number_is_equal_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 1;
let scripts = vec![
CreateDataFilter {
@ -30,7 +30,7 @@ async fn grid_filter_number_is_equal_test() {
#[tokio::test]
async fn grid_filter_number_is_less_than_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 2;
let scripts = vec![
CreateDataFilter {
@ -54,7 +54,7 @@ async fn grid_filter_number_is_less_than_test() {
#[should_panic]
async fn grid_filter_number_is_less_than_test2() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 2;
let scripts = vec![
CreateDataFilter {
@ -77,7 +77,7 @@ async fn grid_filter_number_is_less_than_test2() {
#[tokio::test]
async fn grid_filter_number_is_less_than_or_equal_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 3;
let scripts = vec![
CreateDataFilter {
@ -100,7 +100,7 @@ async fn grid_filter_number_is_less_than_or_equal_test() {
#[tokio::test]
async fn grid_filter_number_is_empty_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 2;
let scripts = vec![
CreateDataFilter {
@ -123,7 +123,7 @@ async fn grid_filter_number_is_empty_test() {
#[tokio::test]
async fn grid_filter_number_is_not_empty_test() {
let mut test = DatabaseFilterTest::new().await;
let row_count = test.row_details.len();
let row_count = test.rows.len();
let expected = 5;
let scripts = vec![
CreateDataFilter {
@ -301,7 +301,7 @@ impl DatabaseFilterTest {
        }
      },
      FilterScript::AssertNumberOfVisibleRows { expected } => {
        let grid = self.editor.open_database(&self.view_id).await.unwrap();
        let grid = self.editor.open_database_view(&self.view_id).await.unwrap();
        assert_eq!(grid.rows.len(), expected);
      },
      FilterScript::Wait { millisecond } => {
@ -84,7 +84,7 @@ async fn grid_filter_multi_select_is_test2() {
async fn grid_filter_single_select_is_empty_test() {
  let mut test = DatabaseFilterTest::new().await;
  let expected = 3;
  let row_count = test.row_details.len();
  let row_count = test.rows.len();
  let scripts = vec![
    CreateDataFilter {
      parent_filter_id: None,
@ -109,7 +109,7 @@ async fn grid_filter_single_select_is_test() {
  let field = test.get_first_field(FieldType::SingleSelect).await;
  let mut options = test.get_single_select_type_option(&field.id).await;
  let expected = 2;
  let row_count = test.row_details.len();
  let row_count = test.rows.len();
  let scripts = vec![
    CreateDataFilter {
      parent_filter_id: None,
@ -135,7 +135,7 @@ async fn grid_filter_single_select_is_test2() {
  let row_details = test.get_rows().await;
  let mut options = test.get_single_select_type_option(&field.id).await;
  let option = options.remove(0);
  let row_count = test.row_details.len();
  let row_count = test.rows.len();

  let scripts = vec![
    CreateDataFilter {
@ -152,13 +152,13 @@ async fn grid_filter_single_select_is_test2() {
    },
    AssertNumberOfVisibleRows { expected: 2 },
    UpdateSingleSelectCell {
      row_id: row_details[1].row.id.clone(),
      row_id: row_details[1].id.clone(),
      option_id: option.id.clone(),
      changed: None,
    },
    AssertNumberOfVisibleRows { expected: 3 },
    UpdateSingleSelectCell {
      row_id: row_details[1].row.id.clone(),
      row_id: row_details[1].id.clone(),
      option_id: "".to_string(),
      changed: Some(FilterRowChanged {
        showing_num_of_rows: 0,
@ -102,7 +102,7 @@ async fn grid_filter_contain_text_test() {
#[tokio::test]
async fn grid_filter_contain_text_test2() {
  let mut test = DatabaseFilterTest::new().await;
  let row_detail = test.row_details.clone();
  let row_detail = test.rows.clone();

  let scripts = vec![
    CreateDataFilter {
@ -118,7 +118,7 @@ async fn grid_filter_contain_text_test2() {
      }),
    },
    UpdateTextCell {
      row_id: row_detail[1].row.id.clone(),
      row_id: row_detail[1].id.clone(),
      text: "ABC".to_string(),
      changed: Some(FilterRowChanged {
        showing_num_of_rows: 1,
@ -257,7 +257,7 @@ async fn grid_filter_delete_test() {
#[tokio::test]
async fn grid_filter_update_empty_text_cell_test() {
  let mut test = DatabaseFilterTest::new().await;
  let row_details = test.row_details.clone();
  let row = test.rows.clone();
  let scripts = vec![
    CreateDataFilter {
      parent_filter_id: None,
@ -273,7 +273,7 @@ async fn grid_filter_update_empty_text_cell_test() {
    },
    AssertFilterCount { count: 1 },
    UpdateTextCell {
      row_id: row_details[0].row.id.clone(),
      row_id: row[0].id.clone(),
      text: "".to_string(),
      changed: Some(FilterRowChanged {
        showing_num_of_rows: 1,
@ -7,7 +7,7 @@ use crate::database::filter_test::script::{DatabaseFilterTest, FilterRowChanged}
#[tokio::test]
async fn grid_filter_time_is_equal_test() {
  let mut test = DatabaseFilterTest::new().await;
  let row_count = test.row_details.len();
  let row_count = test.rows.len();
  let expected = 1;
  let scripts = vec![
    CreateDataFilter {
@ -30,7 +30,7 @@ async fn grid_filter_time_is_equal_test() {
#[tokio::test]
async fn grid_filter_time_is_less_than_test() {
  let mut test = DatabaseFilterTest::new().await;
  let row_count = test.row_details.len();
  let row_count = test.rows.len();
  let expected = 1;
  let scripts = vec![
    CreateDataFilter {
@ -54,7 +54,7 @@ async fn grid_filter_time_is_less_than_test() {
#[tokio::test]
async fn grid_filter_time_is_less_than_or_equal_test() {
  let mut test = DatabaseFilterTest::new().await;
  let row_count = test.row_details.len();
  let row_count = test.rows.len();
  let expected = 1;
  let scripts = vec![
    CreateDataFilter {
@ -77,7 +77,7 @@ async fn grid_filter_time_is_less_than_or_equal_test() {
#[tokio::test]
async fn grid_filter_time_is_empty_test() {
  let mut test = DatabaseFilterTest::new().await;
  let row_count = test.row_details.len();
  let row_count = test.rows.len();
  let expected = 6;
  let scripts = vec![
    CreateDataFilter {
@ -100,7 +100,7 @@ async fn grid_filter_time_is_empty_test() {
#[tokio::test]
async fn grid_filter_time_is_not_empty_test() {
  let mut test = DatabaseFilterTest::new().await;
  let row_count = test.row_details.len();
  let row_count = test.rows.len();
  let expected = 1;
  let scripts = vec![
    CreateDataFilter {
@ -184,13 +184,13 @@ async fn change_date_on_moving_row_to_another_group() {
  let group = test.group_at_index(2).await;
  let rows = group.clone().rows;
  let row_id = &rows.first().unwrap().id;
  let row_detail = test
  let row = test
    .get_rows()
    .await
    .into_iter()
    .find(|r| r.row.id.to_string() == *row_id)
    .find(|r| r.id.to_string() == *row_id)
    .unwrap();
  let cell = row_detail.row.cells.get(&date_field.id.clone()).unwrap();
  let cell = row.cells.get(&date_field.id.clone()).unwrap();
  let date_cell = DateCellData::from(cell);

  let date_time =
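In the grouping test above, the row is now located by `r.id` and its cell map is read directly, with no intermediate `.row` hop. A rough standalone illustration of that lookup, again with a hypothetical `Row` struct in place of the real type:

```rust
use std::collections::HashMap;

#[derive(Clone)]
struct Row {
    id: String,
    cells: HashMap<String, String>, // field id -> raw cell data (simplified)
}

fn main() {
    let date_field_id = "date_field".to_string();
    let rows = vec![
        Row { id: "a".into(), cells: HashMap::new() },
        Row {
            id: "b".into(),
            cells: HashMap::from([(date_field_id.clone(), "1710460800".to_string())]),
        },
    ];

    // Mirrors `.find(|r| r.id.to_string() == *row_id)` followed by
    // `row.cells.get(&date_field.id)` in the updated test.
    let row_id = "b".to_string();
    let row = rows
        .into_iter()
        .find(|r| r.id == row_id)
        .expect("row should exist in the group");
    let cell = row.cells.get(&date_field_id).expect("date cell should exist");

    assert_eq!(cell.as_str(), "1710460800");
}
```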
@ -42,12 +42,12 @@ async fn according_to_text_contains_filter_test() {
  let scripts = vec![
    AssertCellExistence {
      field_id: text_field.id.clone(),
      row_index: test.row_details.len() - 1,
      row_index: test.rows.len() - 1,
      exists: true,
    },
    AssertCellContent {
      field_id: text_field.id,
      row_index: test.row_details.len() - 1,
      row_index: test.rows.len() - 1,

      expected_content: "sample".to_string(),
    },
@ -84,7 +84,7 @@ async fn according_to_empty_text_contains_filter_test() {

  let scripts = vec![AssertCellExistence {
    field_id: text_field.id.clone(),
    row_index: test.row_details.len() - 1,
    row_index: test.rows.len() - 1,
    exists: false,
  }];

@ -278,7 +278,7 @@ async fn according_to_invalid_date_time_is_filter_test() {
    AssertRowCount(8),
    AssertCellExistence {
      field_id: datetime_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: false,
    },
  ];
@ -29,12 +29,12 @@ async fn row_data_payload_with_empty_hashmap_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: text_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: false,
    },
    AssertCellContent {
      field_id: text_field.id,
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: "".to_string(),
    },
@ -64,18 +64,18 @@ async fn row_data_payload_with_unknown_field_id_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: text_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: false,
    },
    AssertCellContent {
      field_id: text_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: "".to_string(),
    },
    AssertCellExistence {
      field_id: malformed_field_id.to_string(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: false,
    },
  ];
@ -101,12 +101,12 @@ async fn row_data_payload_with_empty_string_text_data_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: text_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertCellContent {
      field_id: text_field.id,
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: cell_data.to_string(),
    },
@ -133,12 +133,12 @@ async fn row_data_payload_with_text_data_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: text_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertCellContent {
      field_id: text_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: cell_data.to_string(),
    },
@ -174,34 +174,34 @@ async fn row_data_payload_with_multi_text_data_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: text_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertCellContent {
      field_id: text_field.id,
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: text_cell_data.to_string(),
    },
    AssertCellExistence {
      field_id: number_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertCellContent {
      field_id: number_field.id,
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: "$1,234".to_string(),
    },
    AssertCellExistence {
      field_id: url_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertCellContent {
      field_id: url_field.id,
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: url_cell_data.to_string(),
    },
@ -228,12 +228,12 @@ async fn row_data_payload_with_date_time_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: date_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertCellContent {
      field_id: date_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: "2024/03/15".to_string(),
    },
@ -264,7 +264,7 @@ async fn row_data_payload_with_invalid_date_time_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: date_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: false,
    },
  ];
@ -290,12 +290,12 @@ async fn row_data_payload_with_checkbox_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: checkbox_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertCellContent {
      field_id: checkbox_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: cell_data.to_string(),
    },
@ -336,12 +336,12 @@ async fn row_data_payload_with_select_option_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: multi_select_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertCellContent {
      field_id: multi_select_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),

      expected_content: stringified_cell_data,
    },
@ -373,12 +373,12 @@ async fn row_data_payload_with_invalid_select_option_id_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: multi_select_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertSelectOptionCellStrict {
      field_id: multi_select_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      expected_content: first_id,
    },
  ];
@ -414,12 +414,12 @@ async fn row_data_payload_with_too_many_select_option_test() {
    Wait { milliseconds: 100 },
    AssertCellExistence {
      field_id: single_select_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      exists: true,
    },
    AssertSelectOptionCellStrict {
      field_id: single_select_field.id.clone(),
      row_index: test.row_details.len(),
      row_index: test.rows.len(),
      expected_content: stringified_cell_data,
    },
  ];
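Throughout these payload tests the assertions target `row_index: test.rows.len()`, which appears to be the index of the row the preceding create-row step appends, since `rows` is the snapshot taken before that row exists. A tiny sketch of that indexing convention (hypothetical data, not the real fixture):

```rust
fn main() {
    // Snapshot of the rows taken when the scripts are built, before the test
    // creates the new row.
    let rows = vec!["r0", "r1", "r2"];
    let new_row_index = rows.len(); // 3: the index where the appended row lands

    // Simulate the row the script creates being appended to the view.
    let mut all_rows = rows.clone();
    all_rows.push("r3");

    assert_eq!(all_rows[new_row_index], "r3");
}
```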
@ -63,14 +63,14 @@ impl DatabasePreFillRowCellTest {
        self
          .row_by_row_id
          .insert(row_detail.row.id.to_string(), row_detail.into());
        self.row_details = self.get_rows().await;
        self.rows = self.get_rows().await;
      },
      PreFillRowCellTestScript::CreateRowWithPayload { payload } => {
        let row_detail = self.editor.create_row(payload).await.unwrap().unwrap();
        self
          .row_by_row_id
          .insert(row_detail.row.id.to_string(), row_detail.into());
        self.row_details = self.get_rows().await;
        self.rows = self.get_rows().await;
      },
      PreFillRowCellTestScript::InsertFilter { filter } => self
        .editor
@ -86,11 +86,7 @@ impl DatabasePreFillRowCellTest {
        .await
        .unwrap(),
      PreFillRowCellTestScript::AssertRowCount(expected_row_count) => {
        let rows = self
          .editor
          .get_all_row_details(&self.view_id)
          .await
          .unwrap();
        let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
        assert_eq!(expected_row_count, rows.len());
      },
      PreFillRowCellTestScript::AssertCellExistence {
@ -98,15 +94,9 @@ impl DatabasePreFillRowCellTest {
        row_index,
        exists,
      } => {
        let rows = self
          .editor
          .get_all_row_details(&self.view_id)
          .await
          .unwrap();
        let row_detail = rows.get(row_index).unwrap();

        let cell = row_detail.row.cells.get(&field_id).cloned();

        let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
        let row = rows.get(row_index).unwrap();
        let cell = row.cells.get(&field_id).cloned();
        assert_eq!(exists, cell.is_some());
      },
      PreFillRowCellTestScript::AssertCellContent {
@ -116,19 +106,9 @@ impl DatabasePreFillRowCellTest {
      } => {
        let field = self.editor.get_field(&field_id).await.unwrap();

        let rows = self
          .editor
          .get_all_row_details(&self.view_id)
          .await
          .unwrap();
        let row_detail = rows.get(row_index).unwrap();

        let cell = row_detail
          .row
          .cells
          .get(&field_id)
          .cloned()
          .unwrap_or_default();
        let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
        let row = rows.get(row_index).unwrap();
        let cell = row.cells.get(&field_id).cloned().unwrap_or_default();
        let content = stringify_cell(&cell, &field);
        assert_eq!(content, expected_content);
      },
@ -137,22 +117,10 @@ impl DatabasePreFillRowCellTest {
        row_index,
        expected_content,
      } => {
        let rows = self
          .editor
          .get_all_row_details(&self.view_id)
          .await
          .unwrap();
        let row_detail = rows.get(row_index).unwrap();

        let cell = row_detail
          .row
          .cells
          .get(&field_id)
          .cloned()
          .unwrap_or_default();

        let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
        let row = rows.get(row_index).unwrap();
        let cell = row.cells.get(&field_id).cloned().unwrap_or_default();
        let content = SelectOptionIds::from(&cell).join(SELECTION_IDS_SEPARATOR);

        assert_eq!(content, expected_content);
      },
      PreFillRowCellTestScript::Wait { milliseconds } => {
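The assertion arms above collapse the old multi-step lookup (fetch row details, unwrap the wrapper, then dig through `.row.cells`) into a single index-plus-field access on plain rows. A hedged sketch of that access pattern as a small helper, using simplified stand-in types rather than the real editor API:

```rust
use std::collections::HashMap;

#[derive(Clone, Default)]
struct Cell(String); // simplified stand-in for the real cell type

struct Row {
    cells: HashMap<String, Cell>,
}

// Mirrors the collapsed lookup in the updated script: index into the row list,
// then fetch the cell by field id, falling back to a default when absent.
fn cell_at(rows: &[Row], row_index: usize, field_id: &str) -> Cell {
    rows.get(row_index)
        .and_then(|row| row.cells.get(field_id).cloned())
        .unwrap_or_default()
}

fn main() {
    let rows = vec![Row {
        cells: HashMap::from([("f1".to_string(), Cell("hello".to_string()))]),
    }];

    assert_eq!(cell_at(&rows, 0, "f1").0, "hello");
    assert_eq!(cell_at(&rows, 0, "missing").0, ""); // default when the cell is absent
}
```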
@ -33,7 +33,7 @@ async fn export_and_then_import_meta_csv_test() {
  let database = test.get_database(&result.database_id).await.unwrap();

  let fields = database.get_fields(&result.view_id, None).await;
  let rows = database.get_all_row_details(&result.view_id).await.unwrap();
  let rows = database.get_all_rows(&result.view_id).await.unwrap();
  assert_eq!(fields[0].field_type, 0);
  assert_eq!(fields[1].field_type, 1);
  assert_eq!(fields[2].field_type, 2);
@ -46,8 +46,8 @@ async fn export_and_then_import_meta_csv_test() {
  assert_eq!(fields[9].field_type, 9);

  for field in fields {
    for (index, row_detail) in rows.iter().enumerate() {
      if let Some(cell) = row_detail.row.cells.get(&field.id) {
    for (index, row) in rows.iter().enumerate() {
      if let Some(cell) = row.cells.get(&field.id) {
        let field_type = FieldType::from(field.field_type);
        let s = stringify_cell(cell, &field);
        match &field_type {
@ -89,7 +89,7 @@ async fn export_and_then_import_meta_csv_test() {
      } else {
        panic!(
          "Can not found the cell with id: {} in {:?}",
          field.id, row_detail.row.cells
          field.id, row.cells
        );
      }
    }
@ -112,7 +112,7 @@ async fn history_database_import_test() {
  let database = test.get_database(&result.database_id).await.unwrap();

  let fields = database.get_fields(&result.view_id, None).await;
  let rows = database.get_all_row_details(&result.view_id).await.unwrap();
  let rows = database.get_all_rows(&result.view_id).await.unwrap();
  assert_eq!(fields[0].field_type, 0);
  assert_eq!(fields[1].field_type, 1);
  assert_eq!(fields[2].field_type, 2);
@ -123,8 +123,8 @@ async fn history_database_import_test() {
  assert_eq!(fields[7].field_type, 7);

  for field in fields {
    for (index, row_detail) in rows.iter().enumerate() {
      if let Some(cell) = row_detail.row.cells.get(&field.id) {
    for (index, row) in rows.iter().enumerate() {
      if let Some(cell) = row.cells.get(&field.id) {
        let field_type = FieldType::from(field.field_type);
        let s = stringify_cell(cell, &field);
        match &field_type {
@ -174,7 +174,7 @@ async fn history_database_import_test() {
      } else {
        panic!(
          "Can not found the cell with id: {} in {:?}",
          field.id, row_detail.row.cells
          field.id, row.cells
        );
      }
    }
@ -117,14 +117,10 @@ impl DatabaseSortTest {
      },
      SortScript::AssertCellContentOrder { field_id, orders } => {
        let mut cells = vec![];
        let rows = self
          .editor
          .get_all_row_details(&self.view_id)
          .await
          .unwrap();
        let rows = self.editor.get_all_rows(&self.view_id).await.unwrap();
        let field = self.editor.get_field(&field_id).await.unwrap();
        for row_detail in rows {
          if let Some(cell) = row_detail.row.cells.get(&field_id) {
        for row in rows {
          if let Some(cell) = row.cells.get(&field_id) {
            let content = stringify_cell(cell, &field);
            cells.push(content);
          } else {
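The sort assertion above walks the freshly fetched rows in order and stringifies one field's cell from each, so the collected vector reflects the current sort order. A minimal sketch of that collection step, with `stringify_cell` reduced to a clone and hypothetical types standing in for the real ones:

```rust
use std::collections::HashMap;

struct Row {
    cells: HashMap<String, String>,
}

// Collects one field's content across all rows, in row order, the way the
// updated `AssertCellContentOrder` arm does.
fn cell_order(rows: &[Row], field_id: &str) -> Vec<String> {
    let mut cells = vec![];
    for row in rows {
        if let Some(cell) = row.cells.get(field_id) {
            cells.push(cell.clone());
        }
    }
    cells
}

fn main() {
    let rows = vec![
        Row { cells: HashMap::from([("text".to_string(), "A".to_string())]) },
        Row { cells: HashMap::from([("text".to_string(), "C".to_string())]) },
        Row { cells: HashMap::from([("text".to_string(), "E".to_string())]) },
    ];
    assert_eq!(cell_order(&rows, "text"), vec!["A", "C", "E"]);
}
```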
@ -67,10 +67,10 @@ async fn sort_change_notification_by_update_text_test() {
  ];
  test.run_scripts(scripts).await;

  let row_details = test.get_rows().await;
  let row = test.get_rows().await;
  let scripts = vec![
    UpdateTextCell {
      row_id: row_details[1].row.id.clone(),
      row_id: row[1].id.clone(),
      text: "E".to_string(),
    },
    AssertSortChanged {